From ad42cd93cd11828f7b3cb1671b81fd852413c019 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 15 Aug 2024 02:57:18 +0800
Subject: [PATCH 001/126] feat: support conditional encode

---
 encoding/codecv2/codecv2.go      |  34 ++++++--
 encoding/codecv2/codecv2_test.go |  20 ++---
 encoding/codecv3/codecv3.go      |  16 ++--
 encoding/codecv3/codecv3_test.go | 136 +++++++++++++++-------
 encoding/da.go                   |   2 +-
 5 files changed, 115 insertions(+), 93 deletions(-)

diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index 38ef939..ff7667f 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -89,7 +89,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
     }

     // blob payload
-    blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
+    blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no conditional encode */, false /* no mock */)
     if err != nil {
         return nil, err
     }
@@ -119,7 +119,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }

 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
     // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
     metadataLength := 2 + MaxNumChunks*4

@@ -184,6 +184,20 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484
         return nil, common.Hash{}, nil, err
     }

+    if conditionalEncode {
+        encoded := len(blobBytes) < len(batchBytes)
+        if encoded {
+            blobBytes = append([]byte{1}, blobBytes...)
+        } else {
+            blobBytes = append([]byte{0}, batchBytes...)
+        }
+    }
+
+    if len(blobBytes) > 126976 {
+        log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
+        return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
+    }
+
     // Only apply this check when the uncompressed batch data has exceeded 128 KiB.
     if !useMockTxData && len(batchBytes) > 131072 {
         // Check compressed data compatibility.
@@ -306,7 +320,7 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }

 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
     batchBytes, err := constructBatchPayload([]*encoding.Chunk{c})
     if err != nil {
         return 0, 0, err
@@ -315,11 +329,15 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint6
     if err != nil {
         return 0, 0, err
     }
-    return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
+    blobBytesLen := uint64(len(blobBytes))
+    if conditionalEncode {
+        blobBytesLen += 1
+    }
+    return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
 }

 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
     batchBytes, err := constructBatchPayload(b.Chunks)
     if err != nil {
         return 0, 0, err
@@ -328,7 +346,11 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint6
     if err != nil {
         return 0, 0, err
     }
-    return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
+    blobBytesLen := uint64(len(blobBytes))
+    if conditionalEncode {
+        blobBytesLen += 1
+    }
+    return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
 }

 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go
index 808c71f..a623f66 100644
--- a/encoding/codecv2/codecv2_test.go
+++ b/encoding/codecv2/codecv2_test.go
@@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
             chunks = append(chunks, chunk)
         }

-        blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
+        blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
         require.NoError(t, err)
         actualZ := hex.EncodeToString(z[:])
         assert.Equal(t, tc.expectedz, actualZ)
@@ -870,52 +870,52 @@ func TestCodecV2BatchSkipBitmap(t *testing.T) {
 func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-    chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
+    chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(412), chunk2BatchBytesSize)
     assert.Equal(t, uint64(237), chunk2BlobSize)

     batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
+    batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(412), batch2BatchBytesSize)
     assert.Equal(t, uint64(237), batch2BlobSize)

     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-    chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
+    chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
     assert.Equal(t, uint64(2933), chunk3BlobSize)

     batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
+    batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(5863), batch3BatchBytesSize)
     assert.Equal(t, uint64(2933), batch3BlobSize)

     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-    chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
+    chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(214), chunk4BatchBytesSize)
     assert.Equal(t, uint64(54), chunk4BlobSize)

     batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
+    blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(214), blob4BatchBytesSize)
     assert.Equal(t, uint64(54), batch4BlobSize)

     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-    chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
+    chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
     assert.Equal(t, uint64(3149), chunk5BlobSize)

     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-    chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
+    chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(214), chunk6BatchBytesSize)
     assert.Equal(t, uint64(54), chunk6BlobSize)

     batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-    batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
+    batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(6125), batch5BatchBytesSize)
     assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go
index bfe0d2a..8be9694 100644
--- a/encoding/codecv3/codecv3.go
+++ b/encoding/codecv3/codecv3.go
@@ -53,7 +53,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh
 }

 // NewDABatch creates a DABatch from the provided encoding.Batch.
-func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
+func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error) {
     // this encoding can only support a fixed number of chunks per batch
     if len(batch.Chunks) > MaxNumChunks {
         return nil, errors.New("too many chunks in batch")
@@ -80,7 +80,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
     }

     // blob payload
-    blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
+    blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, conditionalEncode, false /* no mock */)
     if err != nil {
         return nil, err
     }
@@ -118,8 +118,8 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }

 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
-    return codecv2.ConstructBlobPayload(chunks, useMockTxData)
+func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+    return codecv2.ConstructBlobPayload(chunks, conditionalEncode, useMockTxData)
 }

 // NewDABatchFromBytes decodes the given byte slice into a DABatch.
@@ -232,13 +232,13 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }

 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
-    return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c)
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
+    return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c, conditionalEncode)
 }

 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
-    return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b)
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
+    return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b, conditionalEncode)
 }

 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go
index f2eb41a..080f743 100644
--- a/encoding/codecv3/codecv3_test.go
+++ b/encoding/codecv3/codecv3_test.go
@@ -217,7 +217,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded)
@@ -225,7 +225,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded)
@@ -233,7 +233,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded)
@@ -241,7 +241,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
@@ -249,7 +249,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
@@ -257,13 +257,13 @@ func TestCodecV3BatchEncode(t *testing.T) {
     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)

     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded)
@@ -271,7 +271,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = hex.EncodeToString(batch.Encode())
     assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded)
@@ -285,54 +285,54 @@ func TestCodecV3BatchHash(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex())

     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex())

     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex())

     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex())

     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex())

     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex())

     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex())

     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex())
 }
@@ -341,54 +341,54 @@ func TestCodecV3BatchDataHash(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex())

     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex())

     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex())

     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex())

     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex())

     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex())

     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex())

     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex())
 }
@@ -397,7 +397,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded)
@@ -406,7 +406,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded)
@@ -415,7 +415,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded)
@@ -425,7 +425,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
@@ -434,7 +434,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
@@ -443,7 +443,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
@@ -451,7 +451,7 @@ func TestCodecV3BatchBlob(t *testing.T) {

     // 15 chunks
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded)
@@ -460,7 +460,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
     assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded)
@@ -471,55 +471,55 @@ func TestCodecV3BatchChallenge(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:]))

     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:]))

     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:]))

     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))

     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))

     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))

     // 15 chunks
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:]))

     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:]))
 }
@@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) {
             chunks = append(chunks, chunk)
         }

-        blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
+        blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
         require.NoError(t, err)
         actualZ := hex.EncodeToString(z[:])
         assert.Equal(t, tc.expectedz, actualZ)
@@ -710,7 +710,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err := batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -719,7 +719,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -728,7 +728,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -737,7 +737,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -746,7 +746,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -755,7 +755,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -763,7 +763,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {

     // 15 chunks
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -772,7 +772,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     verifyData, err = batch.BlobDataProofForPointEvaluation()
     assert.NoError(t, err)
@@ -783,7 +783,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
     originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch, err := NewDABatch(originalBatch)
+    batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 0, int(batch.L1MessagePopped))
     assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
@@ -791,7 +791,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 0, int(batch.L1MessagePopped))
     assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
@@ -799,7 +799,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1
     assert.Equal(t, 11, int(batch.TotalL1MessagePopped))
@@ -807,13 +807,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5
     assert.Equal(t, 42, int(batch.TotalL1MessagePopped))

     originalBatch.TotalL1MessagePoppedBefore = 37
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5
     assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
@@ -821,7 +821,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3
     assert.Equal(t, 10, int(batch.TotalL1MessagePopped))
@@ -829,13 +829,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
     chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2
     assert.Equal(t, 257, int(batch.TotalL1MessagePopped))

     originalBatch.TotalL1MessagePoppedBefore = 1
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2
     assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
@@ -843,13 +843,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
     chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10
     chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41
     originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 42, int(batch.L1MessagePopped))
     assert.Equal(t, 42, int(batch.TotalL1MessagePopped))

     originalBatch.TotalL1MessagePoppedBefore = 10
-    batch, err = NewDABatch(originalBatch)
+    batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, 32, int(batch.L1MessagePopped))
     assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
@@ -858,52 +858,52 @@ func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) {
     trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
     chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-    chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
+    chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(412), chunk2BatchBytesSize)
     assert.Equal(t, uint64(237), chunk2BlobSize)

     batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-    batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
+    batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(412), batch2BatchBytesSize)
     assert.Equal(t, uint64(237), batch2BlobSize)

     trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
     chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-    chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
+    chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
     assert.Equal(t, uint64(2933), chunk3BlobSize)

     batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-    batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
+    batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(5863), batch3BatchBytesSize)
     assert.Equal(t, uint64(2933), batch3BlobSize)

     trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
     chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-    chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
+    chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(214), chunk4BatchBytesSize)
     assert.Equal(t, uint64(54), chunk4BlobSize)

     batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-    blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
+    blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(214), blob4BatchBytesSize)
     assert.Equal(t, uint64(54), batch4BlobSize)

     chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-    chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
+    chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
     assert.Equal(t, uint64(3149), chunk5BlobSize)

     chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-    chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
+    chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(214), chunk6BatchBytesSize)
     assert.Equal(t, uint64(54), chunk6BlobSize)

     batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-    batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
+    batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
     assert.NoError(t, err)
     assert.Equal(t, uint64(6125), batch5BatchBytesSize)
     assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/da.go b/encoding/da.go
index 73d8b23..35befdb 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -21,7 +21,7 @@ const (
     // CodecV2 represents the version 2 of the encoder and decoder.
     CodecV2

-    // CodecV3 represents the version 2 of the encoder and decoder.
+    // CodecV3 represents the version 3 of the encoder and decoder.
     CodecV3
 )

From 62758c888b007774f3d22f2728aa90ce92e3455a Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 15 Aug 2024 03:16:25 +0800
Subject: [PATCH 002/126] move append conditionalEncode flag after validity check

---
 encoding/codecv2/codecv2.go | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index ff7667f..5e4b464 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -184,6 +184,15 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
         return nil, common.Hash{}, nil, err
     }

+    // Only apply this check when the uncompressed batch data has exceeded 128 KiB.
+    if !useMockTxData && len(batchBytes) > 131072 {
+        // Check compressed data compatibility.
+        if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
+            log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
+            return nil, common.Hash{}, nil, err
+        }
+    }
+
     if conditionalEncode {
         encoded := len(blobBytes) < len(batchBytes)
         if encoded {
@@ -198,15 +207,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
         return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
     }

-    // Only apply this check when the uncompressed batch data has exceeded 128 KiB.
-    if !useMockTxData && len(batchBytes) > 131072 {
-        // Check compressed data compatibility.
-        if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
-            log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
-            return nil, common.Hash{}, nil, err
-        }
-    }
-
     // convert raw data to BLSFieldElements
     blob, err := MakeBlobCanonical(blobBytes)
     if err != nil {
-	if !useMockTxData && len(batchBytes) > 131072 {
-		// Check compressed data compatibility.
-		if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
-			log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
-			return nil, common.Hash{}, nil, err
-		}
-	}
-
 	// convert raw data to BLSFieldElements
 	blob, err := MakeBlobCanonical(blobBytes)
 	if err != nil {

From 6901956c991008919ebfac9ee45c9ff8f370c365 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Sun, 18 Aug 2024 20:28:43 +0800
Subject: [PATCH 003/126] update da-codec

---
 encoding/codecv1/codecv1.go      |   5 +-
 encoding/codecv2/codecv2.go      |  34 +-
 encoding/codecv2/codecv2_test.go |  28 +-
 encoding/codecv3/codecv3.go      |  16 +-
 encoding/codecv3/codecv3_test.go | 144 +++---
 encoding/codecv4/codecv4.go      | 534 ++++++++++++++++++++
 encoding/codecv4/codecv4_test.go | 837 +++++++++++++++++++++++++++++++
 encoding/da.go                   |   7 +
 8 files changed, 1480 insertions(+), 125 deletions(-)
 create mode 100644 encoding/codecv4/codecv4.go
 create mode 100644 encoding/codecv4/codecv4_test.go

diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go
index a6190d4..205f257 100644
--- a/encoding/codecv1/codecv1.go
+++ b/encoding/codecv1/codecv1.go
@@ -20,9 +20,6 @@ import (
 	"github.com/scroll-tech/da-codec/encoding/codecv0"
 )
 
-// BLSModulus is the BLS modulus defined in EIP-4844.
-var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001"))
-
 // MaxNumChunks is the maximum number of chunks that a batch can contain.
 const MaxNumChunks = 15
 
@@ -280,7 +277,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484
 	// compute z = challenge_digest % BLS_MODULUS
 	challengeDigest := crypto.Keccak256Hash(challengePreimage)
-	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)
 	pointBytes := pointBigInt.Bytes()
 
 	// the challenge point z
diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index 5e4b464..3edf328 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -26,9 +26,6 @@ import (
 	"github.com/scroll-tech/da-codec/encoding/codecv1"
 )
 
-// BLSModulus is the BLS modulus defined in EIP-4844.
-var BLSModulus = codecv1.BLSModulus
-
 // MaxNumChunks is the maximum number of chunks that a batch can contain.
 const MaxNumChunks = 45
 
@@ -89,7 +86,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no conditional encode */, false /* no mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -119,7 +116,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }
 
 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
 	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 	metadataLength := 2 + MaxNumChunks*4
 
@@ -193,15 +190,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 		}
 	}
 
-	if conditionalEncode {
-		encoded := len(blobBytes) < len(batchBytes)
-		if encoded {
-			blobBytes = append([]byte{1}, blobBytes...)
-		} else {
-			blobBytes = append([]byte{0}, batchBytes...)
-		}
-	}
-
 	if len(blobBytes) > 126976 {
 		log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
 		return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
 	}
@@ -225,7 +213,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 	// compute z = challenge_digest % BLS_MODULUS
 	challengeDigest := crypto.Keccak256Hash(challengePreimage)
-	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)
 	pointBytes := pointBigInt.Bytes()
 
 	// the challenge point z
@@ -320,7 +308,7 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }
 
 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayload([]*encoding.Chunk{c})
 	if err != nil {
 		return 0, 0, err
@@ -329,15 +317,11 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEnc
 	if err != nil {
 		return 0, 0, err
 	}
-	blobBytesLen := uint64(len(blobBytes))
-	if conditionalEncode {
-		blobBytesLen += 1
-	}
-	return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
}
 
 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayload(b.Chunks)
 	if err != nil {
 		return 0, 0, err
@@ -346,11 +330,7 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEnc
 	if err != nil {
 		return 0, 0, err
 	}
-	blobBytesLen := uint64(len(blobBytes))
-	if conditionalEncode {
-		blobBytesLen += 1
-	}
-	return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
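A note for readers following the mechanism rather than the individual hunks: the conditional-encode scheme that PATCH 001 introduced, PATCH 002 reordered, and this patch removes from codecv2 (it resurfaces in the new codecv4 files) amounts to a one-byte envelope. Keep the compressed blob bytes only when they are actually smaller than the raw batch bytes, and prepend a flag byte (1 for compressed, 0 for raw) so the reader knows which form follows; that single byte is also why the size estimators add one to blobBytesLen when conditionalEncode is set, and why the 126976-byte cap (presumably 31 usable bytes in each of the blob's 4096 field elements) is checked on the flagged payload. The Go sketch below is a minimal, self-contained illustration of that envelope, not code from this series: chooseEnvelope mirrors the branch shown in the diffs, while decodeEnvelope is a hypothetical inverse written only for this note.

package main

import (
	"errors"
	"fmt"
)

// chooseEnvelope mirrors the conditionalEncode branch from ConstructBlobPayload:
// keep whichever representation is smaller and prepend a one-byte flag so the
// reader can tell a compressed payload (1) from a raw batch payload (0).
func chooseEnvelope(batchBytes, compressedBytes []byte) []byte {
	if len(compressedBytes) < len(batchBytes) {
		return append([]byte{1}, compressedBytes...)
	}
	return append([]byte{0}, batchBytes...)
}

// decodeEnvelope is a hypothetical inverse, written only for this sketch: it
// strips the flag and reports whether the payload still needs decompression.
func decodeEnvelope(envelope []byte) ([]byte, bool, error) {
	if len(envelope) == 0 {
		return nil, false, errors.New("empty envelope")
	}
	return envelope[1:], envelope[0] == 1, nil
}

func main() {
	raw := []byte("raw batch payload")
	compressed := []byte("zstd") // stands in for a compressed stream that won the size comparison
	payload, needsDecompression, err := decodeEnvelope(chooseEnvelope(raw, compressed))
	if err != nil {
		panic(err)
	}
	fmt.Println(needsDecompression, len(payload)) // prints: true 4
}

Attaching the flag only after the compressed data passes the compatibility check (the reordering done in PATCH 002) keeps that validity check operating on the bare compressed stream rather than on the envelope.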
diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go
index a623f66..3db2fe4 100644
--- a/encoding/codecv2/codecv2_test.go
+++ b/encoding/codecv2/codecv2_test.go
@@ -59,17 +59,17 @@ func TestCodecV2BlockEncode(t *testing.T) {
 	encoded = hex.EncodeToString(block.Encode())
 	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
 
-	// sanity check: v0 and v1 block encodings are identical
+	// sanity check: v0 and v2 block encodings are identical
 	for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
 		blockv0, err := codecv0.NewDABlock(trace, 0)
 		assert.NoError(t, err)
 		encodedv0 := hex.EncodeToString(blockv0.Encode())
 
-		blockv1, err := NewDABlock(trace, 0)
+		blockv2, err := NewDABlock(trace, 0)
 		assert.NoError(t, err)
-		encodedv1 := hex.EncodeToString(blockv1.Encode())
+		encodedv2 := hex.EncodeToString(blockv2.Encode())
 
-		assert.Equal(t, encodedv0, encodedv1)
+		assert.Equal(t, encodedv0, encodedv2)
 	}
 }
 
@@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
 		chunks = append(chunks, chunk)
 	}
 
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
 	require.NoError(t, err)
 	actualZ := hex.EncodeToString(z[:])
 	assert.Equal(t, tc.expectedz, actualZ)
@@ -870,52 +870,52 @@ func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) {
 	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
+	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(412), chunk2BatchBytesSize)
 	assert.Equal(t, uint64(237), chunk2BlobSize)
 
 	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
+	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(412), batch2BatchBytesSize)
 	assert.Equal(t, uint64(237), batch2BlobSize)
 
 	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
+	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
 	assert.Equal(t, uint64(2933), chunk3BlobSize)
 
 	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
+	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5863), batch3BatchBytesSize)
 	assert.Equal(t, uint64(2933), batch3BlobSize)
 
 	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
+	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk4BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk4BlobSize)
 
 	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
+	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), blob4BatchBytesSize)
 	assert.Equal(t, uint64(54), batch4BlobSize)
 
 	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
+	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
 	assert.Equal(t, uint64(3149), chunk5BlobSize)
 
 	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
+	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk6BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk6BlobSize)
 
 	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
+	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go
index 8be9694..bfe0d2a 100644
--- a/encoding/codecv3/codecv3.go
+++ b/encoding/codecv3/codecv3.go
@@ -53,7 +53,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh
 }
 
 // NewDABatch creates a DABatch from the provided encoding.Batch.
-func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error) {
+func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > MaxNumChunks {
 		return nil, errors.New("too many chunks in batch")
@@ -80,7 +80,7 @@ func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error)
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, conditionalEncode, false /* no mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -118,8 +118,8 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }
 
 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { - return codecv2.ConstructBlobPayload(chunks, conditionalEncode, useMockTxData) +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { + return codecv2.ConstructBlobPayload(chunks, useMockTxData) } // NewDABatchFromBytes decodes the given byte slice into a DABatch. @@ -232,13 +232,13 @@ func (b *DABatch) Blob() *kzg4844.Blob { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) { - return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c, conditionalEncode) +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) { - return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b, conditionalEncode) +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go index 080f743..0b22312 100644 --- a/encoding/codecv3/codecv3_test.go +++ b/encoding/codecv3/codecv3_test.go @@ -59,17 +59,17 @@ func TestCodecV3BlockEncode(t *testing.T) { encoded = hex.EncodeToString(block.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - // sanity check: v0 and v1 block encodings are identical + // sanity check: v0 and v3 block encodings are identical for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { blockv0, err := codecv0.NewDABlock(trace, 0) assert.NoError(t, err) encodedv0 := hex.EncodeToString(blockv0.Encode()) - blockv1, err := NewDABlock(trace, 0) + blockv3, err := NewDABlock(trace, 0) assert.NoError(t, err) - encodedv1 := hex.EncodeToString(blockv1.Encode()) + encodedv3 := hex.EncodeToString(blockv3.Encode()) - assert.Equal(t, encodedv0, encodedv1) + assert.Equal(t, encodedv0, encodedv3) } } @@ -217,7 +217,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) @@ -225,7 +225,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) @@ -233,7 +233,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) @@ -241,7 +241,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -249,7 +249,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -257,13 +257,13 @@ func TestCodecV3BatchEncode(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) @@ -271,7 +271,7 @@ func TestCodecV3BatchEncode(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) @@ -285,54 +285,54 @@ func TestCodecV3BatchHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex()) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex()) } @@ -341,54 +341,54 @@ func TestCodecV3BatchDataHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional 
encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) } @@ -397,7 +397,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) @@ -406,7 +406,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5c
d8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) @@ -415,7 +415,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, 
err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) @@ -425,7 +425,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -434,7 +434,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -443,7 +443,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -451,7 +451,7 @@ func TestCodecV3BatchBlob(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) @@ -460,7 +460,7 @@ func TestCodecV3BatchBlob(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) @@ -471,55 +471,55 @@ func TestCodecV3BatchChallenge(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) } @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -710,7 +710,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err := batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -719,7 +719,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -728,7 +728,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -737,7 +737,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -746,7 +746,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -755,7 +755,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -763,7 +763,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -772,7 +772,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, 
false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -783,7 +783,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -791,7 +791,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -799,7 +799,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) @@ -807,13 +807,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -821,7 +821,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) @@ -829,13 +829,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 
255, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) @@ -843,13 +843,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 32, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -858,52 +858,52 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */) + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) assert.NoError(t, err) assert.Equal(t, uint64(412), chunk2BatchBytesSize) assert.Equal(t, uint64(237), chunk2BlobSize) batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */) + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) assert.NoError(t, err) assert.Equal(t, uint64(412), batch2BatchBytesSize) assert.Equal(t, uint64(237), batch2BlobSize) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */) + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) assert.NoError(t, err) assert.Equal(t, uint64(5863), chunk3BatchBytesSize) assert.Equal(t, uint64(2933), chunk3BlobSize) batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */) + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) assert.NoError(t, err) assert.Equal(t, uint64(5863), batch3BatchBytesSize) assert.Equal(t, uint64(2933), batch3BlobSize) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */) + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) assert.NoError(t, err) assert.Equal(t, uint64(214), 
chunk4BatchBytesSize) assert.Equal(t, uint64(54), chunk4BlobSize) batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */) + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) assert.NoError(t, err) assert.Equal(t, uint64(214), blob4BatchBytesSize) assert.Equal(t, uint64(54), batch4BlobSize) chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */) + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) assert.NoError(t, err) assert.Equal(t, uint64(6093), chunk5BatchBytesSize) assert.Equal(t, uint64(3149), chunk5BlobSize) chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */) + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk6BatchBytesSize) assert.Equal(t, uint64(54), chunk6BlobSize) batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */) + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) assert.NoError(t, err) assert.Equal(t, uint64(6125), batch5BatchBytesSize) assert.Equal(t, uint64(3186), batch5BlobSize) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go new file mode 100644 index 0000000..9d16ef2 --- /dev/null +++ b/encoding/codecv4/codecv4.go @@ -0,0 +1,534 @@ +package codecv4 + +/* +#include <stdint.h> +char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); +*/ +import "C" + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "math/big" + "unsafe" + + "github.com/scroll-tech/go-ethereum/accounts/abi" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/codecv1" + "github.com/scroll-tech/da-codec/encoding/codecv3" +) + +// MaxNumChunks is the maximum number of chunks that a batch can contain. +const MaxNumChunks = codecv3.MaxNumChunks + +// DABlock represents a Data Availability Block. +type DABlock = codecv3.DABlock + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk = codecv3.DAChunk + +// DABatch contains metadata about a batch of DAChunks.
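+// A serialized codecv4 batch header is 193 bytes: version (1) || batch index (8) || L1 messages popped (8) || total L1 messages popped (8) || data hash (32) || blob versioned hash (32) || parent batch hash (32) || last block timestamp (8) || blob data proof z and y (32 + 32); see Encode and NewDABatchFromBytes below.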
+type DABatch struct { + // header + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash common.Hash `json:"data_hash"` + BlobVersionedHash common.Hash `json:"blob_versioned_hash"` + ParentBatchHash common.Hash `json:"parent_batch_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]common.Hash `json:"blob_data_proof"` + + // blob payload + blob *kzg4844.Blob + z *kzg4844.Point +} + +// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. +func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { + return codecv3.NewDABlock(block, totalL1MessagePoppedBefore) +} + +// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. +func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { + return codecv3.NewDAChunk(chunk, totalL1MessagePoppedBefore) +} + +// NewDABatch creates a DABatch from the provided encoding.Batch. +func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > MaxNumChunks { + return nil, errors.New("too many chunks in batch") + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("too few chunks in batch") + } + + if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { + return nil, errors.New("too few blocks in last chunk of the batch") + } + + // batch data hash + dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // skipped L1 messages bitmap + _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // blob payload + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncoding, false /* no mock */) + if err != nil { + return nil, err + } + + lastChunk := batch.Chunks[len(batch.Chunks)-1] + lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + + daBatch := DABatch{ + Version: uint8(encoding.CodecV4), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + BlobVersionedHash: blobVersionedHash, + ParentBatchHash: batch.ParentBatchHash, + LastBlockTimestamp: lastBlock.Header.Time, + blob: blob, + z: z, + } + + daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() + if err != nil { + return nil, err + } + + return &daBatch, nil +} + +// ComputeBatchDataHash computes the data hash of the batch. +// Note: The batch hash and batch data hash are two different hashes, +// the former is used for identifying a batch in the contracts, +// the latter is used in the public input to the provers. +func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return codecv3.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +} + +// ConstructBlobPayload constructs the 4844 blob payload.
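+// When enableEncoding is true, the payload is the compressed batch bytes (via compress_scroll_batch_bytes) prefixed with the envelope byte 0x01; otherwise it is the raw batch bytes prefixed with 0x00. A reader can branch on that first byte, for instance (illustrative sketch only; decompress stands in for the matching decompressor, which is not part of this package): +// +//	if blobBytes[0] == 1 { +//		batchBytes = decompress(blobBytes[1:]) // compressed envelope +//	} else { +//		batchBytes = blobBytes[1:] // raw envelope +//	}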
+func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // challenge digest preimage + // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash + challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + + // the chunk data hash used for calculating the challenge preimage + var chunkDataHash common.Hash + + // blob metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode blob metadata and L2 transactions, + // and simultaneously also build challenge preimage + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into blob payload + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + if err != nil { + return nil, common.Hash{}, nil, err + } + batchBytes = append(batchBytes, rlpTxData...) + } + } + + // blob metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + + // challenge: compute chunk data hash + chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) + copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + } + + // if we have fewer than MaxNumChunks chunks, the rest + // of the blob metadata is correctly initialized to 0, + // but we need to add padding to the challenge preimage + for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + // use the last chunk's data hash as padding + copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + } + + // challenge: compute metadata hash + hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) + copy(challengePreimage[0:], hash[:]) + + var blobBytes []byte + if enableEncoding { + // blobBytes represents the compressed blob payload (batchBytes) + var err error + blobBytes, err = compressScrollBatchBytes(batchBytes) + if err != nil { + return nil, common.Hash{}, nil, err + } + if !useMockTxData { + // Check compressed data compatibility. + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, err + } + } + blobBytes = append([]byte{1}, blobBytes...) + } else { + blobBytes = append([]byte{0}, batchBytes...)
+ } + + if len(blobBytes) > 126976 { + log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := MakeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, err + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + start := 32 - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header; it leaves the blob-related fields empty. +func NewDABatchFromBytes(data []byte) (*DABatch, error) { + if len(data) != 193 { + return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + } + + b := &DABatch{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + BlobVersionedHash: common.BytesToHash(data[57:89]), + ParentBatchHash: common.BytesToHash(data[89:121]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + BlobDataProof: [2]common.Hash{ + common.BytesToHash(data[129:161]), + common.BytesToHash(data[161:193]), + }, + } + + return b, nil +} + +// Encode serializes the DABatch into bytes. +func (b *DABatch) Encode() []byte { + batchBytes := make([]byte, 193) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) + copy(batchBytes[25:57], b.DataHash[:]) + copy(batchBytes[57:89], b.BlobVersionedHash[:]) + copy(batchBytes[89:121], b.ParentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) + copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatch) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// blobDataProofForPICircuit computes the abi-encoded blob verification data.
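+// The challenge point z is derived in ConstructBlobPayload above as keccak(metadata hash || chunk data hashes || blob versioned hash) mod BLS_MODULUS; this helper evaluates the blob polynomial at z and packs the pair (z, y) for the PI circuit.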
+func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { + if b.blob == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") + } + if b.z == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") + } + + _, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of result: + // | z | y | + // |---------|---------| + // | bytes32 | bytes32 | + var result [2]common.Hash + result[0] = common.BytesToHash(b.z[:]) + result[1] = common.BytesToHash(y[:]) + + return result, nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatch) Blob() *kzg4844.Blob { + return b.blob +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { + batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + if err != nil { + return 0, 0, err + } + var blobBytesLength uint64 + if enableEncode { + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableEncode bool) (uint64, uint64, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return 0, 0, err + } + var blobBytesLength uint64 + if enableEncode { + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
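+// It returns (false, nil) when the data compresses but fails the compatibility check, and a non-nil error only when payload construction or compression itself fails.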
+func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { + return codecv3.EstimateChunkL1CommitCalldataSize(c) +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { + return codecv3.EstimateBatchL1CommitCalldataSize(b) +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { + return codecv3.EstimateChunkL1CommitGas(c) +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. +func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { + return codecv3.EstimateBatchL1CommitGas(b) +} + +// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. +func GetBlobDataProofArgs() (*abi.Arguments, error) { + return codecv3.GetBlobDataProofArgs() +} + +// checkBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility. +func checkBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("checkBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// constructBatchPayload constructs the batch payload. +// This function is only used in compressed batch payload length estimation.
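+// Layout: num_chunks (2 bytes) || chunk_i sizes (4 bytes each, MaxNumChunks slots) || concatenated RLP-encoded L2 transactions, with L1 message transactions skipped.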
+func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // batch metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode batch metadata and L2 transactions, + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into batch payload + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) + if err != nil { + return nil, err + } + batchBytes = append(batchBytes, rlpTxData...) + } + } + + // batch metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + } + return batchBytes, nil +} + +// compressScrollBatchBytes compresses the given batch of bytes. +// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. +func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + srcSize := C.uint64_t(len(batchBytes)) + outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbuf := make([]byte, outbufSize) + + if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, + (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { + return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) + } + + return outbuf[:int(outbufSize)], nil +} + +// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. +func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + return codecv1.MakeBlobCanonical(blobBytes) +} + +// CalculatePaddedBlobSize calculates the required size on blob storage +// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. 
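+// The padding is finer-grained than whole field elements: the codecv4 size-estimation tests below expect a padded size of 238 for the same chunk where the codecv3 tests above expect 237, reflecting the one extra conditional-encode envelope byte.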
+func CalculatePaddedBlobSize(dataSize uint64) uint64 { + return codecv1.CalculatePaddedBlobSize(dataSize) +} diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go new file mode 100644 index 0000000..7faf096 --- /dev/null +++ b/encoding/codecv4/codecv4_test.go @@ -0,0 +1,837 @@ +package codecv4 + +import ( + "encoding/hex" + "encoding/json" + "os" + "testing" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/codecv0" +) + +func TestCodecV4BlockEncode(t *testing.T) { + block := &DABlock{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + block, err := NewDABlock(trace2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + block, err = NewDABlock(trace3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + block, err = NewDABlock(trace4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + block, err = NewDABlock(trace5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + block, err = NewDABlock(trace6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + block, err = NewDABlock(trace7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + // sanity check: v0 and v4 block encodings are identical + for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { + blockv0, err := codecv0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv4, err := NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv4 := hex.EncodeToString(blockv4.Encode()) + + assert.Equal(t, encodedv0, encodedv4) + } +} + +func TestCodecV4ChunkEncode(t *testing.T) { + // chunk with a single empty block + block := 
DABlock{} + chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + encoded := hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err := NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV4ChunkHash(t *testing.T) { + // chunk with a single empty block + block := DABlock{} + chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + 
chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + chunk.Blocks[0].NumL1Messages = 1 + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} + +func TestCodecV4BatchEncode(t *testing.T) { + // empty batch + batch := &DABatch{Version: uint8(encoding.CodecV4)} + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, 
"04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) +} + +func TestCodecV4BatchHash(t *testing.T) { + // empty batch + batch := &DABatch{Version: uint8(encoding.CodecV4)} + assert.Equal(t, "0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c", batch.Hash().Hex()) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := 
&encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e", batch.Hash().Hex()) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342", batch.Hash().Hex()) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) + + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) +} + +func TestCodecV4ChunkAndBatchCommitGasEstimation(t *testing.T) { + block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} + chunk2Gas := EstimateChunkL1CommitGas(chunk2) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2Gas := EstimateBatchL1CommitGas(batch2) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} + chunk3Gas := EstimateChunkL1CommitGas(chunk3) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3Gas := EstimateBatchL1CommitGas(batch3) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := 
&encoding.Chunk{Blocks: []*encoding.Block{block4}} + chunk4Gas := EstimateChunkL1CommitGas(chunk4) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch4Gas := EstimateBatchL1CommitGas(batch4) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} + chunk5Gas := EstimateChunkL1CommitGas(chunk5) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} + chunk6Gas := EstimateChunkL1CommitGas(chunk6) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} + batch5Gas := EstimateBatchL1CommitGas(batch5) + assert.Equal(t, uint64(213087), batch5Gas) +} + +func repeat(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) +} + +func TestCodecV4BatchStandardTestCases(t *testing.T) { + // Taking into consideration compression, we allow up to 5x of max blob bytes. + // We then ignore the metadata rows for 45 chunks. + maxChunks := 45 + nRowsData := 5*126976 - (maxChunks*4 + 2) + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: 
"6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: 
"53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "37ca5366d9f5ddd9471f074f8019050ea6a13097368e84f298ffa1bd806ad851", expectedy: "5aa602da97cc438a039431c799b5f97467bcd45e693273dd1215f201b19fa5bd", expectedBlobVersionedHash: "01e531e7351a271839b2ae6ddec58818efd5f426fd6a7c0bc5c33c9171ed74bf", expectedBatchHash: "d3809d6b2fd10a62c6c58f9e7c32772f4ac062a78d363f46cd3ee301e87dbad2"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "250fc907e7ba3b5affb90a624566e337b02dd89a265677571cc0d1c51b60af19", expectedy: "1b2898bb001d962717159f49b015ae7228b21e9a590f836be0d79a0870c7d82b", expectedBlobVersionedHash: "01f3c431a72bbfd43c42dbd638d7f6d109be2b9449b96386b214f92b9e28ccc4", expectedBatchHash: "a51631991f6210b13e9c8ac9260704cca29fdc08adcfbd210053dc77c956e82f"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6ba09c6123b374f1828ce5b3e52c69ac7e2251f1a573ba4d51e71b386eef9c38", expectedy: "3104f9e81ecf4ade3281cc8ea68c4f451341388e2a2c84be4b5e5ed938b6bb26", expectedBlobVersionedHash: "017813036e3c57d5259d5b1d89ca0fe253e43d740f5ee287eabc916b3486f15d", expectedBatchHash: "ebfaf617cc91d9147b00968263993f70e0efc57c1189877092a87ea60b55a2d7"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "295f6ba39b866f6635a1e11ffe16badf42174ba120bdcb973806620370f665fc", expectedy: "553772861d517aefd58332d87d75a388523b40dbd69c1d73b7d78fd18d895513", expectedBlobVersionedHash: "013a5cb4a098dfa068b82acea202eac5c7b1ec8f16c7cb37b2a9629e7359a4b1", expectedBatchHash: "b4c58eb1be9b2b21f6a43b4170ee92d6ee0af46e20848fff508a07d40b2bac29"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: 
"4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, + } { + chunks := []*encoding.Chunk{} + + for _, c := range tc.chunks { + block := &encoding.Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} + chunks = append(chunks, chunk) + } + + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* enble encode */, true /* use mock */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := DABatch{ + Version: uint8(encoding.CodecV4), + BatchIndex: 6789, + L1MessagePopped: 101, + TotalL1MessagePopped: 10101, + DataHash: dataHash, + BlobVersionedHash: blobVersionedHash, + ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + LastBlockTimestamp: 192837, + blob: blob, + z: z, + } + + batch.BlobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} + +func TestCodecV4BatchL1MessagePopped(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 0, int(batch.L1MessagePopped)) + assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 0, int(batch.L1MessagePopped)) + assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 + assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + 
assert.NoError(t, err) + assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 37 + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 + assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 + assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 1 + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 + assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, 42, int(batch.L1MessagePopped)) + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 10 + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, 32, int(batch.L1MessagePopped)) + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) +} + +func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(238), chunk2BlobSize) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(238), batch2BlobSize) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2934), chunk3BlobSize) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enable encode */)
+ assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2934), batch3BlobSize) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(55), chunk4BlobSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(55), batch4BlobSize) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3150), chunk5BlobSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(55), chunk6BlobSize) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3187), batch5BlobSize) +} + +func TestCodecV4ChunkAndBatchCalldataSizeEstimation(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) + assert.Equal(t, uint64(60), batch2CalldataSize) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) + assert.Equal(t, uint64(60), batch3CalldataSize) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} + chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &encoding.Batch{Chunks:
[]*encoding.Chunk{chunk5, chunk6}} + batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) + assert.Equal(t, uint64(180), batch5CalldataSize) +} + +func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { + t.Run("Case 1", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(4), batch.Version) + assert.Equal(t, uint64(293212), batch.BatchIndex) + assert.Equal(t, uint64(7), batch.L1MessagePopped) + assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) + assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) + assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0x64ba42153a4f642b2d8a37cf74a53067c37bba7389b85e7e07521f584e6b73d0") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) + + t.Run("Case 2", func(t *testing.T) { + jsonStr := `{ + "version": 5, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(5), batch.Version) + assert.Equal(t, uint64(123), batch.BatchIndex) + assert.Equal(t, uint64(0), batch.L1MessagePopped) + assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) + assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) + assert.Equal(t, 
common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0xd14f142dbc5c384e9920d5bf82c6bbf7c98030ffd7a3cace6c8a6e9639a285f9") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) + + t.Run("Case 3", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(4), batch.Version) + assert.Equal(t, uint64(293205), batch.BatchIndex) + assert.Equal(t, uint64(0), batch.L1MessagePopped) + assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) + assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) + assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0x19638ca802926b93946fe281666205958838d46172587d150ca4c720ae244cd3") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) +} + +func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { + data, err := os.ReadFile(filename) + assert.NoError(t, err) + + block := &encoding.Block{} + assert.NoError(t, json.Unmarshal(data, block)) + return block +} diff --git a/encoding/da.go b/encoding/da.go index 35befdb..4e88635 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -2,12 +2,16 @@ package encoding import ( "fmt" + "math/big" "github.com/scroll-tech/go-ethereum/common" 
"github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" ) +// BLSModulus is the BLS modulus defined in EIP-4844. +var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) + // CodecVersion defines the version of encoder and decoder. type CodecVersion uint8 @@ -23,6 +27,9 @@ const ( // CodecV3 represents the version 3 of the encoder and decoder. CodecV3 + + // CodecV4 represents the version 4 of the encoder and decoder. + CodecV4 ) // Block represents an L2 block. From e4bf12e26677cc5994677257d50033e980a16a5a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sun, 18 Aug 2024 23:40:36 +0800 Subject: [PATCH 004/126] align naming --- encoding/codecv4/codecv4.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 9d16ef2..05bbff8 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -65,7 +65,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -92,7 +92,7 @@ func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncoding, false /* no mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) if err != nil { return nil, err } @@ -130,7 +130,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -190,7 +190,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMock copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableEncoding { + if enableEncode { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = compressScrollBatchBytes(batchBytes) From 030349d59730d9624a5f7d7be0165740ed6ee3d9 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 16:50:52 +0800 Subject: [PATCH 005/126] add ConvertBlobToBlobBytes utility functions --- encoding/codecv3/codecv3.go | 28 ++++++++++++++++++++++++++++ encoding/codecv4/codecv4.go | 29 +++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index bfe0d2a..d3e9570 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -231,6 +231,34 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } +// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. 
+func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { + var blobBytes [126976]byte + + for from := 0; from < len(b.blob); from += 32 { + copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) + } + + metadataLength := 2 + MaxNumChunks*4 + numChunks := binary.BigEndian.Uint16(blobBytes[:2]) + + if numChunks > MaxNumChunks { + return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) + } + + totalSize := metadataLength + for i := 0; i < int(numChunks); i++ { + chunkSize := binary.BigEndian.Uint32(blobBytes[2+4*i:]) + totalSize += int(chunkSize) + + if totalSize > len(blobBytes) { + return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) + } + } + + return blobBytes[:totalSize], nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 05bbff8..3da14cf 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -353,6 +353,35 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } +// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. +func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { + var blobBytes [126976]byte + + for from := 0; from < len(b.blob); from += 32 { + copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) + } + + startIndex := 1 // Skip the flag byte in codecv4 + metadataLength := startIndex + 2 + MaxNumChunks*4 + numChunks := binary.BigEndian.Uint16(blobBytes[startIndex : startIndex+2]) + + if numChunks > MaxNumChunks { + return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) + } + + totalSize := metadataLength + for i := 0; i < int(numChunks); i++ { + chunkSize := binary.BigEndian.Uint32(blobBytes[startIndex+2+4*i:]) + totalSize += int(chunkSize) + + if totalSize > len(blobBytes) { + return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) + } + } + + return blobBytes[:totalSize], nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
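The ConvertBlobToBlobBytes helpers added in this patch undo the padding applied by MakeBlobCanonical: every 32-byte field element of the blob stores 31 payload bytes behind one zero lead byte (keeping each element below the BLS modulus), and the copy loop strips that byte again. A minimal self-contained sketch of the round trip; toCanonical and fromCanonical are hypothetical stand-ins for the real functions, and the chunk-metadata length recovery is omitted:

package main

import (
	"bytes"
	"fmt"
)

// toCanonical mirrors MakeBlobCanonical: each 31-byte group of the payload
// occupies a 32-byte slot whose first byte is left zero.
func toCanonical(payload []byte) []byte {
	blob := make([]byte, (len(payload)+30)/31*32)
	index := 0
	for from := 0; from < len(payload); from += 31 {
		to := from + 31
		if to > len(payload) {
			to = len(payload)
		}
		copy(blob[index+1:], payload[from:to])
		index += 32
	}
	return blob
}

// fromCanonical mirrors the copy loop in ConvertBlobToBlobBytes: drop the
// zero lead byte of every 32-byte slot.
func fromCanonical(blob []byte) []byte {
	out := make([]byte, len(blob)/32*31)
	for from := 0; from < len(blob); from += 32 {
		copy(out[from/32*31:], blob[from+1:from+32])
	}
	return out
}

func main() {
	payload := []byte("scroll batch payload")
	roundTrip := fromCanonical(toCanonical(payload))
	fmt.Println(bytes.HasPrefix(roundTrip, payload)) // true; the tail is zero padding up to a 31-byte multiple
}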
func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) From ed4de9e92f76c5a6cfee00c59ff7783f4a1c5a9d Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 17:08:15 +0800 Subject: [PATCH 006/126] kept blob bytes --- encoding/codecv2/codecv2.go | 18 +++++------ encoding/codecv2/codecv2_test.go | 2 +- encoding/codecv3/codecv3.go | 37 ++++++----------------- encoding/codecv3/codecv3_test.go | 2 +- encoding/codecv4/codecv4.go | 52 ++++++++++---------------------- encoding/codecv4/codecv4_test.go | 2 +- 6 files changed, 37 insertions(+), 76 deletions(-) diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index 3edf328..b5ed267 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -86,7 +86,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -116,7 +116,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -147,7 +147,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // encode L2 txs into blob payload rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } batchBytes = append(batchBytes, rlpTxData...) } @@ -178,7 +178,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // blobBytes represents the compressed blob payload (batchBytes) blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. @@ -186,25 +186,25 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // Check compressed data compatibility. 
if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } } if len(blobBytes) > 126976 { log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } // convert raw data to BLSFieldElements blob, err := MakeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) @@ -221,7 +221,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 start := 32 - len(pointBytes) copy(z[start:], pointBytes) - return blob, blobVersionedHash, &z, nil + return blob, blobVersionedHash, &z, blobBytes, nil } // MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go index 3db2fe4..c34f608 100644 --- a/encoding/codecv2/codecv2_test.go +++ b/encoding/codecv2/codecv2_test.go @@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index d3e9570..5c82d10 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -40,6 +40,9 @@ type DABatch struct { // blob payload blob *kzg4844.Blob z *kzg4844.Point + + // for batch task + blobBytes []byte } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. @@ -80,7 +83,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -99,6 +102,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { LastBlockTimestamp: lastBlock.Header.Time, blob: blob, z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -118,7 +122,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { return codecv2.ConstructBlobPayload(chunks, useMockTxData) } @@ -231,32 +235,9 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } -// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. -func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { - var blobBytes [126976]byte - - for from := 0; from < len(b.blob); from += 32 { - copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) - } - - metadataLength := 2 + MaxNumChunks*4 - numChunks := binary.BigEndian.Uint16(blobBytes[:2]) - - if numChunks > MaxNumChunks { - return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) - } - - totalSize := metadataLength - for i := 0; i < int(numChunks); i++ { - chunkSize := binary.BigEndian.Uint32(blobBytes[2+4*i:]) - totalSize += int(chunkSize) - - if totalSize > len(blobBytes) { - return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) - } - } - - return blobBytes[:totalSize], nil +// BlobBytes returns the blob bytes of the batch. +func (b *DABatch) BlobBytes() []byte { + return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go index 0b22312..fef0c12 100644 --- a/encoding/codecv3/codecv3_test.go +++ b/encoding/codecv3/codecv3_test.go @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 3da14cf..8ab046a 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -52,6 +52,9 @@ type DABatch struct { // blob payload blob *kzg4844.Blob z *kzg4844.Point + + // for batch task + blobBytes []byte } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. @@ -92,7 +95,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) if err != nil { return nil, err } @@ -111,6 +114,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { LastBlockTimestamp: lastBlock.Header.Time, blob: blob, z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -130,7 +134,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -161,7 +165,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx // encode L2 txs into blob payload rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } batchBytes = append(batchBytes, rlpTxData...) } @@ -195,13 +199,13 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx var err error blobBytes, err = compressScrollBatchBytes(batchBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } if !useMockTxData { // Check compressed data compatibility. if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } } blobBytes = append([]byte{1}, blobBytes...) @@ -212,19 +216,19 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx if len(blobBytes) > 126976 { log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } // convert raw data to BLSFieldElements blob, err := MakeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) @@ -241,7 +245,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx start := 32 - len(pointBytes) copy(z[start:], pointBytes) - return blob, blobVersionedHash, &z, nil + return blob, blobVersionedHash, &z, blobBytes, nil } // NewDABatchFromBytes decodes the given byte slice into a DABatch. @@ -353,33 +357,9 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } -// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. 
-func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { - var blobBytes [126976]byte - - for from := 0; from < len(b.blob); from += 32 { - copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) - } - - startIndex := 1 // Skip the flag byte in codecv4 - metadataLength := startIndex + 2 + MaxNumChunks*4 - numChunks := binary.BigEndian.Uint16(blobBytes[startIndex : startIndex+2]) - - if numChunks > MaxNumChunks { - return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) - } - - totalSize := metadataLength - for i := 0; i < int(numChunks); i++ { - chunkSize := binary.BigEndian.Uint32(blobBytes[startIndex+2+4*i:]) - totalSize += int(chunkSize) - - if totalSize > len(blobBytes) { - return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) - } - } - - return blobBytes[:totalSize], nil +// BlobBytes returns the blob bytes of the batch. +func (b *DABatch) BlobBytes() []byte { + return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. diff --git a/encoding/codecv4/codecv4_test.go index 7faf096..a824c64 100644 --- a/encoding/codecv4/codecv4_test.go +++ b/encoding/codecv4/codecv4_test.go @@ -480,7 +480,7 @@ func TestCodecV4BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) From c6af3bbe7068da2b356509f2cb9eaf6c8d514bdf Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 18:09:36 +0800 Subject: [PATCH 007/126] rename enableEncode to enableCompress --- encoding/codecv4/codecv4.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 8ab046a..4402b6a 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -68,7 +68,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -92,7 +92,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableCompress, false /* no mock */) if err != nil { return nil, err } @@ -134,7 +134,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -194,7 +194,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableEncode { + if enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = compressScrollBatchBytes(batchBytes) @@ -363,13 +363,13 @@ func (b *DABatch) BlobBytes() []byte { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableEncode { + if enableCompress { blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -382,13 +382,13 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode b } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableEncode bool) (uint64, uint64, error) { +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload(b.Chunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableEncode { + if enableCompress { blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err From a5691d4eee3baa866a9d217f8d390fc8bc2efa77 Mon Sep 17 00:00:00 2001 From: colin <102356659+colinlyguo@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:45:50 +0800 Subject: [PATCH 008/126] refactor: move some common functions to encoding (#24) * refactor: move some common functions to encoding * fix golint --- encoding/codecv1/codecv1.go | 87 +----------- encoding/codecv2/codecv2.go | 105 ++------------- encoding/codecv3/codecv3.go | 8 +- encoding/codecv4/codecv4.go | 125 ++---------------- encoding/da.go | 120 +++++++++++++++++ .../libscroll_zstd_darwin_arm64.a | Bin .../libscroll_zstd_darwin_arm64.go | 2 +- .../libscroll_zstd_linux_amd64.a | Bin .../libscroll_zstd_linux_amd64.go | 2 +- .../libscroll_zstd_linux_arm64.a | Bin .../libscroll_zstd_linux_arm64.go | 2 +- encoding/zstd/zstd.go | 26 ++++ 12 files changed, 182 insertions(+), 295 deletions(-) rename encoding/{codecv2 => zstd}/libscroll_zstd_darwin_arm64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_darwin_arm64.go (81%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_amd64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_amd64.go (86%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_arm64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_arm64.go (86%) create mode 100644 encoding/zstd/zstd.go diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go 
index 205f257..4ed048b 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -8,9 +8,7 @@ import ( "fmt" "math/big" "strings" - "sync" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -260,7 +258,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -288,31 +286,6 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return blob, blobVersionedHash, &z, nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - // blob contains 131072 bytes but we can only utilize 31/32 of these - if len(blobBytes) > 126976 { - return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) - } - - // the canonical (padded) blob payload - var blob kzg4844.Blob - - // encode blob payload by prepending every 31 bytes with 1 zero byte - index := 0 - - for from := 0; from < len(blobBytes); from += 31 { - to := from + 31 - if to > len(blobBytes) { - to = len(blobBytes) - } - copy(blob[index+1:], blobBytes[from:to]) - index += 32 - } - - return &blob, nil -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. func NewDABatchFromBytes(data []byte) (*DABatch, error) { @@ -379,7 +352,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -398,7 +371,7 @@ func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { if err != nil { return 0, err } - return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. @@ -412,7 +385,7 @@ func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { } batchDataSize += chunkDataSize } - return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { @@ -550,55 +523,3 @@ func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { } return totalL1CommitCalldataSize } - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. 
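The rule being hoisted into the shared encoding package is easy to check by hand: 100 bytes of data fill three full 31-byte groups (3 * 32 = 96 padded bytes) and leave a 7-byte tail, which needs one more zero lead byte, so the padded size is 96 + 1 + 7 = 104. A standalone restatement of the logic, for illustration only:

// paddedBlobSize restates the CalculatePaddedBlobSize rule above: every 31
// data bytes consume one 32-byte slot whose first byte is left zero.
func paddedBlobSize(dataSize uint64) uint64 {
	padded := (dataSize / 31) * 32
	if dataSize%31 != 0 {
		padded += 1 + dataSize%31 // one zero lead byte plus the remainder bytes
	}
	return padded
}

// paddedBlobSize(100) == 104, paddedBlobSize(31) == 32, paddedBlobSize(0) == 0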
-func CalculatePaddedBlobSize(dataSize uint64) uint64 { - paddedSize := (dataSize / 31) * 32 - - if dataSize%31 != 0 { - paddedSize += 1 + dataSize%31 // Add 1 byte for the first empty byte plus the remainder bytes - } - - return paddedSize -} - -var ( - blobDataProofArgs *abi.Arguments - initBlobDataProofArgsOnce sync.Once -) - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - var initError error - - initBlobDataProofArgsOnce.Do(func() { - // Initialize bytes32 type - bytes32Type, err := abi.NewType("bytes32", "bytes32", nil) - if err != nil { - initError = fmt.Errorf("failed to initialize abi type bytes32: %w", err) - return - } - - // Initialize bytes48 type - bytes48Type, err := abi.NewType("bytes48", "bytes48", nil) - if err != nil { - initError = fmt.Errorf("failed to initialize abi type bytes48: %w", err) - return - } - - // Successfully create the argument list - blobDataProofArgs = &abi.Arguments{ - {Type: bytes32Type, Name: "z"}, - {Type: bytes32Type, Name: "y"}, - {Type: bytes48Type, Name: "kzg_commitment"}, - {Type: bytes48Type, Name: "kzg_proof"}, - } - }) - - if initError != nil { - return nil, initError - } - - return blobDataProofArgs, nil -} diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index b5ed267..7588394 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -1,11 +1,5 @@ package codecv2 -/* -#include -char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); -*/ -import "C" - import ( "crypto/sha256" "encoding/binary" @@ -13,9 +7,7 @@ import ( "errors" "fmt" "math/big" - "unsafe" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -24,6 +16,7 @@ import ( "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/codecv1" + "github.com/scroll-tech/da-codec/encoding/zstd" ) // MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -176,7 +169,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // blobBytes represents the compressed blob payload (batchBytes) - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -196,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -224,11 +217,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return blob, blobVersionedHash, &z, blobBytes, nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - return codecv1.MakeBlobCanonical(blobBytes) -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
func NewDABatchFromBytes(data []byte) (*DABatch, error) { @@ -295,7 +283,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -309,38 +297,38 @@ func (b *DABatch) Blob() *kzg4844.Blob { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return 0, 0, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return 0, 0, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -358,11 +346,11 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. 
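In the codecv4 flow, this compatibility probe is what decides the flag passed to NewDABatch: a caller checks first and falls back to the uncompressed (flag byte 0) encoding when the compressed payload would be rejected. A hedged usage sketch, assuming originalBatch is an already-populated *encoding.Batch:

compatible, err := CheckBatchCompressedDataCompatibility(originalBatch)
if err != nil {
	return err // constructing or compressing the payload itself failed
}
batch, err := NewDABatch(originalBatch, compatible /* enableCompress */)
if err != nil {
	return err
}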
func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -401,68 +389,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv1.EstimateBatchL1CommitGas(b) } - -// constructBatchPayload constructs the batch payload. -// This function is only used in compressed batch payload length estimation. -func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // batch metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode batch metadata and L2 transactions, - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into batch payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - } - return batchBytes, nil -} - -// compressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. -func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { - srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes - outbuf := make([]byte, outbufSize) - - if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, - (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { - return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) - } - - return outbuf[:int(outbufSize)], nil -} - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { - return codecv1.CalculatePaddedBlobSize(dataSize) -} - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. 
-func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv1.GetBlobDataProofArgs() -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index 5c82d10..0a85efa 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -223,7 +222,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -279,8 +278,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv2.EstimateBatchL1CommitGas(b) + 50000 // plus 50000 for the point-evaluation precompile call. } - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv2.GetBlobDataProofArgs() -} diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 4402b6a..b07e2be 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -1,11 +1,5 @@ package codecv4 -/* -#include -char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); -*/ -import "C" - import ( "crypto/sha256" "encoding/binary" @@ -13,9 +7,7 @@ import ( "errors" "fmt" "math/big" - "unsafe" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -23,8 +15,8 @@ import ( "github.com/scroll-tech/go-ethereum/log" "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv1" "github.com/scroll-tech/da-codec/encoding/codecv3" + "github.com/scroll-tech/da-codec/encoding/zstd" ) // MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -197,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock if enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error - blobBytes, err = compressScrollBatchBytes(batchBytes) + blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -210,7 +202,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } blobBytes = append([]byte{1}, blobBytes...) } else { - blobBytes = batchBytes blobBytes = append([]byte{0}, batchBytes...) 
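Every codec version in this series shares the payload header that constructBatchPayload and its replacement encoding.ConstructBatchPayloadInBlob emit: 2 bytes of num_chunks followed by MaxNumChunks big-endian 4-byte chunk sizes. A sketch of writing just that header; writeBlobMetadata is a hypothetical name and the chunk payload bytes are omitted:

import "encoding/binary"

// writeBlobMetadata emits num_chunks as a big-endian uint16 followed by one
// 4-byte big-endian size slot per possible chunk; unused slots stay zero.
func writeBlobMetadata(maxNumChunks int, chunkSizes []uint32) []byte {
	header := make([]byte, 2+maxNumChunks*4)
	binary.BigEndian.PutUint16(header[0:], uint16(len(chunkSizes)))
	for i, size := range chunkSizes {
		binary.BigEndian.PutUint32(header[2+4*i:], size)
	}
	return header
}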
} @@ -220,7 +211,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -345,7 +336,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -364,13 +355,13 @@ func (b *DABatch) BlobBytes() []byte { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 if enableCompress { - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } @@ -378,18 +369,18 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 if enableCompress { - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } @@ -397,16 +388,16 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -419,11 +410,11 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
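The flag byte introduced above is the whole decoding contract for conditional encoding: a reader needs no out-of-band signal to tell a compressed payload from a raw one, and it is also why both size estimators add one extra byte to blobBytesLength before padding. A minimal sketch of the reader side, assuming only that the first byte is the flag written by ConstructBlobPayload (splitBlobEnvelope is illustrative, not part of this patch):

    package main

    import (
        "errors"
        "fmt"
    )

    // splitBlobEnvelope strips the flag byte prepended by ConstructBlobPayload:
    // 1 means the remainder is zstd-compressed batch bytes, 0 means it is the
    // raw batch bytes.
    func splitBlobEnvelope(blobBytes []byte) (compressed bool, payload []byte, err error) {
        if len(blobBytes) == 0 {
            return false, nil, errors.New("empty blob payload")
        }
        switch blobBytes[0] {
        case 1:
            return true, blobBytes[1:], nil
        case 0:
            return false, blobBytes[1:], nil
        default:
            return false, nil, fmt.Errorf("unexpected envelope flag: %d", blobBytes[0])
        }
    }

    func main() {
        compressed, payload, err := splitBlobEnvelope([]byte{0, 0xca, 0xfe})
        if err != nil {
            panic(err)
        }
        fmt.Println(compressed, len(payload)) // false 2
    }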
func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -453,91 +444,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv3.EstimateBatchL1CommitGas(b) } - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv3.GetBlobDataProofArgs() -} - -// checkBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func checkBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) - if err != nil { - return false, err - } - blobBytes, err := compressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// constructBatchPayload constructs the batch payload. -// This function is only used in compressed batch payload length estimation. -func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // batch metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode batch metadata and L2 transactions, - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into batch payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - } - return batchBytes, nil -} - -// compressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. 
-func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { - srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes - outbuf := make([]byte, outbufSize) - - if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, - (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { - return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) - } - - return outbuf[:int(outbufSize)], nil -} - -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - return codecv1.MakeBlobCanonical(blobBytes) -} - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { - return codecv1.CalculatePaddedBlobSize(dataSize) -} diff --git a/encoding/da.go b/encoding/da.go index 4e88635..b085351 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -1,12 +1,16 @@ package encoding import ( + "encoding/binary" "fmt" "math/big" + "sync" + "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) // BLSModulus is the BLS modulus defined in EIP-4844. @@ -326,3 +330,119 @@ func CheckCompressedDataCompatibility(data []byte) error { return nil } + +// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. +func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + // blob contains 131072 bytes but we can only utilize 31/32 of these + if len(blobBytes) > 126976 { + return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) + } + + // the canonical (padded) blob payload + var blob kzg4844.Blob + + // encode blob payload by prepending every 31 bytes with 1 zero byte + index := 0 + + for from := 0; from < len(blobBytes); from += 31 { + to := from + 31 + if to > len(blobBytes) { + to = len(blobBytes) + } + copy(blob[index+1:], blobBytes[from:to]) + index += 32 + } + + return &blob, nil +} + +var ( + blobDataProofArgs *abi.Arguments + initBlobDataProofArgsOnce sync.Once +) + +// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. 
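MakeBlobCanonical above packs at most 31 payload bytes into each 32-byte BLS field element, zeroing the leading byte so every element stays below the BLS modulus; 4096 elements times 31 usable bytes gives the 126976-byte cap checked at the top of the function. The padded size that CalculatePaddedBlobSize below computes follows directly from that packing; a worked sketch restating the formula for illustration:

    package main

    import "fmt"

    // paddedBlobSize mirrors CalculatePaddedBlobSize: every full 31-byte group
    // costs 32 bytes on the blob, and a trailing partial group costs its own
    // length plus the one leading zero byte.
    func paddedBlobSize(dataSize uint64) uint64 {
        padded := (dataSize / 31) * 32
        if dataSize%31 != 0 {
            padded += 1 + dataSize%31
        }
        return padded
    }

    func main() {
        fmt.Println(paddedBlobSize(100))    // 104 = 3*32 + (1 + 7)
        fmt.Println(paddedBlobSize(126976)) // 131072 = 4096*32, a full blob
    }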
+func GetBlobDataProofArgs() (*abi.Arguments, error) { + var initError error + + initBlobDataProofArgsOnce.Do(func() { + // Initialize bytes32 type + bytes32Type, err := abi.NewType("bytes32", "bytes32", nil) + if err != nil { + initError = fmt.Errorf("failed to initialize abi type bytes32: %w", err) + return + } + + // Initialize bytes48 type + bytes48Type, err := abi.NewType("bytes48", "bytes48", nil) + if err != nil { + initError = fmt.Errorf("failed to initialize abi type bytes48: %w", err) + return + } + + // Successfully create the argument list + blobDataProofArgs = &abi.Arguments{ + {Type: bytes32Type, Name: "z"}, + {Type: bytes32Type, Name: "y"}, + {Type: bytes48Type, Name: "kzg_commitment"}, + {Type: bytes48Type, Name: "kzg_proof"}, + } + }) + + if initError != nil { + return nil, initError + } + + return blobDataProofArgs, nil +} + +// CalculatePaddedBlobSize calculates the required size on blob storage +// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. +func CalculatePaddedBlobSize(dataSize uint64) uint64 { + paddedSize := (dataSize / 31) * 32 + + if dataSize%31 != 0 { + paddedSize += 1 + dataSize%31 // Add 1 byte for the first empty byte plus the remainder bytes + } + + return paddedSize +} + +// ConstructBatchPayloadInBlob constructs the batch payload. +// This function is only used in compressed batch payload length estimation. +func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // batch metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode batch metadata and L2 transactions, + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into batch payload + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + if err != nil { + return nil, err + } + batchBytes = append(batchBytes, rlpTxData...) 
+ } + } + + // batch metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + } + return batchBytes, nil +} diff --git a/encoding/codecv2/libscroll_zstd_darwin_arm64.a b/encoding/zstd/libscroll_zstd_darwin_arm64.a similarity index 100% rename from encoding/codecv2/libscroll_zstd_darwin_arm64.a rename to encoding/zstd/libscroll_zstd_darwin_arm64.a diff --git a/encoding/codecv2/libscroll_zstd_darwin_arm64.go b/encoding/zstd/libscroll_zstd_darwin_arm64.go similarity index 81% rename from encoding/codecv2/libscroll_zstd_darwin_arm64.go rename to encoding/zstd/libscroll_zstd_darwin_arm64.go index 8ace74c..d83ec17 100644 --- a/encoding/codecv2/libscroll_zstd_darwin_arm64.go +++ b/encoding/zstd/libscroll_zstd_darwin_arm64.go @@ -1,4 +1,4 @@ -package codecv2 +package zstd /* #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_darwin_arm64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_amd64.a b/encoding/zstd/libscroll_zstd_linux_amd64.a similarity index 100% rename from encoding/codecv2/libscroll_zstd_linux_amd64.a rename to encoding/zstd/libscroll_zstd_linux_amd64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_amd64.go b/encoding/zstd/libscroll_zstd_linux_amd64.go similarity index 86% rename from encoding/codecv2/libscroll_zstd_linux_amd64.go rename to encoding/zstd/libscroll_zstd_linux_amd64.go index 0b22575..f1a686e 100644 --- a/encoding/codecv2/libscroll_zstd_linux_amd64.go +++ b/encoding/zstd/libscroll_zstd_linux_amd64.go @@ -1,7 +1,7 @@ //go:build !musl // +build !musl -package codecv2 +package zstd /* #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_linux_amd64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_arm64.a b/encoding/zstd/libscroll_zstd_linux_arm64.a similarity index 100% rename from encoding/codecv2/libscroll_zstd_linux_arm64.a rename to encoding/zstd/libscroll_zstd_linux_arm64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_arm64.go b/encoding/zstd/libscroll_zstd_linux_arm64.go similarity index 86% rename from encoding/codecv2/libscroll_zstd_linux_arm64.go rename to encoding/zstd/libscroll_zstd_linux_arm64.go index ebf3943..f3775d2 100644 --- a/encoding/codecv2/libscroll_zstd_linux_arm64.go +++ b/encoding/zstd/libscroll_zstd_linux_arm64.go @@ -1,7 +1,7 @@ //go:build !musl // +build !musl -package codecv2 +package zstd /* #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_linux_arm64.a diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go new file mode 100644 index 0000000..58eab2b --- /dev/null +++ b/encoding/zstd/zstd.go @@ -0,0 +1,26 @@ +package zstd + +/* +#include +char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); +*/ +import "C" +import ( + "fmt" + "unsafe" +) + +// CompressScrollBatchBytes compresses the given batch of bytes. +// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. 
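ConstructBatchPayloadInBlob above fixes the payload prefix at 2 + MaxNumChunks*4 bytes: a big-endian num_chunks followed by one 4-byte size slot per possible chunk, with unused slots left zero. A hypothetical reader of that prefix (parseBatchMetadata is illustrative only, not part of this patch):

    package main

    import (
        "encoding/binary"
        "fmt"
    )

    // parseBatchMetadata decodes the metadata section laid out by
    // ConstructBatchPayloadInBlob: num_chunks (2 bytes), then one big-endian
    // uint32 size per chunk.
    func parseBatchMetadata(batchBytes []byte, maxNumChunks uint64) (uint16, []uint32, error) {
        metadataLength := 2 + maxNumChunks*4
        if uint64(len(batchBytes)) < metadataLength {
            return 0, nil, fmt.Errorf("payload too short: %d < %d", len(batchBytes), metadataLength)
        }
        numChunks := binary.BigEndian.Uint16(batchBytes[0:2])
        if uint64(numChunks) > maxNumChunks {
            return 0, nil, fmt.Errorf("num_chunks %d exceeds max %d", numChunks, maxNumChunks)
        }
        sizes := make([]uint32, 0, numChunks)
        for i := uint64(0); i < uint64(numChunks); i++ {
            sizes = append(sizes, binary.BigEndian.Uint32(batchBytes[2+4*i:]))
        }
        return numChunks, sizes, nil
    }

    func main() {
        payload := make([]byte, 2+4*4) // hypothetical maxNumChunks of 4
        binary.BigEndian.PutUint16(payload[0:], 1)
        binary.BigEndian.PutUint32(payload[2:], 3) // one chunk of 3 bytes
        n, sizes, err := parseBatchMetadata(payload, 4)
        if err != nil {
            panic(err)
        }
        fmt.Println(n, sizes) // 1 [3]
    }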
+func CompressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + srcSize := C.uint64_t(len(batchBytes)) + outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbuf := make([]byte, outbufSize) + + if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, + (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { + return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) + } + + return outbuf[:int(outbufSize)], nil +} From 9532963eb5c251a94a29bc1c35bcf8567b8fc57e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 21:06:39 +0800 Subject: [PATCH 009/126] move symbol replace script to zstd folder --- .../add_scroll_prefix_in_zstd_related_symbols.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename encoding/{codecv2 => zstd}/add_scroll_prefix_in_zstd_related_symbols.sh (100%) diff --git a/encoding/codecv2/add_scroll_prefix_in_zstd_related_symbols.sh b/encoding/zstd/add_scroll_prefix_in_zstd_related_symbols.sh similarity index 100% rename from encoding/codecv2/add_scroll_prefix_in_zstd_related_symbols.sh rename to encoding/zstd/add_scroll_prefix_in_zstd_related_symbols.sh From 990bdb38b88ed3bf176182b269562c319a4e08f2 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 22:20:30 +0800 Subject: [PATCH 010/126] refactor: move some util functions to public package --- encoding/codecv0/codecv0.go | 53 +++++++++++++------------------------ encoding/codecv1/codecv1.go | 45 +++++++++++-------------------- encoding/da.go | 15 +++++++++++ 3 files changed, 49 insertions(+), 64 deletions(-) diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0/codecv0.go index f757a93..18b0b51 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -302,21 +302,6 @@ func (b *DABatch) Hash() common.Hash { return crypto.Keccak256Hash(bytes) } -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 - -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) -} - -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { - memorySizeWord := (memoryByteSize + 31) / 32 - memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) - return memoryCost -} - // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. 
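Returning to the compressor extracted above into encoding/zstd: now that CompressScrollBatchBytes is exported from its own package, any consumer can call it directly. A minimal usage sketch, assuming a build target with one of the bundled libscroll_zstd static libraries:

    package main

    import (
        "fmt"

        "github.com/scroll-tech/da-codec/encoding/zstd"
    )

    func main() {
        payload := make([]byte, 4096) // highly compressible input: all zeros
        compressed, err := zstd.CompressScrollBatchBytes(payload)
        if err != nil {
            panic(err)
        }
        fmt.Printf("raw %d bytes -> compressed %d bytes\n", len(payload), len(compressed))
    }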
func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { var size uint64 @@ -349,13 +334,13 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { if err != nil { return 0, err } - total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash } // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += encoding.CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -400,11 +385,11 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } @@ -413,22 +398,22 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) 
totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -442,14 +427,14 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 4ed048b..1675c76 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -406,21 +406,6 @@ func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { return dataSize, nil } -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 - -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) -} - -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { - memorySizeWord := (memoryByteSize + 31) / 32 - memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) - return memoryCost -} - // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
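The helpers being deduplicated in this patch implement the standard EVM cost formulas: memory expansion costs words*words/512 + 3*words with words = ceil(bytes/32), and a keccak256 over size bytes costs that expansion plus a 30 gas base plus 6 gas per word. A quick sanity check, restating the shared helpers for a worked example:

    package main

    import "fmt"

    // Same formulas as encoding.GetMemoryExpansionCost and
    // encoding.GetKeccak256Gas, restated for illustration.
    func memoryExpansionCost(byteSize uint64) uint64 {
        words := (byteSize + 31) / 32
        return (words*words)/512 + 3*words
    }

    func keccak256Gas(size uint64) uint64 {
        return memoryExpansionCost(size) + 30 + 6*((size+31)/32)
    }

    func main() {
        fmt.Println(keccak256Gas(100))  // 66 = expansion 12 + base 30 + 6*4 words
        fmt.Println(keccak256Gas(4096)) // 1214 = expansion 416 + base 30 + 6*128 words
    }

These feed the per-block estimates directly: hashing a 100-byte transaction payload adds 66 gas on top of the 16 gas per byte charged via CalldataNonZeroByteGas.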
func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { var total uint64 @@ -433,7 +418,7 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { } // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += encoding.CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -467,10 +452,10 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas } @@ -479,22 +464,22 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -505,11 +490,11 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize := EstimateChunkL1CommitCalldataSize(chunk) - 
totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas diff --git a/encoding/da.go b/encoding/da.go index b085351..eb66b7c 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -16,6 +16,9 @@ import ( // BLSModulus is the BLS modulus defined in EIP-4844. var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) +// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. +const CalldataNonZeroByteGas = 16 + // CodecVersion defines the version of encoder and decoder. type CodecVersion uint8 @@ -446,3 +449,15 @@ func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, } return batchBytes, nil } + +// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. +func GetKeccak256Gas(size uint64) uint64 { + return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) +} + +// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. +func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { + memorySizeWord := (memoryByteSize + 31) / 32 + memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) + return memoryCost +} From 6b868668c4b01175cd16e2b83ef07b968835f2b5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 22:24:41 +0800 Subject: [PATCH 011/126] fix CI --- encoding/codecv0/codecv0.go | 10 +++++----- encoding/codecv1/codecv1.go | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0/codecv0.go index 18b0b51..0eee94b 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -349,11 +349,11 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 1675c76..154bb26 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -427,11 +427,11 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages 
// read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total } From 3ad692a507444d6380d92baca214371ccd37d042 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 22:36:03 +0800 Subject: [PATCH 012/126] add interfaces of codec --- encoding/dacodec/dacodec.go | 48 +++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 encoding/dacodec/dacodec.go diff --git a/encoding/dacodec/dacodec.go b/encoding/dacodec/dacodec.go new file mode 100644 index 0000000..e210092 --- /dev/null +++ b/encoding/dacodec/dacodec.go @@ -0,0 +1,48 @@ +package dacodec + +import ( + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// DABlock represents a Data Availability Block. +type DABlock interface { + Encode() []byte + Decode([]byte) error +} + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk interface { + Encode() []byte + Hash() (common.Hash, error) +} + +// DABatch contains metadata about a batch of DAChunks. +type DABatch interface { + Encode() []byte + Hash() common.Hash + BlobDataProofForPointEvaluation() ([]byte, error) + Blob() *kzg4844.Blob + BlobBytes() []byte +} + +// Codec represents the interface for encoding and decoding DA-related structures. +type Codec interface { + NewDABlock(*encoding.Block, uint64) (DABlock, error) + NewDAChunk(*encoding.Chunk, uint64) (DAChunk, error) + NewDABatch(*encoding.Batch) (DABatch, error) + NewDABatchFromBytes([]byte) (DABatch, error) + + ComputeBatchDataHash([]*encoding.Chunk, uint64) (common.Hash, error) + ConstructBlobPayload([]*encoding.Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) + + EstimateChunkL1CommitBatchSizeAndBlobSize(*encoding.Chunk) (uint64, uint64, error) + EstimateBatchL1CommitBatchSizeAndBlobSize(*encoding.Batch) (uint64, uint64, error) + CheckChunkCompressedDataCompatibility(*encoding.Chunk) (bool, error) + CheckBatchCompressedDataCompatibility(*encoding.Batch) (bool, error) + EstimateChunkL1CommitCalldataSize(*encoding.Chunk) uint64 + EstimateChunkL1CommitGas(*encoding.Chunk) uint64 + EstimateBatchL1CommitGas(*encoding.Batch) uint64 + EstimateBatchL1CommitCalldataSize(*encoding.Batch) uint64 +} From a5c6430f1cfa1c62e72e87e844b5485a4f8e1680 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 23:00:46 +0800 Subject: [PATCH 013/126] add SetCompression --- encoding/dacodec/dacodec.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/encoding/dacodec/dacodec.go b/encoding/dacodec/dacodec.go index e210092..efafc44 100644 --- a/encoding/dacodec/dacodec.go +++ b/encoding/dacodec/dacodec.go @@ -45,4 +45,6 @@ type Codec interface { EstimateChunkL1CommitGas(*encoding.Chunk) uint64 EstimateBatchL1CommitGas(*encoding.Batch) uint64 EstimateBatchL1CommitCalldataSize(*encoding.Batch) uint64 + + SetCompression(enable bool) // only used for codecv4 } From 43f56e604d17c1a92c8ad7b52bbeb59d3b352e78 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 23:42:23 +0800 Subject: [PATCH 014/126] move interface to encoding --- encoding/dacodec/dacodec.go | 50 ------------------------------------- encoding/encoding.go | 49 ++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 50 deletions(-) delete mode 100644 encoding/dacodec/dacodec.go create mode 100644 encoding/encoding.go 
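With the interface about to live in the shared encoding package (diff below), callers can be written once against Codec and handed any concrete version. A minimal hypothetical consumer, not part of this patch:

    package example

    import (
        "github.com/scroll-tech/da-codec/encoding"

        "github.com/scroll-tech/go-ethereum/common"
    )

    // commitArtifacts derives the commitment inputs from any codec version:
    // the batch hash plus the raw blob bytes (nil for pre-4844 codecs).
    func commitArtifacts(c encoding.Codec, batch *encoding.Batch) (common.Hash, []byte, error) {
        daBatch, err := c.NewDABatch(batch)
        if err != nil {
            return common.Hash{}, nil, err
        }
        return daBatch.Hash(), daBatch.BlobBytes(), nil
    }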
diff --git a/encoding/dacodec/dacodec.go b/encoding/dacodec/dacodec.go deleted file mode 100644 index efafc44..0000000 --- a/encoding/dacodec/dacodec.go +++ /dev/null @@ -1,50 +0,0 @@ -package dacodec - -import ( - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" -) - -// DABlock represents a Data Availability Block. -type DABlock interface { - Encode() []byte - Decode([]byte) error -} - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk interface { - Encode() []byte - Hash() (common.Hash, error) -} - -// DABatch contains metadata about a batch of DAChunks. -type DABatch interface { - Encode() []byte - Hash() common.Hash - BlobDataProofForPointEvaluation() ([]byte, error) - Blob() *kzg4844.Blob - BlobBytes() []byte -} - -// Codec represents the interface for encoding and decoding DA-related structures. -type Codec interface { - NewDABlock(*encoding.Block, uint64) (DABlock, error) - NewDAChunk(*encoding.Chunk, uint64) (DAChunk, error) - NewDABatch(*encoding.Batch) (DABatch, error) - NewDABatchFromBytes([]byte) (DABatch, error) - - ComputeBatchDataHash([]*encoding.Chunk, uint64) (common.Hash, error) - ConstructBlobPayload([]*encoding.Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) - - EstimateChunkL1CommitBatchSizeAndBlobSize(*encoding.Chunk) (uint64, uint64, error) - EstimateBatchL1CommitBatchSizeAndBlobSize(*encoding.Batch) (uint64, uint64, error) - CheckChunkCompressedDataCompatibility(*encoding.Chunk) (bool, error) - CheckBatchCompressedDataCompatibility(*encoding.Batch) (bool, error) - EstimateChunkL1CommitCalldataSize(*encoding.Chunk) uint64 - EstimateChunkL1CommitGas(*encoding.Chunk) uint64 - EstimateBatchL1CommitGas(*encoding.Batch) uint64 - EstimateBatchL1CommitCalldataSize(*encoding.Batch) uint64 - - SetCompression(enable bool) // only used for codecv4 -} diff --git a/encoding/encoding.go b/encoding/encoding.go new file mode 100644 index 0000000..db3b027 --- /dev/null +++ b/encoding/encoding.go @@ -0,0 +1,49 @@ +package encoding + +import ( + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// DABlock represents a Data Availability Block. +type DABlock interface { + Encode() []byte + Decode([]byte) error +} + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk interface { + Encode() []byte + Hash() (common.Hash, error) +} + +// DABatch contains metadata about a batch of DAChunks. +type DABatch interface { + Encode() []byte + Hash() common.Hash + BlobDataProofForPointEvaluation() ([]byte, error) + Blob() *kzg4844.Blob + BlobBytes() []byte +} + +// Codec represents the interface for encoding and decoding DA-related structures. 
+type Codec interface { + NewDABlock(*Block, uint64) (DABlock, error) + NewDAChunk(*Chunk, uint64) (DAChunk, error) + NewDABatch(*Batch) (DABatch, error) + NewDABatchFromBytes([]byte) (DABatch, error) + + ComputeBatchDataHash([]*Chunk, uint64) (common.Hash, error) + ConstructBlobPayload([]*Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) + + EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) + EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) + CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) + CheckBatchCompressedDataCompatibility(*Batch) (bool, error) + EstimateChunkL1CommitCalldataSize(*Chunk) uint64 + EstimateChunkL1CommitGas(*Chunk) uint64 + EstimateBatchL1CommitGas(*Batch) uint64 + EstimateBatchL1CommitCalldataSize(*Batch) uint64 + + SetCompression(enable bool) // only used for codecv4 +} From cd280de54d4aeaad334693b73ef047da45646f10 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 03:30:44 +0800 Subject: [PATCH 015/126] refactor --- encoding/{codecv0 => }/codecv0.go | 183 +++-- encoding/codecv0/codecv0_test.go | 597 ---------------- encoding/{codecv1 => }/codecv1.go | 239 ++++--- encoding/codecv1/codecv1_test.go | 892 ----------------------- encoding/{codecv2 => }/codecv2.go | 137 ++-- encoding/codecv2/codecv2_test.go | 967 ------------------------- encoding/{codecv3 => }/codecv3.go | 120 ++-- encoding/codecv3/codecv3_test.go | 1098 ----------------------------- encoding/{codecv4 => }/codecv4.go | 155 ++-- encoding/codecv4/codecv4_test.go | 837 ---------------------- encoding/da.go | 28 +- encoding/encoding.go | 44 +- encoding/zstd/zstd.go | 1 + 13 files changed, 525 insertions(+), 4773 deletions(-) rename encoding/{codecv0 => }/codecv0.go (63%) delete mode 100644 encoding/codecv0/codecv0_test.go rename encoding/{codecv1 => }/codecv1.go (59%) delete mode 100644 encoding/codecv1/codecv1_test.go rename encoding/{codecv2 => }/codecv2.go (70%) delete mode 100644 encoding/codecv2/codecv2_test.go rename encoding/{codecv3 => }/codecv3.go (63%) delete mode 100644 encoding/codecv3/codecv3_test.go rename encoding/{codecv4 => }/codecv4.go (70%) delete mode 100644 encoding/codecv4/codecv4_test.go diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0.go similarity index 63% rename from encoding/codecv0/codecv0.go rename to encoding/codecv0.go index 0eee94b..9a92879 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0.go @@ -1,4 +1,4 @@ -package codecv0 +package encoding import ( "encoding/binary" @@ -12,12 +12,13 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" - - "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +type DACodecV0 struct{} + // DABlock represents a Data Availability Block. -type DABlock struct { +type DABlockV0 struct { BlockNumber uint64 Timestamp uint64 BaseFee *big.Int @@ -27,13 +28,13 @@ type DABlock struct { } // DAChunk groups consecutive DABlocks with their transactions. -type DAChunk struct { - Blocks []*DABlock +type DAChunkV0 struct { + Blocks []*DABlockV0 Transactions [][]*types.TransactionData } // DABatch contains metadata about a batch of DAChunks. 
-type DABatch struct { +type DABatchV0 struct { Version uint8 BatchIndex uint64 L1MessagePopped uint64 @@ -43,8 +44,8 @@ type DABatch struct { SkippedL1MessageBitmap []byte } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -62,7 +63,7 @@ func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABl return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := DABlock{ + daBlock := DABlockV0{ BlockNumber: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, @@ -75,7 +76,7 @@ func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABl } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlock) Encode() []byte { +func (b *DABlockV0) Encode() []byte { bytes := make([]byte, 60) binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) @@ -89,7 +90,7 @@ func (b *DABlock) Encode() []byte { } // Decode populates the fields of a DABlock from a byte slice. -func (b *DABlock) Decode(bytes []byte) error { +func (b *DABlockV0) Decode(bytes []byte) error { if len(bytes) != 60 { return errors.New("block encoding is not 60 bytes long") } @@ -104,9 +105,9 @@ func (b *DABlock) Decode(bytes []byte) error { return nil } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - var blocks []*DABlock +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + var blocks []*DABlockV0 var txs [][]*types.TransactionData if chunk == nil { @@ -122,16 +123,20 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } for _, block := range chunk.Blocks { - b, err := NewDABlock(block, totalL1MessagePoppedBefore) + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } - blocks = append(blocks, b) + blockData, ok := b.(*DABlockV0) + if !ok { + return nil, errors.New("failed to cast block data") + } + blocks = append(blocks, blockData) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } - daChunk := DAChunk{ + daChunk := DAChunkV0{ Blocks: blocks, Transactions: txs, } @@ -140,7 +145,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // Encode serializes the DAChunk into a slice of bytes. 
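For reference, the 60-byte block context serialized by Encode and Decode above breaks down as follows; the offsets are reconstructed from the v0 layout and the constant names are illustrative, not part of this patch:

    // Offsets within the 60-byte block context of codecv0.
    const (
        blockNumberOffset     = 0  // uint64, 8 bytes, big-endian
        timestampOffset       = 8  // uint64, 8 bytes, big-endian
        baseFeeOffset         = 16 // 32 bytes, big-endian, zero-padded
        gasLimitOffset        = 48 // uint64, 8 bytes, big-endian
        numTransactionsOffset = 56 // uint16, 2 bytes, big-endian
        numL1MessagesOffset   = 58 // uint16, 2 bytes, big-endian
        blockContextByteSize  = 60
    )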
-func (c *DAChunk) Encode() ([]byte, error) { +func (c *DAChunkV0) Encode() ([]byte, error) { if len(c.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -165,7 +170,7 @@ func (c *DAChunk) Encode() ([]byte, error) { } var txLen [4]byte - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return nil, err } @@ -180,7 +185,7 @@ func (c *DAChunk) Encode() ([]byte, error) { } // Hash computes the hash of the DAChunk data. -func (c *DAChunk) Hash() (common.Hash, error) { +func (c *DAChunkV0) Hash() (common.Hash, error) { chunkBytes, err := c.Encode() if err != nil { return common.Hash{}, err @@ -222,15 +227,15 @@ func (c *DAChunk) Hash() (common.Hash, error) { return hash, nil } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // compute batch data hash var dataBytes []byte totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore for _, chunk := range batch.Chunks { // build data hash - daChunk, err := NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return nil, err } @@ -246,13 +251,13 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } - daBatch := DABatch{ - Version: uint8(encoding.CodecV0), + daBatch := DABatchV0{ + Version: uint8(CodecV0), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -265,12 +270,12 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -284,7 +289,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) @@ -297,20 +302,35 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV0) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } +// Blob returns the blob of the batch. +func (b *DABatchV0) Blob() *kzg4844.Blob { + return nil +} + +// BlobBytes returns the blob bytes of the batch. 
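Each L2 transaction inside the chunk encoding above is framed as a 4-byte big-endian payload length followed by its RLP bytes; a small illustrative helper mirroring that framing:

    package main

    import (
        "encoding/binary"
        "fmt"
    )

    // frameTx reproduces the per-transaction framing used by DAChunkV0.Encode:
    // a 4-byte big-endian payload length, then the RLP-encoded transaction.
    func frameTx(rlpTxData []byte) []byte {
        var txLen [4]byte
        binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData)))
        return append(txLen[:], rlpTxData...)
    }

    func main() {
        framed := frameTx([]byte{0xc0}) // RLP of an empty list, as a stand-in
        fmt.Printf("% x\n", framed)     // 00 00 00 01 c0
    }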
+func (b *DABatchV0) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 for _, txData := range b.Transactions { if txData.Type == types.L1MessageTxType { continue } size += 4 // 4 bytes payload length - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := GetTxPayloadLength(txData) if err != nil { return 0, err } @@ -321,7 +341,7 @@ func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -330,17 +350,17 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { continue } - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := GetTxPayloadLength(txData) if err != nil { return 0, err } - total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += GetKeccak256Gas(txPayloadLength) // l2 tx hash } // 60 bytes BlockContext calldata - total += encoding.CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -349,20 +369,20 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
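The block calldata estimate above is purely additive: 60 bytes of BlockContext plus, per L2 transaction, a 4-byte length prefix and the RLP payload, while L1 messages contribute nothing because they are not re-published in calldata. For example, a block with two L2 transactions of 100 and 200 RLP bytes is estimated at (4 + 100) + (4 + 200) + 60 = 368 bytes.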
-func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, block := range c.Blocks { - blockL1CommitCalldataSize, err := EstimateBlockL1CommitCalldataSize(block) + blockL1CommitCalldataSize, err := o.EstimateBlockL1CommitCalldataSize(block) if err != nil { return 0, err } @@ -372,12 +392,12 @@ func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalTxNum uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalTxNum += uint64(len(block.Transactions)) - blockL1CommitGas, err := EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -385,40 +405,40 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
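The chunk-hash term above sizes the keccak preimage at 58 bytes per block (the 60-byte block context minus two bytes) plus one 32-byte hash per transaction: a chunk of 2 blocks carrying 5 transactions hashes 58*2 + 32*5 = 276 bytes, which GetKeccak256Gas prices at 27 + 30 + 6*9 = 111 gas.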
-func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) if err != nil { return 0, err } @@ -427,24 +447,24 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - totalL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) + totalL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
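Note that the skipped-bitmap terms above follow Go's left-to-right integer arithmetic, i.e. (32 * (popped + 255)) / 256, which slightly over-approximates the exact bitmap size of 32 * ceil(popped / 256) bytes and so stays conservative for an estimate. For a chunk popping 300 L1 messages, the charge covers (32 * 555) / 256 = 69 calldata bytes at 16 gas each plus a keccak over an 89 + 69 = 158-byte batch header.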
-func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -453,10 +473,25 @@ func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { return totalL1CommitCalldataSize, nil } -func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return 0, err - } - return uint64(len(rlpTxData)), nil +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return true, nil } + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return 0, 0, nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return 0, 0, nil +} + +// SetCompression enables or disables compression. 
+func (o *DACodecV0) SetCompression(enable bool) {} diff --git a/encoding/codecv0/codecv0_test.go b/encoding/codecv0/codecv0_test.go deleted file mode 100644 index 330a826..0000000 --- a/encoding/codecv0/codecv0_test.go +++ /dev/null @@ -1,597 +0,0 @@ -package codecv0 - -import ( - "encoding/hex" - "encoding/json" - "math/big" - "os" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/log" - "github.com/stretchr/testify/assert" - - "github.com/scroll-tech/da-codec/encoding" -) - -func TestCodecV0(t *testing.T) { - glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat())) - glogger.Verbosity(log.LvlInfo) - log.Root().SetHandler(glogger) - - parentDABatch, err := NewDABatch(&encoding.Batch{ - Index: 0, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: common.Hash{}, - Chunks: nil, - }) - assert.NoError(t, err) - parentBatchHash := parentDABatch.Hash() - - block1 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block5 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block6 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - - blockL1CommitCalldataSize, err := EstimateBlockL1CommitCalldataSize(block1) - assert.NoError(t, err) - assert.Equal(t, uint64(298), blockL1CommitCalldataSize) - blockL1CommitGas, err := EstimateBlockL1CommitGas(block1) - assert.NoError(t, err) - assert.Equal(t, uint64(4900), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block2) - assert.NoError(t, err) - assert.Equal(t, uint64(5745), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block2) - assert.NoError(t, err) - assert.Equal(t, uint64(93613), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block3) - assert.NoError(t, err) - assert.Equal(t, uint64(96), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block3) - assert.NoError(t, err) - assert.Equal(t, uint64(4187), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block4) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block4) - assert.NoError(t, err) - assert.Equal(t, uint64(14020), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block5) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block5) - assert.NoError(t, err) - assert.Equal(t, uint64(8796), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block6) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block6) - assert.NoError(t, err) - assert.Equal(t, uint64(6184), blockL1CommitGas) - - // Test case: when the batch and chunk contains one block. 
- chunk := &encoding.Chunk{ - Blocks: []*encoding.Block{block1}, - } - chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(298), chunkL1CommitCalldataSize) - chunkL1CommitGas, err := EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(6042), chunkL1CommitGas) - - daChunk, err := NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err := daChunk.Encode() - assert.NoError(t, err) - chunkHexString := hex.EncodeToString(chunkBytes) - assert.Equal(t, 299, len(chunkBytes)) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", chunkHexString) - daChunkHash, err := daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xde642c68122634b33fa1e6e4243b17be3bfd0dc6f996f204ef6d7522516bd840"), daChunkHash) - - batch := &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err := EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(298), batchL1CommitCalldataSize) - batchL1CommitGas, err := EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(162591), batchL1CommitGas) - - daBatch, err := NewDABatch(batch) - assert.NoError(t, err) - batchBytes := daBatch.Encode() - batchHexString := hex.EncodeToString(batchBytes) - assert.Equal(t, 89, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000000000000000000008fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3eb0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0", batchHexString) - assert.Equal(t, 0, len(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(0), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(0), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa906c7d2b6b68ea5fec3ff9d60d41858676e0d365e5d5ef07b2ce20fcf24ecd7"), daBatch.Hash()) - - decodedDABatch, err := NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes := decodedDABatch.Encode() - decodedBatchHexString := hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: when the batch and chunk contains two block. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block1, block2}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(6043), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(100742), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 6044, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x014916a83eccdb0d01e814b4d4ab90eb9049ba9a3cb0994919b86ad873bcd028"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(6043), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(257897), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 89, len(batchBytes)) - assert.Equal(t, "0000000000000000010000000000000000000000000000000074dd561a36921590926bee01fd0d53747c5f3e48e48a2d5538b9ab0e1511cfd7b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0", batchHexString) - assert.Equal(t, 0, len(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(0), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(0), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xb02e39b740756824d20b2cac322ac365121411ced9d6e34de98a0b247c6e23e6"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: when the chunk contains one block with 1 L1MsgTx. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block3}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(96), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(5329), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - chunkHexString = hex.EncodeToString(chunkBytes) - assert.Equal(t, 97, len(chunkBytes)) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", chunkHexString) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x9e643c8a9203df542e39d9bfdcb07c99575b3c3d557791329fef9d83cc4147d0"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(96), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(161889), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1cab0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab000000000000000000000000000000000000000000000000000000000000003ff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap := "00000000000000000000000000000000000000000000000000000000000003ff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(11), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(11), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa18f07cb56ab4f2db5914d9b5699c5932bea4b5c73e71c8cec79151c11e9e986"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: batch contains multiple chunks, chunk contains multiple blocks. 
- chunk1 := &encoding.Chunk{ - Blocks: []*encoding.Block{block1, block2, block3}, - } - chunk1L1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk1) - assert.NoError(t, err) - assert.Equal(t, uint64(6139), chunk1L1CommitCalldataSize) - chunk1L1CommitGas, err := EstimateChunkL1CommitGas(chunk1) - assert.NoError(t, err) - assert.Equal(t, uint64(106025), chunk1L1CommitGas) - - daChunk1, err := NewDAChunk(chunk1, 0) - assert.NoError(t, err) - chunkBytes1, err := daChunk1.Encode() - assert.NoError(t, err) - assert.Equal(t, 6140, len(chunkBytes1)) - - chunk2 := &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunk2L1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunk2L1CommitCalldataSize) - chunk2L1CommitGas, err := EstimateChunkL1CommitGas(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunk2L1CommitGas) - - daChunk2, err := NewDAChunk(chunk2, 0) - assert.NoError(t, err) - chunkBytes2, err := daChunk2.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes2)) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk1, chunk2}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(6199), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(279054), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fcb0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000001ffffffbff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000001ffffffbff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(42), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xf7bd6afe02764e4e6df23a374d753182b57fa77be71aaf1cd8365e15a51872d1"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many consecutive L1 Msgs in 1 bitmap, no leading skipped msgs. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 37, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(171730), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "0000000000000000010000000000000005000000000000002ac62fb58ec2d5393e00960f1cc23cab883b685296efa03d13ea2dd4c6de79cc55b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000000000000000", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000000000000000" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(5), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x841f4657b7eb723cae35377cf2963b51191edad6a3b182d4c8524cb928d2a413"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many consecutive L1 Msgs in 1 bitmap, with leading skipped msgs. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(171810), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000001fffffffff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000001fffffffff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(42), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa28766a3617cf244cc397fc4ce4c23022ec80f152b9f618807ac7e7c11486612"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many sparse L1 Msgs in 1 bitmap. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block5}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(9947), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(166504), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4db0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab000000000000000000000000000000000000000000000000000000000000001dd", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "00000000000000000000000000000000000000000000000000000000000001dd" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(10), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(10), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x2fee2073639eb9795007f7e765b3318f92658822de40b2134d34a478a0e9058a"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many L1 Msgs in each of 2 bitmaps. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block6}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(7326), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(164388), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 153, len(batchBytes)) - assert.Equal(t, "00000000000000000100000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", batchHexString) - assert.Equal(t, 64, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(257), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(257), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x84206bc6d0076a233fc7120a0bec4e03bf2512207437768828384dddb335ba2e"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) -} - -func TestErrorPaths(t *testing.T) { - // Test case: when the chunk is nil. - _, err := NewDAChunk(nil, 100) - assert.Error(t, err) - assert.Contains(t, err.Error(), "chunk is nil") - - // Test case: when the chunk contains no blocks. - chunk := &encoding.Chunk{ - Blocks: []*encoding.Block{}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of blocks is 0") - - // Test case: when the chunk contains more than 255 blocks. - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{}, - } - for i := 0; i < 256; i++ { - chunk.Blocks = append(chunk.Blocks, &encoding.Block{}) - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of blocks exceeds 1 byte") - - // Test case: Header.Number is not a uint64. 
- block := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block.Header.Number = new(big.Int).Lsh(block.Header.Number, 64) - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "block number is not uint64") - - // Test case: number of transactions exceeds max uint16. - block = readBlockFromJSON(t, "../testdata/blockTrace_02.json") - for i := 0; i < 65537; i++ { - block.Transactions = append(block.Transactions, block.Transactions[0]) - } - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of transactions exceeds max uint16") - - // Test case: decode transaction with hex string without 0x prefix error. - block = readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block.Transactions = block.Transactions[:1] - block.Transactions[0].Data = "not-a-hex" - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.Error(t, err) - assert.Contains(t, err.Error(), "hex string without 0x prefix") - _, err = EstimateChunkL1CommitGas(chunk) - assert.Error(t, err) - assert.Contains(t, err.Error(), "hex string without 0x prefix") - - // Test case: number of L1 messages exceeds max uint16. - block = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - for i := 0; i < 65535; i++ { - tx := &block.Transactions[i] - txCopy := *tx - txCopy.Nonce = uint64(i + 1) - block.Transactions = append(block.Transactions, txCopy) - } - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of L1 messages exceeds max uint16") -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1.go similarity index 59% rename from encoding/codecv1/codecv1.go rename to encoding/codecv1.go index 154bb26..5a1d87d 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1.go @@ -1,4 +1,4 @@ -package codecv1 +package encoding import ( "crypto/sha256" @@ -13,22 +13,21 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = 15 +type DACodecV1 struct{} + +// Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv1MaxNumChunks = 15 -// DABlock represents a Data Availability Block. -type DABlock = codecv0.DABlock +// DABlockV1 represents a Data Availability Block. +type DABlockV1 = DABlockV0 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk codecv0.DAChunk +// DAChunkV1 groups consecutive DABlocks with their transactions. +type DAChunkV1 DAChunkV0 -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +// DABatchV1 contains metadata about a batch of DAChunks. 
+type DABatchV1 struct { // header Version uint8 BatchIndex uint64 @@ -44,13 +43,13 @@ type DABatch struct { z *kzg4844.Point } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv0.NewDABlock(block, totalL1MessagePoppedBefore) +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -59,20 +58,24 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []*DABlock + var blocks []*DABlockV1 var txs [][]*types.TransactionData for _, block := range chunk.Blocks { - b, err := NewDABlock(block, totalL1MessagePoppedBefore) + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } - blocks = append(blocks, b) + blockData, ok := b.(*DABlockV1) + if !ok { + return nil, errors.New("failed to cast block data") + } + blocks = append(blocks, blockData) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } - daChunk := DAChunk{ + daChunk := DAChunkV1{ Blocks: blocks, Transactions: txs, } @@ -81,7 +84,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunk) Encode() []byte { +func (c *DAChunkV1) Encode() ([]byte, error) { var chunkBytes []byte chunkBytes = append(chunkBytes, byte(len(c.Blocks))) @@ -90,11 +93,11 @@ func (c *DAChunk) Encode() []byte { chunkBytes = append(chunkBytes, blockBytes...) } - return chunkBytes + return chunkBytes, nil } // Hash computes the hash of the DAChunk data. -func (c *DAChunk) Hash() (common.Hash, error) { +func (c *DAChunkV1) Hash() (common.Hash, error) { var dataBytes []byte // concatenate block contexts @@ -127,10 +130,10 @@ func (c *DAChunk) Hash() (common.Hash, error) { return hash, nil } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. 
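Since NewDABlock on the V1 codec now returns the DABlock interface while DAChunkV1 stores concrete *DABlockV1 values, NewDAChunk has to type-assert each returned block before appending it. A minimal self-contained sketch of that interface-to-concrete downcast, using simplified stand-in types (DABlock, DABlockV1, and collectBlocks here are illustrative, not the package's real definitions):

package main

import (
	"errors"
	"fmt"
)

// DABlock is the interface shape shared by both codec versions.
type DABlock interface {
	Encode() []byte
}

// DABlockV1 is a stand-in for the concrete V1 block type.
type DABlockV1 struct {
	number uint64
}

// Encode returns a token encoding; the real block context is 60 bytes.
func (b *DABlockV1) Encode() []byte {
	return []byte{byte(b.number)}
}

// collectBlocks mirrors the downcast inside NewDAChunk: interface values
// are type-asserted back to the concrete block type before being stored.
func collectBlocks(in []DABlock) ([]*DABlockV1, error) {
	var blocks []*DABlockV1
	for _, b := range in {
		blockData, ok := b.(*DABlockV1)
		if !ok {
			return nil, errors.New("failed to cast block data")
		}
		blocks = append(blocks, blockData)
	}
	return blocks, nil
}

func main() {
	blocks, err := collectBlocks([]DABlock{&DABlockV1{number: 2}})
	fmt.Println(len(blocks), err) // 1 <nil>
}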
+func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
-	if len(batch.Chunks) > MaxNumChunks {
+	if len(batch.Chunks) > Codecv1MaxNumChunks {
 		return nil, errors.New("too many chunks in batch")
 	}
@@ -139,25 +142,25 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
 	// skipped L1 messages bitmap
-	bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, err := constructBlobPayload(batch.Chunks, false /* no mock */)
+	blob, blobVersionedHash, z, err := o.constructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
 
-	daBatch := DABatch{
-		Version:              uint8(encoding.CodecV1),
+	daBatch := DABatchV1{
+		Version:              uint8(CodecV1),
 		BatchIndex:           batch.Index,
 		L1MessagePopped:      totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
 		TotalL1MessagePopped: totalL1MessagePoppedAfter,
@@ -176,12 +179,12 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+func (o *DACodecV1) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
 	var dataBytes []byte
 	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
 
 	for _, chunk := range chunks {
-		daChunk, err := NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
 		if err != nil {
 			return common.Hash{}, err
 		}
@@ -198,16 +201,16 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }
 
 // constructBlobPayload constructs the 4844 blob payload.
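Before the body of constructBlobPayload, it may help to see the metadata layout it begins with: 2 big-endian bytes for num_chunks followed by one 4-byte size slot per chunk, reserved up front for all Codecv1MaxNumChunks slots. A standalone sketch of just that header encoding (writeBlobMetadata is a hypothetical helper, not part of this patch):

package main

import (
	"encoding/binary"
	"fmt"
)

const maxNumChunks = 15 // mirrors Codecv1MaxNumChunks

// writeBlobMetadata lays out num_chunks (2 bytes) followed by one
// 4-byte big-endian size slot per chunk; unused slots stay zero.
func writeBlobMetadata(chunkSizes []uint32) ([]byte, error) {
	if len(chunkSizes) > maxNumChunks {
		return nil, fmt.Errorf("too many chunks: %d", len(chunkSizes))
	}
	metadata := make([]byte, 2+maxNumChunks*4)
	binary.BigEndian.PutUint16(metadata[0:2], uint16(len(chunkSizes)))
	for i, size := range chunkSizes {
		binary.BigEndian.PutUint32(metadata[2+4*i:], size)
	}
	return metadata, nil
}

func main() {
	// two chunks of 100 and 200 payload bytes -> 62-byte metadata header
	metadata, err := writeBlobMetadata([]uint32{100, 200})
	fmt.Printf("%x %v\n", metadata, err)
}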
-func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + Codecv1MaxNumChunks*4 // the raw (un-padded) blob payload blobBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+Codecv1MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -227,7 +230,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, err } @@ -245,10 +248,10 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunks chunks, the rest + // if we have fewer than Codecv1MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < Codecv1MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -258,7 +261,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -271,11 +274,11 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+Codecv1MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -288,12 +291,12 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
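The challenge point z computed at the end of constructBlobPayload above is keccak256 of the preimage reduced modulo the BLS12-381 scalar field, left-padded into 32 bytes. A self-contained sketch of that derivation (it uses golang.org/x/crypto/sha3 in place of go-ethereum's crypto.Keccak256Hash, and the sample preimage is made up):

package main

import (
	"fmt"
	"math/big"

	"golang.org/x/crypto/sha3"
)

// blsModulus is the BLS12-381 scalar field modulus used for z = digest % r.
var blsModulus, _ = new(big.Int).SetString(
	"52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

// challengePoint reduces the keccak256 digest of the preimage modulo
// the BLS modulus and left-pads the result into a 32-byte field element.
func challengePoint(preimage []byte) [32]byte {
	h := sha3.NewLegacyKeccak256()
	h.Write(preimage)
	digest := h.Sum(nil)

	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(digest), blsModulus)
	pointBytes := pointBigInt.Bytes()

	var z [32]byte
	copy(z[32-len(pointBytes):], pointBytes) // big-endian, left-padded
	return z
}

func main() {
	z := challengePoint([]byte("example challenge preimage"))
	fmt.Printf("%x\n", z)
}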
-func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV1{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -308,7 +311,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV1) Encode() []byte { batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) @@ -322,13 +325,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV1) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // BlobDataProof computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProof() ([]byte, error) { +func (b *DABatchV1) BlobDataProof() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProof with empty blob") } @@ -352,7 +355,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -360,35 +363,45 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV1) Blob() *kzg4844.Blob { return b.blob } +// BlobBytes returns the blob bytes of the batch. +func (b *DABatchV1) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { - metadataSize := uint64(2 + 4*MaxNumChunks) // over-estimate: adding metadata length - chunkDataSize, err := chunkL1CommitBlobDataSize(c) +func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { + metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length + chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } - return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. 
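The fixed 121-byte header that Encode and NewDABatchFromBytes above agree on is easiest to audit as an offset table. A round-trip sketch over just the fixed-size fields (batchHeader is a trimmed stand-in for DABatchV1, not the real type):

package main

import (
	"encoding/binary"
	"fmt"
)

// batchHeader mirrors DABatchV1's fixed 121-byte prefix:
// version (1) | batchIndex (8) | l1MessagePopped (8) | totalL1MessagePopped (8)
// | dataHash (32) | blobVersionedHash (32) | parentBatchHash (32)
type batchHeader struct {
	version              uint8
	batchIndex           uint64
	l1MessagePopped      uint64
	totalL1MessagePopped uint64
	dataHash             [32]byte
	blobVersionedHash    [32]byte
	parentBatchHash      [32]byte
}

func (h *batchHeader) encode() []byte {
	b := make([]byte, 121)
	b[0] = h.version
	binary.BigEndian.PutUint64(b[1:9], h.batchIndex)
	binary.BigEndian.PutUint64(b[9:17], h.l1MessagePopped)
	binary.BigEndian.PutUint64(b[17:25], h.totalL1MessagePopped)
	copy(b[25:57], h.dataHash[:])
	copy(b[57:89], h.blobVersionedHash[:])
	copy(b[89:121], h.parentBatchHash[:])
	return b
}

func decodeBatchHeader(b []byte) (*batchHeader, error) {
	if len(b) < 121 {
		return nil, fmt.Errorf("insufficient data, expected at least 121 bytes but got %d", len(b))
	}
	h := &batchHeader{
		version:              b[0],
		batchIndex:           binary.BigEndian.Uint64(b[1:9]),
		l1MessagePopped:      binary.BigEndian.Uint64(b[9:17]),
		totalL1MessagePopped: binary.BigEndian.Uint64(b[17:25]),
	}
	copy(h.dataHash[:], b[25:57])
	copy(h.blobVersionedHash[:], b[57:89])
	copy(h.parentBatchHash[:], b[89:121])
	return h, nil
}

func main() {
	in := batchHeader{version: 1, batchIndex: 42, totalL1MessagePopped: 11}
	out, err := decodeBatchHeader(in.encode())
	fmt.Println(out.batchIndex, out.totalL1MessagePopped, err) // 42 11 <nil>
}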
-func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { - metadataSize := uint64(2 + 4*MaxNumChunks) +func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { + metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { - chunkDataSize, err := chunkL1CommitBlobDataSize(c) + chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } batchDataSize += chunkDataSize } - return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } -func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { for _, tx := range block.Transactions { @@ -396,7 +409,7 @@ func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { continue } - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return 0, err } @@ -407,7 +420,7 @@ func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { +func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -418,7 +431,7 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { } // 60 bytes BlockContext calldata - total += encoding.CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -427,84 +440,122 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - return total + return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return uint64(60 * len(c.Blocks)) +func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(60 * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
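The per-block estimate in EstimateBlockL1CommitGas above is a sum of flat EVM cost terms, so a cut-down version makes the accounting auditable. A sketch with the constants inlined (calldataNonZeroByteGas = 16 and memoryExpansionCost are simplified assumptions standing in for the encoding package's helpers):

package main

import "fmt"

const calldataNonZeroByteGas = 16 // worst case: every calldata byte priced as non-zero

// memoryExpansionCost is a simplified stand-in for GetMemoryExpansionCost:
// 3 gas per 32-byte word plus the quadratic term words^2/512.
func memoryExpansionCost(byteLen uint64) uint64 {
	words := (byteLen + 31) / 32
	return 3*words + words*words/512
}

// estimateBlockL1CommitGas mirrors the shape of the V1 estimator:
// a flat 60-byte BlockContext charge plus per-L1-message queue costs.
func estimateBlockL1CommitGas(numL1Messages uint64) uint64 {
	var total uint64
	total += calldataNonZeroByteGas * 60             // 60-byte BlockContext calldata
	total += 2100 * numL1Messages                    // cold sload in L1MessageQueue
	total += 100 * numL1Messages                     // call to L1MessageQueue
	total += 100 * numL1Messages                     // warm address access
	total += memoryExpansionCost(36) * numL1Messages // staticcall to proxy
	total += 300 * numL1Messages                     // read admin + read impl + access impl
	total += memoryExpansionCost(36) * numL1Messages // delegatecall to impl
	return total
}

func main() {
	fmt.Println(estimateBlockL1CommitGas(5))
}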
-func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { +func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas := EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } totalL1CommitGas += blockL1CommitGas } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - return totalL1CommitGas + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { +func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas := EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + if err != nil { + return 0, err + } totalL1CommitGas += chunkL1CommitGas totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * 
(totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - totalL1CommitCalldataSize := EstimateChunkL1CommitCalldataSize(chunk) - totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) + var totalL1CommitCalldataSize uint64 + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) } - return totalL1CommitGas + return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { +func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - totalL1CommitCalldataSize += EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize } - return totalL1CommitCalldataSize + return totalL1CommitCalldataSize, nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return true, nil } + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return true, nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return 0, 0, nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return 0, 0, nil +} + +// SetCompression enables or disables compression. 
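A recurring term in the chunk and batch gas estimates above is the skipped-L1-message bitmap size: one 32-byte word per 256 popped messages, via the (n + 255) / 256 ceiling division. A small sketch of that sizing (keccakGas is a simplified stand-in for GetKeccak256Gas):

package main

import "fmt"

// skippedBitmapBytes returns the bitmap size for n popped L1 messages:
// one 32-byte word per 256 messages, rounded up.
func skippedBitmapBytes(n uint64) uint64 {
	return 32 * ((n + 255) / 256)
}

// keccakGas is a simplified stand-in for GetKeccak256Gas:
// 30 base gas plus 6 gas per 32-byte word hashed.
func keccakGas(dataLen uint64) uint64 {
	return 30 + 6*((dataLen+31)/32)
}

func main() {
	for _, n := range []uint64{0, 1, 256, 257} {
		bm := skippedBitmapBytes(n)
		// 89 is the constant batch header length used in the estimate
		fmt.Printf("popped=%d bitmap=%dB headerHashGas=%d\n", n, bm, keccakGas(89+bm))
	}
}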
+func (o *DACodecV1) SetCompression(enable bool) {} diff --git a/encoding/codecv1/codecv1_test.go b/encoding/codecv1/codecv1_test.go deleted file mode 100644 index b914ed6..0000000 --- a/encoding/codecv1/codecv1_test.go +++ /dev/null @@ -1,892 +0,0 @@ -package codecv1 - -import ( - "encoding/hex" - "encoding/json" - "os" - "strings" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/stretchr/testify/assert" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV1BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v1 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv1, err := NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv1 := hex.EncodeToString(blockv1.Encode()) - - assert.Equal(t, encodedv0, encodedv1) - } -} - -func TestCodecV1ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := 
hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV1ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: 
"0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func TestCodecV1BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV1)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc75530000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f94110000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b401a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "01000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d520801a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a60000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8014ae5927a983081a8bcdbcce19e926c9e4c56e2dc89c91c32c034b875b8a1ca00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e13476701b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) -} - -func TestCodecV1BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV1)} - assert.Equal(t, "0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564", batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd557b02638c0385d5124f7fc188a025b33f8819b7f78c000751404997148ab8b", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xf13c7e249d00941c59fe4cd970241bbd6753eede8e043c438165674031792b3b", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xb64208f07fab641f7ebf831686d05ad667da0c7bfabcbd9c878cc22cbc8032fd", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x4f7426d164e885574a661838406083f5292b0a1bc6dc20c51129eed0723b8a27", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, 
"../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xfce89ec2aed85cebeb20eea722e3ae4ec622bff49218dbe249a2d358e2e85451", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x8fc063179b709bab338674278bb7b70dce2879a4e11ea857b3a202fb3313559f", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xf1c94cdf45967bc60bfccd599edd8cb07fd0201f41ab068637834f86140f62bf", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0", batch.Hash().Hex()) -} - -func TestCodecV1BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = 
NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) -} - -func TestCodecV1BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, - // metadata - "00"+"0001"+"000000e6"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00"+"00"+"000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+ - // tx payload - "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1", encoded) - assert.Equal(t, "0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553", batch.BlobVersionedHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"000001000016310000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d26080604000523480156200001157600080fd5b50604051620014b2380380620014b283390081810160405260a08110156200003757600080fd5b8151602083015160408000850180519151939592948301929184640100000000821115620000635760000080fd5b9083019060208201858111156200007957600080fd5b8251640100000000008111828201881017156200009457600080fd5b8252508151602091820100929091019080838360005b83811015620000c357818101518382015260200100620000a9565b50505050905090810190601f168015620000f1578082038051006001836020036101000a031916815260200191505b5060405260200180516000405193929190846401000000008211156200011557600080fd5b908301906000208201858111156200012b57600080fd5b8251640100000000811182820188001017156200014657600080fd5b8252508151602091820192909101908083830060005b83811015620001755781810151838201526020016200015b565b5050005050905090810190601f168015620001a3578082038051600183602003610100000a031916815260200191505b506040526020908101518551909350859250008491620001c8916003918501906200026b565b508051620001de906004906000208401906200026b565b50506005805461ff001960ff199091166012171690005550600680546001600160a01b038088166001600160a01b031992831617900092556007805492871692909116919091179055620002308162000255565b5000506005805462010000600160b01b031916336201000002179055506200030700915050565b6005805460ff191660ff92909216919091179055565b82805460000181600116156101000203166002900490600052602060002090601f01602000900481019282601f10620002ae57805160ff1916838001178555620002de56005b82800160010185558215620002de579182015b82811115620002de57825100825591602001919060010190620002c1565b50620002ec929150620002f056005b5090565b5b80821115620002ec5760008155600101620002f1565b61119b0080620003176000396000f3fe608060405234801561001057600080fd5b50600004361061010b5760003560e01c80635c975abb116100a257806395d89b41110061007157806395d89b41146103015780639dc29fac14610309578063a457c200d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b00565b80635c975abb1461029d57806370a08231146102a55780638456cb5914006102cb5780638e50817a146102d35761010b565b8063313ce567116100de57008063313ce5671461021d578063395093511461023b5780633f4ba83a146102006757806340c10f19146102715761010b565b806306fdde031461011057806300095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e757005b600080fd5b6101186103bb565b604080516020808252835181830152835100919283929083019185019080838360005b838110156101525781810151838200015260200161013a565b50505050905090810190601f16801561017f578082000380516001836020036101000a031916815260200191505b50925050506040005180910390f35b6101b9600480360360408110156101a357600080fd5b50600001600160a01b038135169060200135610451565b60408051911515825251900081900360200190f35b6101d561046e565b6040805191825251908190036020000190f35b6101b9600480360360608110156101fd57600080fd5b50600160010060a01b03813581169160208101359091169060400135610474565b610225610004fb565b6040805160ff9092168252519081900360200190f35b6101b9600400803603604081101561025157600080fd5b506001600160a01b03813516906000200135610504565b61026f610552565b005b61026f600480360360408110150061028757600080fd5b506001600160a01b0381351690602001356105a9565b006101b9610654565b6101d5600480360360208110156102bb57600080fd5b5000356001600160a01b0316610662565b61026f61067d565b61026f60048036030060408110156102e957600080fd5b506001600160a01b0381358116916020010035166106d2565b610118610757565b61026f6004803603604081101561031f0057600080fd5b506001600160a01b0381351690602001356107b8565b6101b9006004803603604081101561034b576000
80fd5b506001600160a01b0381351600906020013561085f565b6101b96004803603604081101561037757600080fd005b506001600160a01b0381351690602001356108c7565b6101d560048036030060408110156103a357600080fd5b506001600160a01b0381358116916020010035166108db565b60038054604080516020601f600260001961010060018816001502019095169490940493840181900481028201810190925282815260609300909290918301828280156104475780601f1061041c5761010080835404028300529160200191610447565b820191906000526020600020905b8154815290600001019060200180831161042a57829003601f168201915b505050505090509000565b600061046561045e610906565b848461090a565b50600192915050565b0060025490565b60006104818484846109f6565b6104f18461048d610906565b006104ec8560405180606001604052806028815260200161108560289139600100600160a01b038a166000908152600160205260408120906104cb610906565b006001600160a01b031681526020810191909152604001600020549190610b5100565b61090a565b5060019392505050565b60055460ff1690565b600061046500610511610906565b846104ec8560016000610522610906565b6001600160a0001b03908116825260208083019390935260409182016000908120918c16815200925290205490610be8565b6007546001600160a01b0316331461059f57604000805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c0049565b565b600554610100900460ff16156105f9576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610646576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610ced565b5050565b600554610100900460ff001690565b6001600160a01b031660009081526020819052604090205490565b006007546001600160a01b031633146106ca576040805162461bcd60e51b81520060206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b60440082015290519081900360640190fd5b6105a7610ddd565b600554620100009000046001600160a01b03163314610726576040805162461bcd60e51b81526020006004820152600c60248201526b6f6e6c7920466163746f727960a01b60448200015290519081900360640190fd5b600780546001600160a01b03928316600100600160a01b0319918216179091556006805493909216921691909117905556005b60048054604080516020601f600260001961010060018816150201909516009490940493840181900481028201810190925282815260609390929091830100828280156104475780601f1061041c5761010080835404028352916020019100610447565b600554610100900460ff1615610808576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610855576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610e65565b600061046561086c610906565b84006104ec85604051806060016040528060258152602001611117602591396001006000610896610906565b6001600160a01b0390811682526020808301939093005260409182016000908120918d16815292529020549190610b51565b6000610004656108d4610906565b84846109f6565b6001600160a01b0391821660009000815260016020908152604080832093909416825291909152205490565b339000565b6001600160a01b03831661094f5760405162461bcd60e51b8152600401008080602001828103825260248152602001806110f3602491396040019150500060405180910390fd5b6001600160a01b0382166109945760405162461bcd6000e51b815260040180806020018281038252602281526020018061103d602291003960400191505060405180910390fd5b6001600160a01b038084166000818100526001602090815260408083209487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b9259281900390910190a3505050565b6001600160a01b03831600610
a3b5760405162461bcd60e51b8152600401808060200182810382526025008152602001806110ce6025913960400191505060405180910390fd5b600160000160a01b038216610a805760405162461bcd60e51b815260040180806020010082810382526023815260200180610ff8602391396040019150506040518091000390fd5b610a8b838383610f61565b610ac8816040518060600160405280600026815260200161105f602691396001600160a01b038616600090815260208100905260409020549190610b51565b6001600160a01b03808516600090815260002081905260408082209390935590841681522054610af79082610be8565b600001600160a01b03808416600081815260208181526040918290209490945580005185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952b00a7f163c4a11628f55a4df523b3ef92918290030190a3505050565b6000818400841115610be05760405162461bcd60e51b8152600401808060200182810382005283818151815260200191508051906020019080838360005b83811015610b00a5578181015183820152602001610b8d565b50505050905090810190601f16008015610bd25780820380516001836020036101000a03191681526020019150005b509250505060405180910390fd5b505050900390565b60008282018381100015610c42576040805162461bcd60e51b815260206004820152601b6024820100527f536166654d6174683a206164646974696f6e206f766572666c6f77000000000000604482015290519081900360640190fd5b9392505050565b60055461000100900460ff16610c9c576040805162461bcd60e51b81526020600482015200601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b00604482015290519081900360640190fd5b6005805461ff00191690557f5db900ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa61000cd0610906565b604080516001600160a01b03909216825251908190036020000190a1565b6001600160a01b038216610d48576040805162461bcd60e51b81005260206004820152601f60248201527f45524332303a206d696e7420746f2000746865207a65726f20616464726573730060448201529051908190036064010090fd5b610d5460008383610f61565b600254610d619082610be8565b600255006001600160a01b038216600090815260208190526040902054610d87908261000be8565b6001600160a01b038316600081815260208181526040808320949000945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa95002ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b60055400610100900460ff1615610e2d576040805162461bcd60e51b81526020600482000152601060248201526f14185d5cd8589b194e881c185d5cd95960821b60440082015290519081900360640190fd5b6005805461ff0019166101001790557f0062e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a20058610cd0610906565b6001600160a01b038216610eaa5760405162461bcd6000e51b81526004018080602001828103825260218152602001806110ad602191003960400191505060405180910390fd5b610eb682600083610f61565b610ef3008160405180606001604052806022815260200161101b60229139600160016000a01b0385166000908152602081905260409020549190610b51565b600160010060a01b038316600090815260208190526040902055600254610f199082610f00b5565b6002556040805182815290516000916001600160a01b038516917fdd00f252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef009181900360200190a35050565b610f6c838383610fb0565b610f7461065456005b15610fb05760405162461bcd60e51b81526004018080602001828103825200602a81526020018061113c602a913960400191505060405180910390fd5b50005050565b6000610c4283836040518060400160405280601e81526020017f53006166654d6174683a207375627472616374696f6e206f766572666c6f77000000815250610b5156fe45524332303a207472616e7366657220746f2074686520007a65726f206164647265737345524332303a206275726e20616d6f756e742000657863656564732062616c616e636545524332303a20617070726f76652074006f20746865207a65726f206164647265737345524332303a207472616e736600657220616d6f756e7420657863656564732062616c616e636545524332303a00207472616e7366657220616d6f756e74206578636565647320616c6c6f7761006e636545524332303a206275726e2066726f6d
20746865207a65726f20616400647265737345524332303a207472616e736665722066726f6d20746865207a0065726f206164647265737345524332303a20617070726f76652066726f6d2000746865207a65726f206164647265737345524332303a206465637265617365006420616c6c6f77616e63652062656c6f77207a65726f4552433230506175730061626c653a20746f6b656e207472616e73666572207768696c652070617573006564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed0057770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000009570045544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7", encoded) - assert.Equal(t, "0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411", batch.BlobVersionedHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0000010000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080008", encoded) - assert.Equal(t, "0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd", batch.BlobVersionedHash.Hex()) - - // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) - - // 15 chunks - 
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t,
-		// metadata
-		"00"+"000f"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"00"+"00"+"0000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+
-		// tx payload
-		"00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea003f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ece00a0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86d00f514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288b00baf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf000d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f0010c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f002b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b009aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d0002c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b00219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d199600b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940100bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000800083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393e00b095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87938000aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b600e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae9900c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cb00d19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8007101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce941100ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b002cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec005bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e830700a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de10200513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c57008fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e900a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea000f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7730016a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6e00ed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2ade00ceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7b00a5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd7300e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9a00ec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d0200c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc060015b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03998586600d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e0081065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a100209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260004393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f00879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6a00cb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab0007ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df51400a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf4002a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d6900ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c100be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b460004bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec002e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c700e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b001de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b500243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae600bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000808301009ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb09500b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac100c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb009e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67a00a78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19f00eacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710100843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a00152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cac00e28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd400aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a1200094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d0056548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd700f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03f00b2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e008307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e1004af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b100bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d825006f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e8106005f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", encoded)
-	assert.Equal(t, "0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284", batch.BlobVersionedHash.Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b411161007157806395d89b41146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b6003805460408051602060001f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b0316331461072657604000805162461bcd60e51b815260206004820152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b60078054006001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b038316600081810052602081815260408083209490945583518581529351929391927fddf252ad001be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b600554610100900460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded)
-	assert.Equal(t, "0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf", batch.BlobVersionedHash.Hex())
-}
-
-func TestCodecV1BatchChallenge(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef", hex.EncodeToString(batch.z[:]))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e58", hex.EncodeToString(batch.z[:]))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8", hex.EncodeToString(batch.z[:]))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b", hex.EncodeToString(batch.z[:]))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab", hex.EncodeToString(batch.z[:]))
-}
-
-func repeat(element byte, count int) string {
-	result := make([]byte, 0, count)
-	for i := 0; i < count; i++ {
-		result = append(result, element)
-	}
-	return "0x" + common.Bytes2Hex(result)
-}
-
-func TestCodecV1BatchChallengeWithStandardTestCases(t *testing.T) {
-	nRowsData := 126914
-
-	for _, tc := range []struct {
-		chunks    [][]string
-		expectedz string
-		expectedy string
-	}{
-		// single empty chunk
-		{chunks: [][]string{{}}, expectedz: "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", expectedy: "304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd08"},
-		// single non-empty chunk
-		{chunks: [][]string{{"0x010203"}}, expectedz: "1c1d4bd5153f877d799853080aba243f2c186dd6d6064eaefacfe715c92b6354", expectedy: "24e80ed99526b0d15ba46f7ec682f517576ddae68d5131e5d351f8bae06ea7d3"},
-		// multiple empty chunks
-		{chunks: [][]string{{}, {}}, expectedz: "152c9ccfcc2884f9891f7adce2de110cf9f85bfd0e21f0933ae0636390a84d41", expectedy: "5f6f532676e25b49e2eae77513fbeca173a300b434c0a5e24fa554b68e27d582"},
-		// multiple non-empty chunks
-		{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "62100f5381179ea7db7aa8fdedb0f7fc7b82730b75432d50ab41f80aeebe45a3", expectedy: "5b1f6e7a54907ddc06871853cf1f5d53bf2de0df7b61d0df84bc2c3fb80320cd"},
-		// empty chunk followed by non-empty chunk
-		{chunks: [][]string{{}, {"0x010203"}}, expectedz: "2d94d241c4a2a8d8f02845ca40cfba344f3b42384af2045a75c82e725a184232", expectedy: "302416c177e9e7fe40c3bc4315066c117e27d246b0a33ef68cdda6dd333c485c"},
-		// non-empty chunk followed by empty chunk
-		{chunks: [][]string{{"0x070809"}, {}}, expectedz: "7227567e3b1dbacb48a32bb85e4e99f73e4bd5620ea8cd4f5ac00a364c86af9c", expectedy: "2eb3dfd28362f35f562f779e749a555d2f1f87ddc716e95f04133d25189a391c"},
-		// max number of chunks all empty
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "1128ac3e22ced6af85be4335e0d03a266946a7cade8047e7fc59d6c8be642321", expectedy: "2d9b16422ce17f328fd00c99349768f0cb0c8648115eb3bd9b7864617ba88059"},
-		// max number of chunks all non-empty
-		{chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, {"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1a4025a3d74e70b511007dd55a2e252478c48054c6383285e8a176f33d99853b", expectedy: "12071ac2571c11220432a27b8be549392892e9baf4c654748ca206def3843940"},
-		// single chunk blob full
-		{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "72714cc4a0ca75cee2d543b1f958e3d3dd59ac7df0d9d5617d8117b65295a5f2", expectedy: "4ebb690362bcbc42321309c210c99f2ebdb53b3fcf7cf3b17b78f6cfd1203ed3"},
-		// multiple chunks blob full
-		{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "70eb5b4db503e59413238eef451871c5d12f2bb96c8b96ceca012f4ca0114727", expectedy: "568d0aaf280ec83f9c81ed2d80ecbdf199bd72dafb8a350007d37ea82997e455"},
-		// max number of chunks only last one non-empty not full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "03db68ae16ee88489d52db19e6111b25630c5f23ad7cd14530aacf0cd231d476", expectedy: "24527d0b0e93b3dec0060c7b128975a8088b3104d3a297dc807ab43862a77a1a"},
-		// max number of chunks only last one non-empty full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "677670193f73db499cede572bcb55677f0d2f13d690f9a820bd00bf584c3c241", expectedy: "1d85677f172dbdf4ad3094a17deeb1df4d7d2b7f35ecea44aebffa757811a268"},
-		// max number of chunks but last is empty
-		{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "22935042dfe7df771b02c1f5cababfe508869e8f6339dabe25a8a32e37728bb0", expectedy: "48ca66fb5a094401728c3a6a517ffbd72c4d4d9a8c907e2d2f1320812f4d856f"},
-	} {
-		chunks := []*encoding.Chunk{}
-
-		for _, c := range tc.chunks {
-			block := &encoding.Block{Transactions: []*types.TransactionData{}}
-
-			for _, data := range c {
-				tx := &types.TransactionData{Type: 0xff, Data: data}
-				block.Transactions = append(block.Transactions, tx)
-			}
-
-			chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}}
-			chunks = append(chunks, chunk)
-		}
-
-		b, _, z, err := constructBlobPayload(chunks, true /* use mock */)
-		assert.NoError(t, err)
-		actualZ := hex.EncodeToString(z[:])
-		assert.Equal(t, tc.expectedz, actualZ)
-
-		_, y, err := kzg4844.ComputeProof(b, *z)
-		assert.NoError(t, err)
-		actualY := hex.EncodeToString(y[:])
-		assert.Equal(t, tc.expectedy, actualY)
-
-	}
-}
-
-func TestCodecV1BatchBlobDataProof(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err := batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef423dc493f1dd7c9fbecdffa021ca4649b13e8d72231487034ec6b27e155ecfd7b44a38af1f9a6c70cd3ccfbf71968f447aa566bbafb0bbc566fc9eeb42973484802635a1bbd8305d34a46693331bf607b38542ec811c92d86ff6f3319de06ee60c42655278ccf874f3615f450de730895276828b73db03c553b0bc7e5474a5e0", hex.EncodeToString(verifyData))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e5821975f318babe50be728f9b52754d5ce2caa2ba82ba35b5888af1c5f28d23206b8aab265dc352e352807a298f7bb99d432c7cd543e63158cbdb8fbf99f3182a71af35ccbed2693c5e0bc5be38d565e868e0c6fe7bd39baa5ee6339cd334a18af7c680d24e825262499e83b31633b13a9ee89813fae8441630c82bc9dce3f1e07", hex.EncodeToString(verifyData))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8088a01e54e3565d2e91ce6afbadf479330847d9106737875303ce17f17c48722afd4e1c55a17dbdf8390b5736158afe238d82f8b696669ba47015fcdfd4d1becd0ff7a47f8f379a4ac8d1741e2d67624aee03a0f7cdb7807bc7e0b9fb20bc299af2a35e38cda816708b40f2f18db491e14a0f5d9cfe2f4c12e4ca1a219484f17", hex.EncodeToString(verifyData))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b54d28f1479467d8b97fb99f5257d3e5d63a81cb2d60e3564fe6ec6066a311c119743324c70e20042de6480f115b215fbba3472a8b994303a99576c1244aa4aec22fdfe6c74ec728aa28a9eb3812bc932a0b603cc94be2007d4b3b17af06b4fb30caf0e574d5abcfc5654079e65154679afad75844396082a7200a4e82462aeed", hex.EncodeToString(verifyData))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProof()
-	assert.NoError(t, err)
-	assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab35b73ddb4a78fc4a8540f1d8259512c46e606a701e7ef7742e38cc4562ef53b983bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fda28e5610ca6b185d6ac30b53bd83d6366fccb1956daafa90ff6b504a966b119ebb45cb3f7085b7c1d622ee1ad27fcff9", hex.EncodeToString(verifyData))
-}
-
-func TestCodecV1BatchSkipBitmap(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 0, int(batch.L1MessagePopped))
-	assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 0, int(batch.L1MessagePopped))
-	assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000003ff", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1
-	assert.Equal(t, 11, int(batch.TotalL1MessagePopped))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0000000000000000000000000000000000000000000000000000001fffffffff", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000001dd", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3
-	assert.Equal(t, 10, int(batch.TotalL1MessagePopped))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2
-	assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2
-	assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}                 // queue index 37-41
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0000000000000000000000000000000000000000000000000000001ffffffbff", hex.EncodeToString(batch.SkippedL1MessageBitmap))
-	assert.Equal(t, 42, int(batch.L1MessagePopped))
assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000007fffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV1ChunkAndBatchCommitBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BlobSize, err := EstimateChunkL1CommitBlobSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(302), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BlobSize, err := EstimateBatchL1CommitBlobSize(batch2) - assert.NoError(t, err) - assert.Equal(t, uint64(302), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BlobSize, err := EstimateChunkL1CommitBlobSize(chunk3) - assert.NoError(t, err) - assert.Equal(t, uint64(5929), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BlobSize, err := EstimateBatchL1CommitBlobSize(batch3) - assert.NoError(t, err) - assert.Equal(t, uint64(5929), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BlobSize, err := EstimateChunkL1CommitBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(98), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4BlobSize, err := EstimateBatchL1CommitBlobSize(batch4) - assert.NoError(t, err) - assert.Equal(t, uint64(98), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BlobSize, err := EstimateChunkL1CommitBlobSize(chunk5) - assert.NoError(t, err) - assert.Equal(t, uint64(6166), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BlobSize, err := EstimateChunkL1CommitBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(98), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BlobSize, err := EstimateBatchL1CommitBlobSize(batch5) - assert.NoError(t, err) - assert.Equal(t, uint64(6199), batch5BlobSize) -} - -func TestCodecV1ChunkAndBatchCommitCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4BlobSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BlobSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func TestCodecV1ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2Gas := EstimateBlockL1CommitGas(block2) - assert.Equal(t, uint64(960), block2Gas) - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(1124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(157649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3Gas := EstimateBlockL1CommitGas(block3) - assert.Equal(t, uint64(960), block3Gas) - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(1124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(157649), batch3Gas) - - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4Gas := EstimateBlockL1CommitGas(block4) - assert.Equal(t, uint64(3572), block4Gas) - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(3745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(160302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(2202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(3745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(163087), batch5Gas) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2.go similarity index 70% rename from encoding/codecv2/codecv2.go rename to encoding/codecv2.go index 7588394..e592304 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2.go @@ -1,4 +1,4 @@ -package codecv2 +package encoding import ( "crypto/sha256" @@ -14,22 +14,22 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" - 
"github.com/scroll-tech/da-codec/encoding/codecv1" "github.com/scroll-tech/da-codec/encoding/zstd" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = 45 +type DACodecV2 struct{} -// DABlock represents a Data Availability Block. -type DABlock = codecv1.DABlock +// Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv2MaxNumChunks = 45 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv1.DAChunk +// DABlockV2 represents a Data Availability Block. +type DABlockV2 = DABlockV1 + +// DAChunkV2 groups consecutive DABlocks with their transactions. +type DAChunkV2 = DAChunkV1 // DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +type DABatchV2 struct { // header Version uint8 BatchIndex uint64 @@ -46,19 +46,19 @@ type DABatch struct { } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv1.NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) } // NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv1.NewDAChunk(chunk, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + return (&DACodecV1{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) } // NewDABatch creates a DABatch from the provided encoding.Batch. 
-func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
+func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
-	if len(batch.Chunks) > MaxNumChunks {
+	if len(batch.Chunks) > Codecv2MaxNumChunks {
 		return nil, errors.New("too many chunks in batch")
 	}
@@ -67,25 +67,25 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
 	// skipped L1 messages bitmap
-	bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, _, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
+	blob, blobVersionedHash, z, _, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
 
-	daBatch := DABatch{
-		Version:              uint8(encoding.CodecV2),
+	daBatch := DABatchV2{
+		Version:              uint8(CodecV2),
 		BatchIndex:           batch.Index,
 		L1MessagePopped:      totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
 		TotalL1MessagePopped: totalL1MessagePoppedAfter,
@@ -104,21 +104,21 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
-	return codecv1.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+func (o *DACodecV2) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV1{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore)
 }
 
 // ConstructBlobPayload constructs the 4844 blob payload.
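The function below reserves a metadata header ahead of the chunk payloads: num_chunks in 2 bytes followed by one 4-byte size per chunk slot, i.e. 2 + 45*4 = 182 bytes for this codec. A standalone sketch of that layout, assuming big-endian field encoding as used elsewhere in this file (names and sizes here are illustrative):

	package main

	import (
		"encoding/binary"
		"fmt"
	)

	const maxNumChunks = 45 // mirrors Codecv2MaxNumChunks

	func main() {
		metadataLength := 2 + maxNumChunks*4 // 182 bytes reserved up front
		payload := make([]byte, metadataLength)

		chunkSizes := []uint32{1024, 2048} // illustrative per-chunk byte sizes
		binary.BigEndian.PutUint16(payload[0:], uint16(len(chunkSizes)))
		for i, size := range chunkSizes {
			binary.BigEndian.PutUint32(payload[2+4*i:], size)
		}
		fmt.Printf("metadata length: %d, first bytes: % x\n", metadataLength, payload[:10])
	}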
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV2) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + Codecv2MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+Codecv2MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -138,7 +138,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -156,10 +156,10 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunks chunks, the rest + // if we have fewer than Codecv2MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < Codecv2MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -177,7 +177,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. 
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -189,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -202,11 +202,11 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+Codecv2MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -219,12 +219,12 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV2{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -239,7 +239,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV2) Encode() []byte { batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) @@ -253,13 +253,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV2) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // BlobDataProof computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProof() ([]byte, error) { +func (b *DABatchV2) BlobDataProof() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProof with empty blob") } @@ -283,7 +283,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -291,13 +291,23 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { } // Blob returns the blob of the batch. 
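For reference, the challenge point z assembled above is keccak256(challengePreimage) reduced modulo the BLS12-381 scalar field order and left-padded to 32 bytes. A standalone sketch (FillBytes stands in for the manual left-padding copy; the modulus literal is EIP-4844's BLS_MODULUS):

	package main

	import (
		"fmt"
		"math/big"

		"github.com/scroll-tech/go-ethereum/crypto"
	)

	func main() {
		// BLS_MODULUS from EIP-4844: the BLS12-381 scalar field order.
		blsModulus, _ := new(big.Int).SetString("52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

		// 1 hash for the metadata, 1 per chunk slot, 1 for the blob versioned hash.
		challengePreimage := make([]byte, (1+45+1)*32)
		challengeDigest := crypto.Keccak256Hash(challengePreimage)

		// z = challenge_digest % BLS_MODULUS, left-padded to 32 bytes.
		pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus)
		var z [32]byte
		pointBigInt.FillBytes(z[:])
		fmt.Printf("z = %x\n", z)
	}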
-func (b *DABatch) Blob() *kzg4844.Blob {
+func (b *DABatchV2) Blob() *kzg4844.Blob {
 	return b.blob
 }
+// BlobBytes returns the blob bytes of the batch. It currently returns nil for DACodecV2.
+func (b *DABatchV2) BlobBytes() []byte {
+	return nil
+}
+
+// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. It currently returns nil for DACodecV2.
+func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) {
+	return nil, nil
+}
+
 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
-	batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks)
+func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
+	batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks)
 	if err != nil {
 		return 0, 0, err
 	}
@@ -305,12 +315,12 @@
 	if err != nil {
 		return 0, 0, err
 	}
-	return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }
 
 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
-	batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks)
+func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
+	batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks)
 	if err != nil {
 		return 0, 0, err
 	}
@@ -318,13 +328,13 @@
 	if err != nil {
 		return 0, 0, err
 	}
-	return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
 // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB.
-func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) {
-	batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks)
+func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) {
+	batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks)
 	if err != nil {
 		return false, err
 	}
@@ -336,7 +346,7 @@
 	if len(batchBytes) <= 131072 {
 		return true, nil
 	}
-	if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
+	if err = CheckCompressedDataCompatibility(blobBytes); err != nil {
 		log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
 		return false, nil
 	}
@@ -345,8 +355,8 @@
 // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch.
 // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB.
-func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) {
-	batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks)
+func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) {
+	batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks)
 	if err != nil {
 		return false, err
 	}
@@ -358,7 +368,7 @@
 	if len(batchBytes) <= 131072 {
 		return true, nil
 	}
-	if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
+	if err = CheckCompressedDataCompatibility(blobBytes); err != nil {
 		log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
 		return false, nil
 	}
@@ -366,26 +376,29 @@
 }
 
 // EstimateChunkL1CommitCalldataSize approximates the calldata size needed to commit a chunk to L1.
-func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 {
-	return codecv1.EstimateChunkL1CommitCalldataSize(c)
+func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) {
+	return (&DACodecV1{}).EstimateChunkL1CommitCalldataSize(c)
 }
 
 // EstimateBatchL1CommitCalldataSize approximates the calldata size needed to commit this batch to L1.
-func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 {
-	return codecv1.EstimateBatchL1CommitCalldataSize(b)
+func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) {
+	return (&DACodecV1{}).EstimateBatchL1CommitCalldataSize(b)
 }
 
 // EstimateBlockL1CommitGas approximates the total L1 commit gas for this block.
-func EstimateBlockL1CommitGas(b *encoding.Block) uint64 {
-	return codecv1.EstimateBlockL1CommitGas(b)
+func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) {
+	return (&DACodecV1{}).EstimateBlockL1CommitGas(b)
}
 
 // EstimateChunkL1CommitGas approximates the total L1 commit gas for this chunk.
-func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 {
-	return codecv1.EstimateChunkL1CommitGas(c)
+func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) {
+	return (&DACodecV1{}).EstimateChunkL1CommitGas(c)
 }
 
 // EstimateBatchL1CommitGas approximates the total L1 commit gas for this batch.
-func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 {
-	return codecv1.EstimateBatchL1CommitGas(b)
+func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
+	return (&DACodecV1{}).EstimateBatchL1CommitGas(b)
 }
+
+// SetCompression enables or disables compression. It is currently a no-op for DACodecV2.
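The two compressed-data compatibility checks above share one decision flow; it is condensed here into a single in-package sketch for illustration (compress stands in for the zstd compression call whose name is elided in the hunks above; the helper itself is not part of this patch):

	// compressedDataCompatible condenses the logic shared by
	// CheckChunkCompressedDataCompatibility and CheckBatchCompressedDataCompatibility.
	func compressedDataCompatible(batchBytes []byte, compress func([]byte) ([]byte, error)) (bool, error) {
		blobBytes, err := compress(batchBytes)
		if err != nil {
			return false, err
		}
		// Uncompressed payloads of at most 128 KiB are accepted without further checks.
		if len(batchBytes) <= 131072 {
			return true, nil
		}
		// Larger payloads must yield a compatible compressed stream;
		// incompatibility is a negative result, not an error.
		if err := CheckCompressedDataCompatibility(blobBytes); err != nil {
			return false, nil
		}
		return true, nil
	}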
+func (o *DACodecV2) SetCompression(enable bool) {} diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go deleted file mode 100644 index c34f608..0000000 --- a/encoding/codecv2/codecv2_test.go +++ /dev/null @@ -1,967 +0,0 @@ -package codecv2 - -import ( - "encoding/hex" - "encoding/json" - "os" - "strings" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV2BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v2 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv2, err := NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv2 := hex.EncodeToString(blockv2.Encode()) - - assert.Equal(t, encodedv0, encodedv2) - } -} - -func TestCodecV2ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: 
[]*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV2ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = 
append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func TestCodecV2BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV2)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd70000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "02000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad40000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc53394137000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "02000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc5339413700000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa002900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb3363200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) -} - -func TestCodecV2BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV2)} - assert.Equal(t, "0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592", batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x57553c35f981626b4d1a73c816aa8d8fad83c460fc049c5792581763f7e21b13", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x0f8e5b5205c5d809bf09047f37b558f4eb388c9c4eb23291cd97810d06654409", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc59155dc0ae7d7d3fc29f0a9c6042f14dc58e3a1f9c0417f52bac2c4a8b33014", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x417509641fb0c0d1c07d80e64aab13934f828cb4f09608722bf8126a68c04617", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, 
"../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe9c82b48e2a54c9206f57897cb870536bd22066d2af3d03aafe8a6a39add7635", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x5e3d20c5b3f56cc5a28e7431241b3ce3d484b12cfb0b3228f378b196beeb3a53", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x19b99491401625d92e16f7df6705219cc55e48e4b08db7bc4020e6934076f5f7", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc5daf2ea5a3107c13b2994fb547336a7dca25cd352c051b6d9b9759d77e95fd2", batch.Hash().Hex()) -} - -func TestCodecV2BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = 
NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) -} - -func TestCodecV2BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) - assert.Equal(t, "0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7", batch.BlobVersionedHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, "0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4", batch.BlobVersionedHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, "0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c", batch.BlobVersionedHash.Hex()) - - // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, 
"0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) - assert.Equal(t, "0x0140a7ef703ef625ee71e6a580a8ff05cab32c3f3402bd37a1b715f5810760c9", batch.BlobVersionedHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, "0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632", batch.BlobVersionedHash.Hex()) -} - -func TestCodecV2BatchChallenge(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) -} - -func TestCodecV2ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2Gas := EstimateBlockL1CommitGas(block2) - assert.Equal(t, uint64(960), block2Gas) - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(1124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(157649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3Gas := EstimateBlockL1CommitGas(block3) - assert.Equal(t, uint64(960), block3Gas) - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(1124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(157649), batch3Gas) - - 
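These estimators return plain gas counts, which makes them easy to compose. A hypothetical packing helper (pickChunks and the gasBudget parameter are illustrative, not part of this repository), sketched against the package-level codecv2 estimators exercised in this test:

func pickChunks(candidates []*encoding.Chunk, gasBudget uint64) []*encoding.Chunk {
    // Greedily extend the batch while the estimated L1 commit gas stays
    // under the caller-supplied budget.
    var packed []*encoding.Chunk
    for _, c := range candidates {
        trial := append(append([]*encoding.Chunk{}, packed...), c)
        if EstimateBatchL1CommitGas(&encoding.Batch{Chunks: trial}) > gasBudget {
            break
        }
        packed = trial
    }
    return packed
}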
block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4Gas := EstimateBlockL1CommitGas(block4) - assert.Equal(t, uint64(3572), block4Gas) - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(3745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(160302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(2202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(3745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(163087), batch5Gas) -} - -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} - -func TestCodecV2BatchStandardTestCases(t *testing.T) { - // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) - - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - expectedBlobVersionedHash string - expectedBatchHash string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "52003e842cce4d84085e1f884ac416f19f2424b5d71df7717159ffdcf47803cc"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "f143f754efac11fd7b1be1828a463e2fc92fb3adc9ba937f88ff7a4d3b5219e8"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "130c06cd2a0ec4c5f4d734bd9c61cf9d4acd150d347379d3fd42e2d93bf27c49"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "85426aad824f708bd55162b5bd0dbf800ae472d887f2c286ba4db19869d3dd20"}, - // empty chunk followed by non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: 
"dd1a539175e3b24b2a1da37db2fb0b77c7eb7e69e25c0cfd2b5d9918aba7fd07"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "cf067728aa2230e43897683e32e9bb6ec044ae37727ce206f10b707b81197b13"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "fb0c3918408cca7292d55fb93bc6416fe8c06c3b28336bd4a3264f1be5957e07"}, - // max number of chunks all non-empty - {chunks: [][]string{ - {"0x0a"}, - {"0x0a0b"}, - {"0x0a0b0c"}, - {"0x0a0b0c0d"}, - {"0x0a0b0c0d0e"}, - {"0x0a0b0c0d0e0f"}, - {"0x0a0b0c0d0e0f10"}, - {"0x0a0b0c0d0e0f1011"}, - {"0x0a0b0c0d0e0f101112"}, - {"0x0a0b0c0d0e0f10111213"}, - {"0x0a0b0c0d0e0f1011121314"}, - {"0x0a0b0c0d0e0f101112131415"}, - {"0x0a0b0c0d0e0f10111213141516"}, - {"0x0a0b0c0d0e0f1011121314151617"}, - {"0x0a0b0c0d0e0f101112131415161718"}, - {"0x0a0b0c0d0e0f10111213141516171819"}, - {"0x0a0b0c0d0e0f101112131415161718191a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, - 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "8b532d0fd0497a7041d72e0cba750c6ac0cfbeb5160d7c35e52b04d3935be578"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "51aac18f89ddafb75abb0e0c665e64e68421d5cf6b0cc87ce55d4b29e3a576dd"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "15bc741d48ac712d82418be97705c269816696eba6dcdc1c3ab821d482d005ee"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "b1149c99e4a0e576bda7ae518420e0c525efc72011f9c2f8c7b05b7fd3e0d3c2"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "e57e3e1fbb3cb5bb8f9362a66621b0b644d71ca50557b42041c0749fa5e05ea8"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, 
{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "8b13d4535977c990d66742293444b6e48e4252698045d66920fd7d4833688444"}, - } { - chunks := []*encoding.Chunk{} - - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} - - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) - require.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) - - _, y, err := kzg4844.ComputeProof(blob, *z) - require.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) - dataBytes := make([]byte, 32*len(chunks)) - for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) - } - dataHash := crypto.Keccak256Hash(dataBytes) - - batch := DABatch{ - Version: uint8(encoding.CodecV3), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - blob: blob, - z: z, - } - - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) - } -} - -func TestCodecV2BatchBlobDataProof(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err := batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") 
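Each proof asserted in this test is 160 bytes laid out as | bytes32 (z) | bytes32 (y) | bytes48 (commitment) | bytes48 (proof) |, the same tuple the codecv3 hunk further down feeds to the point-evaluation precompile; note the first 32 bytes match the challenge points checked in TestCodecV2BatchChallenge. A minimal sketch of assembling that tuple (assembleBlobDataProof is an illustrative helper; blob and z are assumed to come from ConstructBlobPayload, and the kzg4844 call forms mirror those already used in this file):

func assembleBlobDataProof(blob *kzg4844.Blob, z *kzg4844.Point) ([]byte, error) {
    // commitment and proof are recomputed from the blob; y is the blob
    // polynomial evaluated at the challenge point z.
    commitment, err := kzg4844.BlobToCommitment(blob)
    if err != nil {
        return nil, err
    }
    proof, y, err := kzg4844.ComputeProof(blob, *z)
    if err != nil {
        return nil, err
    }
    // 32 + 32 + 48 + 48 = 160 bytes, matching the hex strings asserted above
    return append(append(append(z[:], y[:]...), commitment[:]...), proof[:]...), nil
}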
- chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - 
chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) -} - -func TestCodecV2BatchSkipBitmap(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000003ff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001fffffffff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000001dd", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - 
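The bitmap hex strings asserted in this test follow one convention: counting starts at TotalL1MessagePoppedBefore, the bitmap is emitted as 32-byte words covering 256 messages each (word 0 first), every word is internally big-endian, and a set bit marks a skipped L1 message. A minimal sketch of that convention (buildSkippedBitmap is an illustrative helper, not the repository's implementation):

func buildSkippedBitmap(totalPoppedBefore uint64, skippedQueueIndices []uint64, totalPopped uint64) []byte {
    numWords := (totalPopped + 255) / 256 // one 32-byte word per 256 popped messages
    bitmap := make([]byte, numWords*32)
    for _, queueIndex := range skippedQueueIndices {
        i := queueIndex - totalPoppedBefore
        word, bit := i/256, i%256
        // each word is big-endian: bit 0 lives in the lowest bit of its last byte
        bitmap[word*32+31-bit/8] |= 1 << (bit % 8)
    }
    return bitmap
}

// buildSkippedBitmap(0, []uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, 11) yields
// 00...03ff, matching the trace4 case above: skip queue indices 0-9, include 10.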
trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001ffffffbff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000007fffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(237), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(237), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2933), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2933), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) 
- assert.Equal(t, uint64(54), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(54), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3149), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(54), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3186), batch5BlobSize) -} - -func TestCodecV2ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3.go similarity index 63% rename from encoding/codecv3/codecv3.go rename to encoding/codecv3.go index 0a85efa..c6fcc79 100644 --- 
a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3.go @@ -1,4 +1,4 @@ -package codecv3 +package encoding import ( "encoding/binary" @@ -9,22 +9,21 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv2" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = codecv2.MaxNumChunks +type DACodecV3 struct{} + +// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv3MaxNumChunks = 45 -// DABlock represents a Data Availability Block. -type DABlock = codecv2.DABlock +// DABlockV3 represents a Data Availability Block. +type DABlockV3 = DABlockV2 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv2.DAChunk +// DAChunkV3 groups consecutive DABlocks with their transactions. +type DAChunkV3 = DAChunkV2 -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +// DABatchV3 contains metadata about a batch of DAChunks. +type DABatchV3 struct { // header Version uint8 `json:"version"` BatchIndex uint64 `json:"batch_index"` @@ -44,20 +43,20 @@ type DABatch struct { blobBytes []byte } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv2.NewDABlock(block, totalL1MessagePoppedBefore) +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv2.NewDAChunk(chunk, totalL1MessagePoppedBefore) +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + return (&DACodecV2{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. 
+func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { + if len(batch.Chunks) > Codecv3MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -70,19 +69,19 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -90,8 +89,8 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - daBatch := DABatch{ - Version: uint8(encoding.CodecV3), + daBatch := DABatchV3{ + Version: uint8(CodecV3), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -116,23 +115,23 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv2.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +func (o *DACodecV3) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return (&DACodecV2{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return codecv2.ConstructBlobPayload(chunks, useMockTxData) +func (o *DACodecV3) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + return (&DACodecV2{}).ConstructBlobPayload(chunks, useMockTxData) } // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header; it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV3{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -151,7 +150,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. 
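Both the 193-byte length check in NewDABatchFromBytes and the buffer allocated by Encode below come from a fixed-width, big-endian header layout. Only the first three offsets appear in these hunks; the rest of the map is an inference from the V3 header fields and the 193-byte total, so treat the tail as an assumption:

// Assumed DABatchV3 header layout (fixed 193 bytes):
//   data[0]       Version               (1 byte)
//   data[1:9]     BatchIndex            (8 bytes, big-endian)
//   data[9:17]    L1MessagePopped       (8 bytes, big-endian)
//   data[17:25]   TotalL1MessagePopped  (8 bytes, big-endian)  <- inferred
//   data[25:57]   DataHash              (32 bytes)             <- inferred
//   data[57:89]   BlobVersionedHash     (32 bytes)             <- inferred
//   data[89:121]  ParentBatchHash       (32 bytes)             <- inferred
//   data[121:129] LastBlockTimestamp    (8 bytes, big-endian)  <- inferred
//   data[129:193] BlobDataProof         (2 x 32 bytes)         <- inferred
const daBatchV3HeaderLen = 1 + 8 + 8 + 8 + 32 + 32 + 32 + 8 + 2*32 // = 193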
-func (b *DABatch) Encode() []byte { +func (b *DABatchV3) Encode() []byte { batchBytes := make([]byte, 193) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) @@ -167,13 +166,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV3) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -198,7 +197,7 @@ func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -222,7 +221,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -230,51 +229,62 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV3) Blob() *kzg4844.Blob { return b.blob } // BlobBytes returns the blob bytes of the batch. -func (b *DABatch) BlobBytes() []byte { +func (b *DABatchV3) BlobBytes() []byte { return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) +func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return (&DACodecV2{}).EstimateChunkL1CommitBatchSizeAndBlobSize(c) } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b) +func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return (&DACodecV2{}).EstimateBatchL1CommitBatchSizeAndBlobSize(b) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - return codecv2.CheckChunkCompressedDataCompatibility(c) +func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return (&DACodecV2{}).CheckChunkCompressedDataCompatibility(c) } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
-func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - return codecv2.CheckBatchCompressedDataCompatibility(b) +func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return (&DACodecV2{}).CheckBatchCompressedDataCompatibility(b) } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv2.EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return (&DACodecV2{}).EstimateChunkL1CommitCalldataSize(c) } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv2.EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + return (&DACodecV2{}).EstimateBatchL1CommitCalldataSize(b) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv2.EstimateChunkL1CommitGas(c) + 50000 // plus 50000 for the point-evaluation precompile call. +func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + chunkL1CommitGas, err := (&DACodecV2{}).EstimateChunkL1CommitGas(c) + if err != nil { + return 0, err + } + return chunkL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv2.EstimateBatchL1CommitGas(b) + 50000 // plus 50000 for the point-evaluation precompile call. +func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + batchL1CommitGas, err := (&DACodecV2{}).EstimateBatchL1CommitGas(b) + if err != nil { + return 0, err + } + return batchL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. } + +// SetCompression enables or disables compression. 
+// SetCompression is a no-op; the enable flag has no effect for codecv3.
+func (o *DACodecV3) SetCompression(enable bool) {}
diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go
deleted file mode 100644
index fef0c12..0000000
--- a/encoding/codecv3/codecv3_test.go
+++ /dev/null
@@ -1,1098 +0,0 @@
-package codecv3
-
-import (
-	"encoding/hex"
-	"encoding/json"
-	"os"
-	"strings"
-	"testing"
-
-	"github.com/scroll-tech/go-ethereum/common"
-	"github.com/scroll-tech/go-ethereum/core/types"
-	"github.com/scroll-tech/go-ethereum/crypto"
-	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-
-	"github.com/scroll-tech/da-codec/encoding"
-	"github.com/scroll-tech/da-codec/encoding/codecv0"
-)
-
-func TestCodecV3BlockEncode(t *testing.T) {
-	block := &DABlock{}
-	encoded := hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	block, err := NewDABlock(trace2, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	block, err = NewDABlock(trace3, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	block, err = NewDABlock(trace4, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded)
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	block, err = NewDABlock(trace5, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded)
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	block, err = NewDABlock(trace6, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded)
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	block, err = NewDABlock(trace7, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
-
-	// sanity check: v0 and v3 block encodings are identical
-	for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
-		blockv0, err := codecv0.NewDABlock(trace, 0)
-		assert.NoError(t, err)
-		encodedv0 := hex.EncodeToString(blockv0.Encode())
-
-		blockv3, err := NewDABlock(trace, 0)
-		assert.NoError(t, err)
-		encodedv3 := hex.EncodeToString(blockv3.Encode())
-
-		assert.Equal(t, encodedv0, encodedv3)
-	}
-}
-
-func TestCodecV3ChunkEncode(t *testing.T) {
-	// chunk with a single empty block
-	block := DABlock{}
-	chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}}
-	encoded := hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	// transactions are not part of the encoding
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType})
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err := NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
-}
-
-func TestCodecV3ChunkHash(t *testing.T) {
-	// chunk with a single empty block
-	block := DABlock{}
-	chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}}
-	hash, err := chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex())
-
-	// L1 transactions are part of the hash
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"})
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex())
-
-	// L2 transactions are not part of the hash
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"})
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex())
-
-	// numL1Messages are not part of the hash
-	chunk.Blocks[0].NumL1Messages = 1
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex())
-
-	// invalid hash
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"})
-	_, err = chunk.Hash()
-	assert.Error(t, err)
-
-	trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex())
-}
-
-func TestCodecV3BatchEncode(t *testing.T) {
-	// empty batch
-	batch := &DABatch{Version: uint8(encoding.CodecV3)}
-	encoded := hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded)
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
-
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded)
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded)
-}
-
-func TestCodecV3BatchHash(t *testing.T) {
-	// empty batch
-	batch := &DABatch{Version: uint8(encoding.CodecV3)}
-	assert.Equal(t, "0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0", batch.Hash().Hex())
-
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex())
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex())
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex())
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex())
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex())
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex())
-
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex())
-}
-
-func TestCodecV3BatchDataHash(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex())
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex())
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex())
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex())
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex())
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex())
-
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex())
-}
-
-func TestCodecV3BatchBlob(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded)
-	assert.Equal(t, "0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7", batch.BlobVersionedHash.Hex())
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded)
-	assert.Equal(t, "0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4", batch.BlobVersionedHash.Hex())
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded)
-	assert.Equal(t, "0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c", batch.BlobVersionedHash.Hex())
-
-	// this batch only contains L1 txs
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
-	assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex())
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
-	assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex())
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
-	assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex())
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded)
-	assert.Equal(t, "0x0140a7ef703ef625ee71e6a580a8ff05cab32c3f3402bd37a1b715f5810760c9", batch.BlobVersionedHash.Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded)
-	assert.Equal(t, "0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632", batch.BlobVersionedHash.Hex())
-}
-
-func TestCodecV3BatchChallenge(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:]))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:]))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:]))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:]))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:]))
-}
-
-func TestCodecV3ChunkAndBatchCommitGasEstimation(t *testing.T) {
-	block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}}
-	chunk2Gas := EstimateChunkL1CommitGas(chunk2)
-	assert.Equal(t, uint64(51124), chunk2Gas)
-	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2Gas := EstimateBatchL1CommitGas(batch2)
-	assert.Equal(t, uint64(207649), batch2Gas)
-
-	block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}}
-	chunk3Gas := EstimateChunkL1CommitGas(chunk3)
-	assert.Equal(t, uint64(51124), chunk3Gas)
-	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3Gas := EstimateBatchL1CommitGas(batch3)
-	assert.Equal(t, uint64(207649), batch3Gas)
-
-	block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}}
-	chunk4Gas := EstimateChunkL1CommitGas(chunk4)
-	assert.Equal(t, uint64(53745), chunk4Gas)
-	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch4Gas := EstimateBatchL1CommitGas(batch4)
-	assert.Equal(t, uint64(210302), batch4Gas)
-
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}}
-	chunk5Gas := EstimateChunkL1CommitGas(chunk5)
-	assert.Equal(t, uint64(52202), chunk5Gas)
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}}
-	chunk6Gas := EstimateChunkL1CommitGas(chunk6)
-	assert.Equal(t, uint64(53745), chunk6Gas)
-	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5Gas := EstimateBatchL1CommitGas(batch5)
-	assert.Equal(t, uint64(213087), batch5Gas)
-}
-
-func repeat(element byte, count int) string {
-	result := make([]byte, 0, count)
-	for i := 0; i < count; i++ {
-		result = append(result, element)
-	}
-	return "0x" + common.Bytes2Hex(result)
-}
-
-func TestCodecV3BatchStandardTestCases(t *testing.T) {
-	// Taking into consideration compression, we allow up to 5x of max blob bytes.
-	// We then ignore the metadata rows for 45 chunks.
-	maxChunks := 45
-	nRowsData := 5*126976 - (maxChunks*4 + 2)
-
-	for _, tc := range []struct {
-		chunks                    [][]string
-		expectedz                 string
-		expectedy                 string
-		expectedBlobVersionedHash string
-		expectedBatchHash         string
-	}{
-		// single empty chunk
-		{chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "edde6b1becf302856884f0b9da5879d58eeb822ddab14a06bacd8de9276dbc79"},
-		// single non-empty chunk
-		{chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "4c30ec3d03ecf70c479e802640a185cadf971e61acf68dac149ac73bdc645195"},
-		// multiple empty chunks
-		{chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "31fd0237208587df3ddbea413673b479e2daa84fd1143a519940267c37257b1a"},
-		// multiple non-empty chunks
-		{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "0e0e8fd8b4f8ceb0215a29cc8b95750c0d1969706573af8872f397747809a479"},
-		// empty chunk followed by non-empty chunk
-		{chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "d6b97dde29d4b8afb1a036ee54757af4087c939cb96cf17c2720e9f59eff19da"},
-		// non-empty chunk followed by empty chunk
-		{chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "3d56e12359c8b565f9cbe1c8f81e848be4635d9df84bc6ef0eb9986a15e08c20"},
-		// max number of chunks all empty
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "2e8078e277221a0d0e235ef825eef02653677bd50e259aeed64af5b95477645c"},
-		// max number of chunks all non-empty
-		{chunks: [][]string{
-			{"0x0a"},
-			{"0x0a0b"},
-			{"0x0a0b0c"},
-			{"0x0a0b0c0d"},
-			{"0x0a0b0c0d0e"},
-			{"0x0a0b0c0d0e0f"},
-			{"0x0a0b0c0d0e0f10"},
-			{"0x0a0b0c0d0e0f1011"},
-			{"0x0a0b0c0d0e0f101112"},
-			{"0x0a0b0c0d0e0f10111213"},
-			{"0x0a0b0c0d0e0f1011121314"},
-			{"0x0a0b0c0d0e0f101112131415"},
-			{"0x0a0b0c0d0e0f10111213141516"},
-			{"0x0a0b0c0d0e0f1011121314151617"},
-			{"0x0a0b0c0d0e0f101112131415161718"},
-			{"0x0a0b0c0d0e0f10111213141516171819"},
-			{"0x0a0b0c0d0e0f101112131415161718191a"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"},
-		}, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "e366eeacd45fbc2f43756f66d0a8f82f7f390a9aa7795df82e7df2d724856e7e"},
-		// single chunk blob full
-		{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "88e6df6a5e1112485995fe5957d57c90ff306343a9d8d80831b7a6c041daf728"},
-		// multiple chunks blob full
-		{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "7bd97fc7c8c7e918029e5bd85d3c9e0335117475c449d5c6dd24e5af9d55cfc6"},
-		// max number of chunks only last one non-empty not full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "8b50a41e08000b7617de7204d8082870c8446f591fadffcb5190fdeadf47fae5"},
-		// max number of chunks only last one non-empty full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "cc0592160b2fcdb58750d29c36662b55437f4bc69ba3d45a965590f534a0228c"},
-		// max number of chunks but last is empty
-		{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "043a40c8fbc4edb6a820ba4162f1368d157d1d59c07f969b2c584cc6a47385ca"},
-	} {
-		chunks := []*encoding.Chunk{}
-
-		for _, c := range tc.chunks {
-			block := &encoding.Block{Transactions: []*types.TransactionData{}}
-
-			for _, data := range c {
-				tx := &types.TransactionData{Type: 0xff, Data: data}
-				block.Transactions = append(block.Transactions, tx)
-			}
-
-			chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}}
-			chunks = append(chunks, chunk)
-		}
-
-		blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */)
-		require.NoError(t, err)
-		actualZ := hex.EncodeToString(z[:])
-		assert.Equal(t, tc.expectedz, actualZ)
-		assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash)
-
-		_, y, err := kzg4844.ComputeProof(blob, *z)
-		require.NoError(t, err)
-		actualY := hex.EncodeToString(y[:])
-		assert.Equal(t, tc.expectedy, actualY)
-
-		// Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000)
-		dataBytes := make([]byte, 32*len(chunks))
-		for i := range chunks {
-			copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
-		}
-		dataHash := crypto.Keccak256Hash(dataBytes)
-
-		batch := DABatch{
-			Version: uint8(encoding.CodecV3),
-			BatchIndex: 6789,
-			L1MessagePopped: 101,
-			TotalL1MessagePopped: 10101,
-			DataHash: dataHash,
-			BlobVersionedHash: blobVersionedHash,
-			ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}),
-			LastBlockTimestamp: 192837,
-			blob: blob,
-			z: z,
-		}
-
-		batch.BlobDataProof, err = batch.blobDataProofForPICircuit()
-		require.NoError(t, err)
-
-		assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash())
-	}
-}
-
-func TestCodecV3BatchBlobDataProof(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err := batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData))
-}
-
-func TestCodecV3BatchL1MessagePopped(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 0, int(batch.L1MessagePopped))
-	assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 0, int(batch.L1MessagePopped))
-	assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1
-	assert.Equal(t, 11, int(batch.TotalL1MessagePopped))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3
-	assert.Equal(t, 10, int(batch.TotalL1MessagePopped))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2
-	assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2
-	assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}                 // queue index 37-41
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 42, int(batch.L1MessagePopped))
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 10
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 32, int(batch.L1MessagePopped))
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-}
-
-func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(412), chunk2BatchBytesSize)
-	assert.Equal(t, uint64(237), chunk2BlobSize)
-	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(412), batch2BatchBytesSize)
-	assert.Equal(t, uint64(237), batch2BlobSize)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
-	assert.Equal(t, uint64(2933), chunk3BlobSize)
-	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(5863), batch3BatchBytesSize)
-	assert.Equal(t, uint64(2933), batch3BlobSize)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(214), chunk4BatchBytesSize)
-	assert.Equal(t, uint64(54), chunk4BlobSize)
-	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(214), blob4BatchBytesSize)
-	assert.Equal(t, uint64(54), batch4BlobSize)
-
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
-	assert.Equal(t, uint64(3149), chunk5BlobSize)
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(214), chunk6BatchBytesSize)
-	assert.Equal(t, uint64(54), chunk6BlobSize)
-	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
-	assert.Equal(t, uint64(3186), batch5BlobSize)
-}
-
-func TestCodecV3ChunkAndBatchCalldataSizeEstimation(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2)
-	assert.Equal(t, uint64(60), chunk2CalldataSize)
-	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2)
-	assert.Equal(t, uint64(60), batch2CalldataSize)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3)
-	assert.Equal(t, uint64(60), chunk3CalldataSize)
-	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3)
-	assert.Equal(t, uint64(60), batch3CalldataSize)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4)
-	assert.Equal(t, uint64(60), chunk4CalldataSize)
-	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4)
-	assert.Equal(t, uint64(60), batch4CalldataSize)
-
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5)
-	assert.Equal(t, uint64(120), chunk5CalldataSize)
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6)
-	assert.Equal(t, uint64(60), chunk6CalldataSize)
-	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5)
-	assert.Equal(t, uint64(180), batch5CalldataSize)
-}
-
-func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) {
-	t.Run("Case 1", func(t *testing.T) {
-		jsonStr := `{
-			"version": 3,
-			"batch_index": 293212,
-			"l1_message_popped": 7,
-			"total_l1_message_popped": 904750,
-			"data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450",
-			"blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e",
-			"parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee",
-			"last_block_timestamp": 1721130505,
-			"blob_data_proof": [
-				"0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e",
-				"0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"
-			]
-		}`
-
-		var batch DABatch
-		err := json.Unmarshal([]byte(jsonStr), &batch)
-		require.NoError(t, err)
-
-		assert.Equal(t, uint8(3), batch.Version)
-		assert.Equal(t, uint64(293212), batch.BatchIndex)
-		assert.Equal(t, uint64(7), batch.L1MessagePopped)
-		assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped)
-		assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash)
-		assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash)
-		assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash)
-		assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp)
-		assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0])
-		assert.Equal(t, 
common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x6c693817a272efd00dd1323a533a114bd0a8c63b55816fde36c5784a4125441d") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 2", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 123, - "l1_message_popped": 0, - "total_l1_message_popped": 0, - "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", - "last_block_timestamp": 1720174236, - "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", - "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", - "blob_data_proof": [ - "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", - "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(123), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) - assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x005661faf2444824b8a3fe1a53958195b197436a0df81b5d1677287bcd1c1923") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 3", func(t *testing.T) { - jsonStr := `{ - "version": 3, - "batch_index": 293205, - "l1_message_popped": 0, - "total_l1_message_popped": 904737, - "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", - "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", - "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", - "last_block_timestamp": 1721129563, - "blob_data_proof": [ - "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", - "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(3), batch.Version) - assert.Equal(t, uint64(293205), batch.BatchIndex) - assert.Equal(t, 
uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0xe86e067f78b1c29c1cc297f6d9fe670c7beea1eebb226d1b8eeb9616a2bcac7e") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4.go similarity index 70% rename from encoding/codecv4/codecv4.go rename to encoding/codecv4.go index b07e2be..d750127 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4.go @@ -1,4 +1,4 @@ -package codecv4 +package encoding import ( "crypto/sha256" @@ -7,6 +7,7 @@ import ( "errors" "fmt" "math/big" + "sync/atomic" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -14,22 +15,24 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv3" "github.com/scroll-tech/da-codec/encoding/zstd" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = codecv3.MaxNumChunks +type DACodecV4 struct { + enableCompress uint32 +} + +// Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv4MaxNumChunks = 45 -// DABlock represents a Data Availability Block. -type DABlock = codecv3.DABlock +// DABlockV4 represents a Data Availability Block. +type DABlockV4 = DABlockV3 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv3.DAChunk +// DAChunkV4 groups consecutive DABlocks with their transactions. +type DAChunkV4 = DAChunkV3 -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +// DABatchV4 contains metadata about a batch of DAChunks. +type DABatchV4 struct { // header Version uint8 `json:"version"` BatchIndex uint64 `json:"batch_index"` @@ -49,20 +52,20 @@ type DABatch struct { blobBytes []byte } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. 
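For orientation before the method-by-method changes: a minimal, illustrative sketch of driving the new struct-based API from outside the package. Only names introduced in this diff are used; the zero value of DACodecV4 is assumed usable, with compression off until SetCompression (added further below in this file) flips it.

package main

import (
	"encoding/hex"
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

func main() {
	codec := &encoding.DACodecV4{} // zero value: compression disabled
	codec.SetCompression(true)     // per-instance atomic toggle replaces the old enableCompress parameter

	// Serializing an empty header needs no blob payload attached.
	header := &encoding.DABatchV4{Version: uint8(encoding.CodecV4)}
	fmt.Println(hex.EncodeToString(header.Encode()))
}

The design point of the move from package-level functions to methods is visible here: the compression flag now has a home on the codec value instead of being threaded through every call.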
-func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv3.NewDABlock(block, totalL1MessagePoppedBefore) +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv3.NewDAChunk(chunk, totalL1MessagePoppedBefore) +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + return (&DACodecV3{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { + if len(batch.Chunks) > Codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -75,19 +78,19 @@ func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { } // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableCompress, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -95,8 +98,8 @@ func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - daBatch := DABatch{ - Version: uint8(encoding.CodecV4), + daBatch := DABatchV4{ + Version: uint8(CodecV4), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -121,21 +124,21 @@ func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. 
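To make the note above concrete, a short sketch of where each hash comes from. It assumes a *Batch populated elsewhere (e.g. from the block traces the tests load) and that the DABatch interface exposes Hash(), as DABatchV4 does.

package example

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

// computeBothHashes contrasts the two hashes: ComputeBatchDataHash rolls up
// the chunk hashes and feeds the provers' public input, while Hash() over the
// serialized header identifies the batch in the contracts.
func computeBothHashes(codec *encoding.DACodecV4, batch *encoding.Batch) error {
	dataHash, err := codec.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
	if err != nil {
		return err
	}
	daBatch, err := codec.NewDABatch(batch)
	if err != nil {
		return err
	}
	fmt.Printf("batch data hash: %s\nbatch hash: %s\n", dataHash.Hex(), daBatch.Hash().Hex())
	return nil
}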
-func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv3.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +func (o *DACodecV4) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return (&DACodecV3{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + Codecv4MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+Codecv4MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -155,7 +158,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -173,10 +176,10 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunks chunks, the rest + // if we have fewer than Codecv4MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < Codecv4MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -186,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableCompress { + if o.isCompressEnabled() { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) @@ -195,7 +198,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } if !useMockTxData { // Check compressed data compatibility. 
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -211,7 +214,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -224,11 +227,11 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+Codecv4MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -241,12 +244,12 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV4{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -265,7 +268,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV4) Encode() []byte { batchBytes := make([]byte, 193) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) @@ -281,13 +284,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV4) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *DABatchV4) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -312,7 +315,7 @@ func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. 
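Because NewDABatchFromBytes repopulates exactly the 193 header bytes that Encode emits, a decode/encode round trip should be lossless for the header fields, and therefore for the batch hash, which is keccak over Encode(). A sketch of that property with arbitrary field values; the equality check assumes the returned DABatch exposes Hash().

package example

import (
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"

	"github.com/scroll-tech/da-codec/encoding"
)

// roundTrip decodes a 193-byte header and checks the hash is preserved.
func roundTrip() error {
	in := &encoding.DABatchV4{
		Version:              uint8(encoding.CodecV4),
		BatchIndex:           6789,
		L1MessagePopped:      101,
		TotalL1MessagePopped: 10101,
		DataHash:             common.HexToHash("0x01"),
		BlobVersionedHash:    common.HexToHash("0x02"),
		ParentBatchHash:      common.HexToHash("0x03"),
		LastBlockTimestamp:   192837,
	}
	out, err := (&encoding.DACodecV4{}).NewDABatchFromBytes(in.Encode())
	if err != nil {
		return err
	}
	if out.Hash() != in.Hash() {
		return fmt.Errorf("round trip changed the header")
	}
	return nil
}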
-func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *DABatchV4) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -336,7 +339,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -344,23 +347,23 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV4) Blob() *kzg4844.Blob { return b.blob } // BlobBytes returns the blob bytes of the batch. -func (b *DABatch) BlobBytes() []byte { +func (b *DABatchV4) BlobBytes() []byte { return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) +func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableCompress { + if o.isCompressEnabled() { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -369,17 +372,17 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) +func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableCompress { + if o.isCompressEnabled() { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -388,12 +391,12 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
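Both estimators above follow the same shape: build the raw payload, optionally compress it, reserve a leading byte for the compression flag, then pad to the blob size. A sketch comparing the two modes on one batch; the *Batch is assumed populated, and the exact numbers depend on the payload, as in the size-estimation tests.

package example

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

// estimateBoth prints blob sizes with compression off and on for the same
// batch. The raw batch size is identical either way; only the blob side,
// which carries the flag byte plus padding, changes.
func estimateBoth(codec *encoding.DACodecV4, batch *encoding.Batch) error {
	codec.SetCompression(false)
	batchSize, rawBlobSize, err := codec.EstimateBatchL1CommitBatchSizeAndBlobSize(batch)
	if err != nil {
		return err
	}
	codec.SetCompression(true)
	_, zstdBlobSize, err := codec.EstimateBatchL1CommitBatchSizeAndBlobSize(batch)
	if err != nil {
		return err
	}
	fmt.Printf("batch %d B, blob raw %d B, blob zstd %d B\n", batchSize, rawBlobSize, zstdBlobSize)
	return nil
}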
-func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) +func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -401,7 +404,7 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { if err != nil { return false, err } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -409,8 +412,8 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) +func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -418,7 +421,7 @@ func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { if err != nil { return false, err } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -426,21 +429,35 @@ func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv3.EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return (&DACodecV3{}).EstimateChunkL1CommitCalldataSize(c) } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv3.EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + return (&DACodecV3{}).EstimateBatchL1CommitCalldataSize(b) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv3.EstimateChunkL1CommitGas(c) +func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + return (&DACodecV3{}).EstimateChunkL1CommitGas(c) } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv3.EstimateBatchL1CommitGas(b) +func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + return (&DACodecV3{}).EstimateBatchL1CommitGas(b) +} + +// isCompressEnabled checks if compression is enabled. 
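The flag read below goes through sync/atomic, so one goroutine can flip compression (say, on a config reload) while others keep building batches. A usage sketch, under the assumption that the codec value is shared across goroutines:

package example

import "github.com/scroll-tech/da-codec/encoding"

// toggleWhileEncoding flips compression concurrently with batch building;
// SetCompression is an atomic store and the codec reads the flag with an
// atomic load, so no extra locking is required around the toggle.
func toggleWhileEncoding(codec *encoding.DACodecV4, batches []*encoding.Batch) {
	done := make(chan struct{})
	go func() { // writer: e.g. a config-reload loop
		defer close(done)
		codec.SetCompression(true)
	}()
	for _, b := range batches { // readers: NewDABatch consults the flag
		_, _ = codec.NewDABatch(b)
	}
	<-done
}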
+func (o *DACodecV4) isCompressEnabled() bool { + return atomic.LoadUint32(&o.enableCompress) == 1 +} + +// SetCompression enables or disables compression. +func (o *DACodecV4) SetCompression(enable bool) { + if enable { + atomic.StoreUint32(&o.enableCompress, 1) + } else { + atomic.StoreUint32(&o.enableCompress, 0) + } } diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go deleted file mode 100644 index a824c64..0000000 --- a/encoding/codecv4/codecv4_test.go +++ /dev/null @@ -1,837 +0,0 @@ -package codecv4 - -import ( - "encoding/hex" - "encoding/json" - "os" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV4BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v4 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv4, err := NewDABlock(trace, 0) - 
assert.NoError(t, err) - encodedv4 := hex.EncodeToString(blockv4.Encode()) - - assert.Equal(t, encodedv0, encodedv4) - } -} - -func TestCodecV4ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV4ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: 
[][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func 
TestCodecV4BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV4)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) -} - -func TestCodecV4BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV4)} - assert.Equal(t, "0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c", 
batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) -} - -func TestCodecV4ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(51124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - 
assert.Equal(t, uint64(207649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(51124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(207649), batch3Gas) - - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(53745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(210302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(52202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(53745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(213087), batch5Gas) -} - -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} - -func TestCodecV4BatchStandardTestCases(t *testing.T) { - // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) - - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - expectedBlobVersionedHash string - expectedBatchHash string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, - // empty chunk followed by 
non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, - // max number of chunks all non-empty - {chunks: [][]string{ - {"0x0a"}, - {"0x0a0b"}, - {"0x0a0b0c"}, - {"0x0a0b0c0d"}, - {"0x0a0b0c0d0e"}, - {"0x0a0b0c0d0e0f"}, - {"0x0a0b0c0d0e0f10"}, - {"0x0a0b0c0d0e0f1011"}, - {"0x0a0b0c0d0e0f101112"}, - {"0x0a0b0c0d0e0f10111213"}, - {"0x0a0b0c0d0e0f1011121314"}, - {"0x0a0b0c0d0e0f101112131415"}, - {"0x0a0b0c0d0e0f10111213141516"}, - {"0x0a0b0c0d0e0f1011121314151617"}, - {"0x0a0b0c0d0e0f101112131415161718"}, - {"0x0a0b0c0d0e0f10111213141516171819"}, - {"0x0a0b0c0d0e0f101112131415161718191a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, - 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "37ca5366d9f5ddd9471f074f8019050ea6a13097368e84f298ffa1bd806ad851", expectedy: "5aa602da97cc438a039431c799b5f97467bcd45e693273dd1215f201b19fa5bd", expectedBlobVersionedHash: "01e531e7351a271839b2ae6ddec58818efd5f426fd6a7c0bc5c33c9171ed74bf", expectedBatchHash: "d3809d6b2fd10a62c6c58f9e7c32772f4ac062a78d363f46cd3ee301e87dbad2"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "250fc907e7ba3b5affb90a624566e337b02dd89a265677571cc0d1c51b60af19", expectedy: "1b2898bb001d962717159f49b015ae7228b21e9a590f836be0d79a0870c7d82b", expectedBlobVersionedHash: "01f3c431a72bbfd43c42dbd638d7f6d109be2b9449b96386b214f92b9e28ccc4", expectedBatchHash: "a51631991f6210b13e9c8ac9260704cca29fdc08adcfbd210053dc77c956e82f"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6ba09c6123b374f1828ce5b3e52c69ac7e2251f1a573ba4d51e71b386eef9c38", expectedy: "3104f9e81ecf4ade3281cc8ea68c4f451341388e2a2c84be4b5e5ed938b6bb26", expectedBlobVersionedHash: "017813036e3c57d5259d5b1d89ca0fe253e43d740f5ee287eabc916b3486f15d", expectedBatchHash: "ebfaf617cc91d9147b00968263993f70e0efc57c1189877092a87ea60b55a2d7"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "295f6ba39b866f6635a1e11ffe16badf42174ba120bdcb973806620370f665fc", expectedy: "553772861d517aefd58332d87d75a388523b40dbd69c1d73b7d78fd18d895513", expectedBlobVersionedHash: "013a5cb4a098dfa068b82acea202eac5c7b1ec8f16c7cb37b2a9629e7359a4b1", expectedBatchHash: "b4c58eb1be9b2b21f6a43b4170ee92d6ee0af46e20848fff508a07d40b2bac29"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 
100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: "4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, - } { - chunks := []*encoding.Chunk{} - - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} - - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) - require.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) - - _, y, err := kzg4844.ComputeProof(blob, *z) - require.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) - dataBytes := make([]byte, 32*len(chunks)) - for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) - } - dataHash := crypto.Keccak256Hash(dataBytes) - - batch := DABatch{ - Version: uint8(encoding.CodecV4), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - LastBlockTimestamp: 192837, - blob: blob, - z: z, - } - - batch.BlobDataProof, err = batch.blobDataProofForPICircuit() - require.NoError(t, err) - - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) - } -} - -func TestCodecV4BatchL1MessagePopped(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable
encode */) - assert.NoError(t, err) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(238), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(238), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks:
[]*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2934), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2934), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) - assert.Equal(t, uint64(55), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(55), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3150), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(55), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3187), batch5BlobSize) -} - -func TestCodecV4ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t,
uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { - t.Run("Case 1", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 293212, - "l1_message_popped": 7, - "total_l1_message_popped": 904750, - "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", - "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", - "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", - "last_block_timestamp": 1721130505, - "blob_data_proof": [ - "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", - "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(293212), batch.BatchIndex) - assert.Equal(t, uint64(7), batch.L1MessagePopped) - assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x64ba42153a4f642b2d8a37cf74a53067c37bba7389b85e7e07521f584e6b73d0") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 2", func(t *testing.T) { - jsonStr := `{ - "version": 5, - "batch_index": 123, - "l1_message_popped": 0, - "total_l1_message_popped": 0, - "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", - "last_block_timestamp": 1720174236, - "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", - "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", - "blob_data_proof": [ - "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", - "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(5), 
batch.Version) - assert.Equal(t, uint64(123), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) - assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0xd14f142dbc5c384e9920d5bf82c6bbf7c98030ffd7a3cace6c8a6e9639a285f9") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 3", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 293205, - "l1_message_popped": 0, - "total_l1_message_popped": 904737, - "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", - "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", - "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", - "last_block_timestamp": 1721129563, - "blob_data_proof": [ - "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", - "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(293205), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x19638ca802926b93946fe281666205958838d46172587d150ca4c720ae244cd3") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - 
data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/da.go b/encoding/da.go index eb66b7c..b55f79e 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -19,26 +19,6 @@ var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80 // CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. const CalldataNonZeroByteGas = 16 -// CodecVersion defines the version of encoder and decoder. -type CodecVersion uint8 - -const ( - // CodecV0 represents the version 0 of the encoder and decoder. - CodecV0 CodecVersion = iota - - // CodecV1 represents the version 1 of the encoder and decoder. - CodecV1 - - // CodecV2 represents the version 2 of the encoder and decoder. - CodecV2 - - // CodecV3 represents the version 3 of the encoder and decoder. - CodecV3 - - // CodecV4 represents the version 4 of the encoder and decoder. - CodecV4 -) - // Block represents an L2 block. type Block struct { Header *types.Header @@ -461,3 +441,12 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) return memoryCost } + +// GetTxPayloadLength calculates the length in bytes of the RLP encoding of the given transaction. +func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { + rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + if err != nil { + return 0, err + } + return uint64(len(rlpTxData)), nil +} diff --git a/encoding/encoding.go b/encoding/encoding.go index db3b027..8d165eb 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -1,6 +1,8 @@ package encoding import ( + "fmt" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) @@ -13,7 +15,7 @@ type DABlock interface { // DAChunk groups consecutive DABlocks with their transactions. type DAChunk interface { - Encode() []byte + Encode() ([]byte, error) Hash() (common.Hash, error) } @@ -33,17 +35,43 @@ type Codec interface { NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) - ComputeBatchDataHash([]*Chunk, uint64) (common.Hash, error) - ConstructBlobPayload([]*Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) - EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) CheckBatchCompressedDataCompatibility(*Batch) (bool, error) - EstimateChunkL1CommitCalldataSize(*Chunk) uint64 - EstimateChunkL1CommitGas(*Chunk) uint64 - EstimateBatchL1CommitGas(*Batch) uint64 - EstimateBatchL1CommitCalldataSize(*Batch) uint64 + EstimateChunkL1CommitCalldataSize(*Chunk) (uint64, error) + EstimateChunkL1CommitGas(*Chunk) (uint64, error) + EstimateBatchL1CommitGas(*Batch) (uint64, error) + EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error) SetCompression(enable bool) // only used for codecv4 } + +// CodecVersion represents the version of the codec. +type CodecVersion int + +const ( + CodecV0 CodecVersion = iota + CodecV1 + CodecV2 + CodecV3 + CodecV4 +) + +// GetCodec returns the appropriate codec for the given version.
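+// A minimal caller-side sketch (the batch value and error handling are
+// illustrative assumptions, not part of this change):
+//
+//	codec, err := GetCodec(CodecV4)
+//	if err != nil {
+//		return err
+//	}
+//	daBatch, err := codec.NewDABatch(batch)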
+func GetCodec(version CodecVersion) (Codec, error) { + switch version { + case CodecV0: + return &DACodecV0{}, nil + case CodecV1: + return &DACodecV1{}, nil + case CodecV2: + return &DACodecV2{}, nil + case CodecV3: + return &DACodecV3{}, nil + case CodecV4: + return &DACodecV4{}, nil + default: + return nil, fmt.Errorf("unsupported codec version: %d", version) + } +} diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go index 58eab2b..feab982 100644 --- a/encoding/zstd/zstd.go +++ b/encoding/zstd/zstd.go @@ -5,6 +5,7 @@ package zstd char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); */ import "C" + import ( "fmt" "unsafe" From 879bb98de49ca4792ddaf0ca1c19de1f1560024f Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 20:44:22 +0800 Subject: [PATCH 016/126] add dablock.go --- encoding/codecv0.go | 57 +++++--------------------------------------- encoding/codecv1.go | 13 +++------- encoding/codecv2.go | 5 +--- encoding/codecv3.go | 5 +--- encoding/codecv4.go | 5 +--- encoding/dablock.go | 47 ++++++++++++++++++++++++++++++++++++ encoding/encoding.go | 6 ----- 7 files changed, 59 insertions(+), 79 deletions(-) create mode 100644 encoding/dablock.go diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 9a92879..3cceb18 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" "math" - "math/big" "strings" "github.com/scroll-tech/go-ethereum/common" @@ -17,19 +16,9 @@ import ( type DACodecV0 struct{} -// DABlock represents a Data Availability Block. -type DABlockV0 struct { - BlockNumber uint64 - Timestamp uint64 - BaseFee *big.Int - GasLimit uint64 - NumTransactions uint16 - NumL1Messages uint16 -} - // DAChunk groups consecutive DABlocks with their transactions. type DAChunkV0 struct { - Blocks []*DABlockV0 + Blocks []*DABlock Transactions [][]*types.TransactionData } @@ -45,7 +34,7 @@ type DABatchV0 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -63,7 +52,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := DABlockV0{ + daBlock := &DABlock{ BlockNumber: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, @@ -72,42 +61,12 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) NumL1Messages: uint16(numL1Messages), } - return &daBlock, nil -} - -// Encode serializes the DABlock into a slice of bytes. -func (b *DABlockV0) Encode() []byte { - bytes := make([]byte, 60) - binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) - binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) - if b.BaseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) - } - binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) - return bytes -} - -// Decode populates the fields of a DABlock from a byte slice. 
-func (b *DABlockV0) Decode(bytes []byte) error { - if len(bytes) != 60 { - return errors.New("block encoding is not 60 bytes long") - } - - b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) - b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) - - return nil + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []*DABlockV0 + var blocks []*DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -127,11 +86,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) if err != nil { return nil, err } - blockData, ok := b.(*DABlockV0) - if !ok { - return nil, errors.New("failed to cast block data") - } - blocks = append(blocks, blockData) + blocks = append(blocks, b) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 5a1d87d..0ac7d58 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -20,9 +20,6 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 -// DABlockV1 represents a Data Availability Block. -type DABlockV1 = DABlockV0 - // DAChunkV1 groups consecutive DABlocks with their transactions. type DAChunkV1 DAChunkV0 @@ -44,7 +41,7 @@ type DABatchV1 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) } @@ -58,7 +55,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []*DABlockV1 + var blocks []*DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -66,11 +63,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) if err != nil { return nil, err } - blockData, ok := b.(*DABlockV1) - if !ok { - return nil, errors.New("failed to cast block data") - } - blocks = append(blocks, blockData) + blocks = append(blocks, b) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index e592304..02cb6f1 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -22,9 +22,6 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 -// DABlockV2 represents a Data Availability Block. -type DABlockV2 = DABlockV1 - // DAChunkV2 groups consecutive DABlocks with their transactions. type DAChunkV2 = DAChunkV1 @@ -46,7 +43,7 @@ type DABatchV2 struct { } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. 
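// Each codec version simply delegates NewDABlock to its predecessor
// (V4 -> V3 -> V2 -> V1 -> V0), so the DACodecV0 implementation remains
// the single place where block contexts are actually built.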
-func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index c6fcc79..2eed735 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -16,9 +16,6 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 -// DABlockV3 represents a Data Availability Block. -type DABlockV3 = DABlockV2 - // DAChunkV3 groups consecutive DABlocks with their transactions. type DAChunkV3 = DAChunkV2 @@ -44,7 +41,7 @@ type DABatchV3 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index d750127..0d2864e 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,9 +25,6 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 -// DABlockV4 represents a Data Availability Block. -type DABlockV4 = DABlockV3 - // DAChunkV4 groups consecutive DABlocks with their transactions. type DAChunkV4 = DAChunkV3 @@ -53,7 +50,7 @@ type DABatchV4 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/dablock.go b/encoding/dablock.go new file mode 100644 index 0000000..7ae69ab --- /dev/null +++ b/encoding/dablock.go @@ -0,0 +1,47 @@ +package encoding + +import ( + "encoding/binary" + "errors" + "math/big" +) + +// DABlock represents a Data Availability Block. +type DABlock struct { + BlockNumber uint64 + Timestamp uint64 + BaseFee *big.Int + GasLimit uint64 + NumTransactions uint16 + NumL1Messages uint16 +} + +// Encode serializes the DABlock into a slice of bytes. +func (b *DABlock) Encode() []byte { + bytes := make([]byte, 60) + binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) + binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) + if b.BaseFee != nil { + binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) + } + binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) + binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) + binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) + return bytes +} + +// Decode populates the fields of a DABlock from a byte slice. 
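+// The fixed 60-byte layout mirrors Encode above (offsets inferred from the
+// implementation; bytes [16:40) are left as zero padding):
+//
+//	[0:8)    BlockNumber      big-endian uint64
+//	[8:16)   Timestamp        big-endian uint64
+//	[40:48)  BaseFee          big-endian uint64
+//	[48:56)  GasLimit         big-endian uint64
+//	[56:58)  NumTransactions  big-endian uint16
+//	[58:60)  NumL1Messages    big-endian uint16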
+func (b *DABlock) Decode(bytes []byte) error { + if len(bytes) != 60 { + return errors.New("block encoding is not 60 bytes long") + } + + b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) + b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) + b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) + b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) + b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) + b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) + + return nil +} diff --git a/encoding/encoding.go b/encoding/encoding.go index 8d165eb..1783439 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -7,12 +7,6 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABlock represents a Data Availability Block. -type DABlock interface { - Encode() []byte - Decode([]byte) error -} - // DAChunk groups consecutive DABlocks with their transactions. type DAChunk interface { Encode() ([]byte, error) From 77aafd422158aad079253e621086fea6feab8fb0 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 21:26:56 +0800 Subject: [PATCH 017/126] add dachunk.go --- encoding/codecv0.go | 91 ------------------------ encoding/codecv1.go | 51 -------------- encoding/codecv2.go | 3 - encoding/codecv3.go | 3 - encoding/codecv4.go | 3 - encoding/dachunk.go | 161 +++++++++++++++++++++++++++++++++++++++++++ encoding/encoding.go | 2 +- 7 files changed, 162 insertions(+), 152 deletions(-) create mode 100644 encoding/dachunk.go diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 3cceb18..b0a57a4 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -2,11 +2,9 @@ package encoding import ( "encoding/binary" - "encoding/hex" "errors" "fmt" "math" - "strings" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -16,12 +14,6 @@ import ( type DACodecV0 struct{} -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunkV0 struct { - Blocks []*DABlock - Transactions [][]*types.TransactionData -} - // DABatch contains metadata about a batch of DAChunks. type DABatchV0 struct { Version uint8 @@ -99,89 +91,6 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return &daChunk, nil } -// Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunkV0) Encode() ([]byte, error) { - if len(c.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(c.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) - - var l2TxDataBytes []byte - - for _, block := range c.Blocks { - chunkBytes = append(chunkBytes, block.Encode()...) - } - - for _, blockTxs := range c.Transactions { - for _, txData := range blockTxs { - if txData.Type == types.L1MessageTxType { - continue - } - - var txLen [4]byte - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return nil, err - } - binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) - l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) - l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) - } - } - - chunkBytes = append(chunkBytes, l2TxDataBytes...) - return chunkBytes, nil -} - -// Hash computes the hash of the DAChunk data. 
-func (c *DAChunkV0) Hash() (common.Hash, error) { - chunkBytes, err := c.Encode() - if err != nil { - return common.Hash{}, err - } - - if len(chunkBytes) == 0 { - return common.Hash{}, errors.New("chunk data is empty and cannot be processed") - } - numBlocks := chunkBytes[0] - - // concatenate block contexts - var dataBytes []byte - for i := 0; i < int(numBlocks); i++ { - // only the first 58 bytes of each BlockContext are needed for the hashing process - dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) - } - - // concatenate l1 and l2 tx hashes - for _, blockTxs := range c.Transactions { - var l1TxHashes []byte - var l2TxHashes []byte - for _, txData := range blockTxs { - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) - } - if txData.Type == types.L1MessageTxType { - l1TxHashes = append(l1TxHashes, hashBytes...) - } else { - l2TxHashes = append(l2TxHashes, hashBytes...) - } - } - dataBytes = append(dataBytes, l1TxHashes...) - dataBytes = append(dataBytes, l2TxHashes...) - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - // NewDABatch creates a DABatch from the provided Batch. func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // compute batch data hash diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 0ac7d58..b4efc4d 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -7,7 +7,6 @@ import ( "errors" "fmt" "math/big" - "strings" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -20,9 +19,6 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 -// DAChunkV1 groups consecutive DABlocks with their transactions. -type DAChunkV1 DAChunkV0 - // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { // header @@ -76,53 +72,6 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return &daChunk, nil } -// Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunkV1) Encode() ([]byte, error) { - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) - - for _, block := range c.Blocks { - blockBytes := block.Encode() - chunkBytes = append(chunkBytes, blockBytes...) - } - - return chunkBytes, nil -} - -// Hash computes the hash of the DAChunk data. -func (c *DAChunkV1) Hash() (common.Hash, error) { - var dataBytes []byte - - // concatenate block contexts - for _, block := range c.Blocks { - encodedBlock := block.Encode() - // only the first 58 bytes are used in the hashing process - dataBytes = append(dataBytes, encodedBlock[:58]...) - } - - // concatenate l1 tx hashes - for _, blockTxs := range c.Transactions { - for _, txData := range blockTxs { - if txData.Type != types.L1MessageTxType { - continue - } - - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, err - } - if len(hashBytes) != 32 { - return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) - } - dataBytes = append(dataBytes, hashBytes...) - } - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - // NewDABatch creates a DABatch from the provided Batch. 
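// In outline, construction derives the batch data hash from the chunk
// hashes, builds the 4844 blob payload, and then assembles the header
// fields into the resulting batch.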
func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 02cb6f1..3c13824 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -22,9 +22,6 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 -// DAChunkV2 groups consecutive DABlocks with their transactions. -type DAChunkV2 = DAChunkV1 - // DABatch contains metadata about a batch of DAChunks. type DABatchV2 struct { // header diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 2eed735..1d577cd 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -16,9 +16,6 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 -// DAChunkV3 groups consecutive DABlocks with their transactions. -type DAChunkV3 = DAChunkV2 - // DABatchV3 contains metadata about a batch of DAChunks. type DABatchV3 struct { // header diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0d2864e..69228a3 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,9 +25,6 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 -// DAChunkV4 groups consecutive DABlocks with their transactions. -type DAChunkV4 = DAChunkV3 - // DABatchV4 contains metadata about a batch of DAChunks. type DABatchV4 struct { // header diff --git a/encoding/dachunk.go b/encoding/dachunk.go new file mode 100644 index 0000000..d4533df --- /dev/null +++ b/encoding/dachunk.go @@ -0,0 +1,161 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "strings" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" +) + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunkV0 struct { + Blocks []*DABlock + Transactions [][]*types.TransactionData +} + +// Encode serializes the DAChunk into a slice of bytes. +func (c *DAChunkV0) Encode() ([]byte, error) { + if len(c.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(c.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + + var l2TxDataBytes []byte + + for _, block := range c.Blocks { + chunkBytes = append(chunkBytes, block.Encode()...) + } + + for _, blockTxs := range c.Transactions { + for _, txData := range blockTxs { + if txData.Type == types.L1MessageTxType { + continue + } + + var txLen [4]byte + rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + if err != nil { + return nil, err + } + binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) + l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) + l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) + } + } + + chunkBytes = append(chunkBytes, l2TxDataBytes...) + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. 
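+// The keccak256 preimage, as assembled below, is the first 58 bytes of each
+// encoded block context, followed per block by the concatenated L1 message
+// tx hashes and then the L2 tx hashes.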
+func (c *DAChunkV0) Hash() (common.Hash, error) { + chunkBytes, err := c.Encode() + if err != nil { + return common.Hash{}, err + } + + if len(chunkBytes) == 0 { + return common.Hash{}, errors.New("chunk data is empty and cannot be processed") + } + numBlocks := chunkBytes[0] + + // concatenate block contexts + var dataBytes []byte + for i := 0; i < int(numBlocks); i++ { + // only the first 58 bytes of each BlockContext are needed for the hashing process + dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) + } + + // concatenate l1 and l2 tx hashes + for _, blockTxs := range c.Transactions { + var l1TxHashes []byte + var l2TxHashes []byte + for _, txData := range blockTxs { + txHash := strings.TrimPrefix(txData.TxHash, "0x") + hashBytes, err := hex.DecodeString(txHash) + if err != nil { + return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) + } + if txData.Type == types.L1MessageTxType { + l1TxHashes = append(l1TxHashes, hashBytes...) + } else { + l2TxHashes = append(l2TxHashes, hashBytes...) + } + } + dataBytes = append(dataBytes, l1TxHashes...) + dataBytes = append(dataBytes, l2TxHashes...) + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// DAChunkV1 groups consecutive DABlocks with their transactions. +type DAChunkV1 DAChunkV0 + +// Encode serializes the DAChunk into a slice of bytes. +func (c *DAChunkV1) Encode() ([]byte, error) { + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + + for _, block := range c.Blocks { + blockBytes := block.Encode() + chunkBytes = append(chunkBytes, blockBytes...) + } + + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. +func (c *DAChunkV1) Hash() (common.Hash, error) { + var dataBytes []byte + + // concatenate block contexts + for _, block := range c.Blocks { + encodedBlock := block.Encode() + // only the first 58 bytes are used in the hashing process + dataBytes = append(dataBytes, encodedBlock[:58]...) + } + + // concatenate l1 tx hashes + for _, blockTxs := range c.Transactions { + for _, txData := range blockTxs { + if txData.Type != types.L1MessageTxType { + continue + } + + txHash := strings.TrimPrefix(txData.TxHash, "0x") + hashBytes, err := hex.DecodeString(txHash) + if err != nil { + return common.Hash{}, err + } + if len(hashBytes) != 32 { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) + } + dataBytes = append(dataBytes, hashBytes...) + } + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// DAChunkV2 groups consecutive DABlocks with their transactions. +type DAChunkV2 = DAChunkV1 + +// DAChunkV3 groups consecutive DABlocks with their transactions. +type DAChunkV3 = DAChunkV2 + +// DAChunkV4 groups consecutive DABlocks with their transactions. +type DAChunkV4 = DAChunkV3 diff --git a/encoding/encoding.go b/encoding/encoding.go index 1783439..7bbdaac 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -24,7 +24,7 @@ type DABatch interface { // Codec represents the interface for encoding and decoding DA-related structures. 
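// An illustrative end-to-end sketch (the chunk variable and error handling
// are placeholders, not part of this interface):
//
//	codec, err := GetCodec(CodecV1)
//	if err != nil {
//		return err
//	}
//	daChunk, err := codec.NewDAChunk(chunk, 0 /* totalL1MessagePoppedBefore */)
//	if err != nil {
//		return err
//	}
//	chunkBytes, err := daChunk.Encode()
//	chunkHash, err := daChunk.Hash()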
type Codec interface { - NewDABlock(*Block, uint64) (DABlock, error) + NewDABlock(*Block, uint64) (*DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) From 6ee5c197524e4273c5f49b7726765644a8e2b77e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 21:58:18 +0800 Subject: [PATCH 018/126] add dabatch.go --- encoding/codecv0.go | 46 -------- encoding/codecv1.go | 112 +------------------ encoding/codecv2.go | 104 ++---------------- encoding/codecv3.go | 130 +--------------------- encoding/codecv4.go | 130 ++-------------------- encoding/da.go | 26 +++++ encoding/dabatch.go | 255 ++++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 299 insertions(+), 504 deletions(-) create mode 100644 encoding/dabatch.go diff --git a/encoding/codecv0.go b/encoding/codecv0.go index b0a57a4..7b0b5cd 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -9,22 +9,10 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) type DACodecV0 struct{} -// DABatch contains metadata about a batch of DAChunks. -type DABatchV0 struct { - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { if !block.Header.Number.IsUint64() { @@ -152,40 +140,6 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV0) Encode() []byte { - batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.ParentBatchHash[:]) - copy(batchBytes[89:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV0) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// Blob returns the blob of the batch. -func (b *DABatchV0) Blob() *kzg4844.Blob { - return nil -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV0) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 diff --git a/encoding/codecv1.go b/encoding/codecv1.go index b4efc4d..0bc60a6 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -3,7 +3,6 @@ package encoding import ( "crypto/sha256" "encoding/binary" - "encoding/hex" "errors" "fmt" "math/big" @@ -19,23 +18,6 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. 
const Codecv1MaxNumChunks = 15 -// DABatchV1 contains metadata about a batch of DAChunks. -type DABatchV1 struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -84,7 +66,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -117,31 +99,6 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a batch in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV1) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore - - for _, chunk := range chunks { - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return common.Hash{}, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - chunkHash, err := daChunk.Hash() - if err != nil { - return common.Hash{}, err - } - dataBytes = append(dataBytes, chunkHash.Bytes()...) - } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} - // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv1MaxNumChunks*4 @@ -252,73 +209,6 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV1) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV1) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. 
-func (b *DABatchV1) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV1) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV1) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 3c13824..81b8897 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -22,23 +22,6 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 -// DABatch contains metadata about a batch of DAChunks. -type DABatchV2 struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point -} - // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -61,7 +44,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -73,7 +56,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, _, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -94,16 +77,8 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. 
-// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV2) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV1{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV2) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +// constructBlobPayload constructs the 4844 blob payload. +func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv2MaxNumChunks*4 @@ -172,13 +147,13 @@ func (o *DACodecV2) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (* if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } } if len(blobBytes) > 126976 { - log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } @@ -232,73 +207,6 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV2) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV2) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. 
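The two size checks in constructBlobPayload come straight from EIP-4844 geometry: a blob is 4096 field elements of 32 bytes (131072 bytes raw, the 128 KiB threshold for the uncompressed batch), and since this codec leaves the top byte of every field element at zero to stay below the BLS modulus, only 31 bytes per element, 126976 in total, are usable payload. For reference (editor's sketch):

```go
const (
	fieldElementsPerBlob = 4096
	rawBlobSize          = fieldElementsPerBlob * 32 // 131072: the uncompressed-batch threshold above
	usableBytesPerFE     = 31                        // high byte of each 32-byte word stays zero
	maxBlobPayloadSize   = fieldElementsPerBlob * usableBytesPerFE // 126976: the blob payload cap
)
```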
-func (b *DABatchV2) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV2) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV2) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 1d577cd..e60248b 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -2,12 +2,10 @@ package encoding import ( "encoding/binary" - "encoding/hex" "errors" "fmt" "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) @@ -16,27 +14,6 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 -// DABatchV3 contains metadata about a batch of DAChunks. -type DABatchV3 struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point - - // for batch task - blobBytes []byte -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -63,7 +40,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -75,7 +52,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -105,17 +82,9 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV3) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV2{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV3) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return (&DACodecV2{}).ConstructBlobPayload(chunks, useMockTxData) +// constructBlobPayload constructs the 4844 blob payload. +func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + return (&DACodecV2{}).constructBlobPayload(chunks, useMockTxData) } // NewDABatchFromBytes decodes the given byte slice into a DABatch. @@ -143,95 +112,6 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV3) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV3) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// blobDataProofForPICircuit computes the abi-encoded blob verification data. 
-func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { - if b.blob == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") - } - if b.z == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") - } - - _, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of result: - // | z | y | - // |---------|---------| - // | bytes32 | bytes32 | - var result [2]common.Hash - result[0] = common.BytesToHash(b.z[:]) - result[1] = common.BytesToHash(y[:]) - - return result, nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV3) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV3) BlobBytes() []byte { - return b.blobBytes -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { return (&DACodecV2{}).EstimateChunkL1CommitBatchSizeAndBlobSize(c) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 69228a3..f4d1426 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,27 +25,6 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 -// DABatchV4 contains metadata about a batch of DAChunks. -type DABatchV4 struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point - - // for batch task - blobBytes []byte -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -72,7 +51,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -84,7 +63,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -114,16 +93,8 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV4) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV3{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +// constructBlobPayload constructs the 4844 blob payload. +func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv4MaxNumChunks*4 @@ -193,7 +164,7 @@ func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (* if !useMockTxData { // Check compressed data compatibility. if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } } @@ -203,7 +174,7 @@ func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } if len(blobBytes) > 126976 { - log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } @@ -261,95 +232,6 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. 
-func (b *DABatchV4) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV4) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatchV4) blobDataProofForPICircuit() ([2]common.Hash, error) { - if b.blob == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") - } - if b.z == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") - } - - _, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of result: - // | z | y | - // |---------|---------| - // | bytes32 | bytes32 | - var result [2]common.Hash - result[0] = common.BytesToHash(b.z[:]) - result[1] = common.BytesToHash(y[:]) - - return result, nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV4) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV4) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV4) BlobBytes() []byte { - return b.blobBytes -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
 func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
 	batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks)
diff --git a/encoding/da.go b/encoding/da.go
index b55f79e..61d8c03 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -10,6 +10,7 @@ import (
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/common/hexutil"
 	"github.com/scroll-tech/go-ethereum/core/types"
+	"github.com/scroll-tech/go-ethereum/crypto"
 	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
 )
 
@@ -449,3 +450,28 @@ func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) {
 	}
 	return uint64(len(rlpTxData)), nil
 }
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := (&DACodecV1{}).NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+	}
+
+	dataHash := crypto.Keccak256Hash(dataBytes)
+	return dataHash, nil
+}
diff --git a/encoding/dabatch.go b/encoding/dabatch.go
new file mode 100644
index 0000000..8c00bbb
--- /dev/null
+++ b/encoding/dabatch.go
@@ -0,0 +1,255 @@
+package encoding
+
+import (
+	"encoding/binary"
+	"encoding/hex"
+	"errors"
+	"fmt"
+
+	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/crypto"
+	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
+)
+
+// DABatch contains metadata about a batch of DAChunks.
+type DABatchV0 struct {
+	Version                uint8
+	BatchIndex             uint64
+	L1MessagePopped        uint64
+	TotalL1MessagePopped   uint64
+	DataHash               common.Hash
+	ParentBatchHash        common.Hash
+	SkippedL1MessageBitmap []byte
+}
+
+// Encode serializes the DABatch into bytes.
+func (b *DABatchV0) Encode() []byte {
+	batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap))
+	batchBytes[0] = b.Version
+	binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex)
+	binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped)
+	binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped)
+	copy(batchBytes[25:], b.DataHash[:])
+	copy(batchBytes[57:], b.ParentBatchHash[:])
+	copy(batchBytes[89:], b.SkippedL1MessageBitmap[:])
+	return batchBytes
+}
+
+// Hash computes the hash of the serialized DABatch.
+func (b *DABatchV0) Hash() common.Hash {
+	bytes := b.Encode()
+	return crypto.Keccak256Hash(bytes)
+}
+
+// Blob returns the blob of the batch.
+func (b *DABatchV0) Blob() *kzg4844.Blob {
+	return nil
+}
+
+// BlobBytes returns the blob bytes of the batch.
+func (b *DABatchV0) BlobBytes() []byte {
+	return nil
+}
+
+// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data.
+func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) {
+	return nil, nil
+}
+
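For reference, the V0 batch header serialized above occupies 89 fixed bytes: 1 (version) + 8 (batch index) + 8 (L1 messages popped) + 8 (total popped) + 32 (data hash) + 32 (parent batch hash), with the skipped L1 message bitmap appended from offset 89 onward. A minimal decoding sketch mirroring Encode (an editor's illustration in the encoding package context; decodeDABatchV0Header is a hypothetical name, not part of the patch):

```go
// decodeDABatchV0Header inverts DABatchV0.Encode; the offsets match
// those used by DACodecV0.NewDABatchFromBytes.
func decodeDABatchV0Header(data []byte) (*DABatchV0, error) {
	if len(data) < 89 {
		return nil, fmt.Errorf("invalid data length for DABatch, expected at least 89 bytes but got %d", len(data))
	}
	return &DABatchV0{
		Version:                data[0],
		BatchIndex:             binary.BigEndian.Uint64(data[1:9]),
		L1MessagePopped:        binary.BigEndian.Uint64(data[9:17]),
		TotalL1MessagePopped:   binary.BigEndian.Uint64(data[17:25]),
		DataHash:               common.BytesToHash(data[25:57]),
		ParentBatchHash:        common.BytesToHash(data[57:89]),
		SkippedL1MessageBitmap: data[89:],
	}, nil
}
```

+// DABatchV1 contains metadata about a batch of DAChunks.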
+type DABatchV1 struct { + // header + Version uint8 + BatchIndex uint64 + L1MessagePopped uint64 + TotalL1MessagePopped uint64 + DataHash common.Hash + BlobVersionedHash common.Hash + ParentBatchHash common.Hash + SkippedL1MessageBitmap []byte + + // blob payload + blob *kzg4844.Blob + z *kzg4844.Point +} + +// Encode serializes the DABatch into bytes. +func (b *DABatchV1) Encode() []byte { + batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) + copy(batchBytes[25:], b.DataHash[:]) + copy(batchBytes[57:], b.BlobVersionedHash[:]) + copy(batchBytes[89:], b.ParentBatchHash[:]) + copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatchV1) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// BlobDataProof computes the abi-encoded blob verification data. +func (b *DABatchV1) BlobDataProof() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProof with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProof with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatchV1) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. +func (b *DABatchV1) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + +type DABatchV2 = DABatchV1 + +// DABatchV3 contains metadata about a batch of DAChunks. +type DABatchV3 struct { + // header + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash common.Hash `json:"data_hash"` + BlobVersionedHash common.Hash `json:"blob_versioned_hash"` + ParentBatchHash common.Hash `json:"parent_batch_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]common.Hash `json:"blob_data_proof"` + + // blob payload + blob *kzg4844.Blob + z *kzg4844.Point + + // for batch task + blobBytes []byte +} + +// Encode serializes the DABatch into bytes. 
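Per the memory-layout table in BlobDataProof above, the packed `_blobDataProof` is 160 bytes: z and y as 32-byte words followed by the 48-byte KZG commitment and the 48-byte KZG proof. A hedged sketch of unpacking it (splitBlobDataProof is the editor's name, not an API in this repo):

```go
// splitBlobDataProof slices the 160-byte packed blob data proof back
// into its four components, following the | z | y | commitment | proof |
// layout documented above.
func splitBlobDataProof(proof []byte) (z, y [32]byte, commitment, kzgProof [48]byte, err error) {
	if len(proof) != 160 {
		err = fmt.Errorf("expected 160 bytes, got %d", len(proof))
		return
	}
	copy(z[:], proof[0:32])
	copy(y[:], proof[32:64])
	copy(commitment[:], proof[64:112])
	copy(kzgProof[:], proof[112:160])
	return
}
```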
+func (b *DABatchV3) Encode() []byte { + batchBytes := make([]byte, 193) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) + copy(batchBytes[25:57], b.DataHash[:]) + copy(batchBytes[57:89], b.BlobVersionedHash[:]) + copy(batchBytes[89:121], b.ParentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) + copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatchV3) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// blobDataProofForPICircuit computes the abi-encoded blob verification data. +func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { + if b.blob == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") + } + if b.z == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") + } + + _, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of result: + // | z | y | + // |---------|---------| + // | bytes32 | bytes32 | + var result [2]common.Hash + result[0] = common.BytesToHash(b.z[:]) + result[1] = common.BytesToHash(y[:]) + + return result, nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatchV3) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. 
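Unlike the V0-V2 headers, the V3 header carries no skipped-message bitmap and is a fixed 193 bytes: the 121-byte V1-style prefix, then the last block timestamp and the two 32-byte halves (z, y) of the blob data proof. The offsets, summarized as an editor's sketch of Encode above (the constant names are hypothetical):

```go
const (
	offVersion        = 0   // 1 byte
	offBatchIndex     = 1   // 8 bytes, big-endian
	offL1MsgPopped    = 9   // 8 bytes
	offTotalL1Popped  = 17  // 8 bytes
	offDataHash       = 25  // 32 bytes
	offBlobVersioned  = 57  // 32 bytes
	offParentHash     = 89  // 32 bytes
	offLastBlockTime  = 121 // 8 bytes
	offBlobProofZ     = 129 // 32 bytes (z)
	offBlobProofY     = 161 // 32 bytes (y)
	batchV3HeaderSize = 193
)
```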
+func (b *DABatchV3) BlobBytes() []byte {
+	return b.blobBytes
+}
+
+type DABatchV4 = DABatchV3

From 79422a22e9a73d9d53aae578c926120b8c0d98d9 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 21 Aug 2024 22:07:49 +0800
Subject: [PATCH 019/126] move computeBatchDataHash to codecv

---
 encoding/codecv1.go | 25 +++++++++++++++++++++++++
 encoding/codecv2.go | 10 +++++++++-
 encoding/codecv3.go | 10 +++++++++-
 encoding/codecv4.go | 10 +++++++++-
 encoding/da.go      | 26 --------------------------
 5 files changed, 52 insertions(+), 29 deletions(-)

diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 0bc60a6..5387f37 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -391,3 +391,28 @@ func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64,
 
 // SetCompression enables or disables compression.
 func (o *DACodecV1) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+	}
+
+	dataHash := crypto.Keccak256Hash(dataBytes)
+	return dataHash, nil
+}
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index 81b8897..5ad0626 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -44,7 +44,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
@@ -304,3 +304,11 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 
 // SetCompression enables or disables compression.
 func (o *DACodecV2) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV1{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
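computeBatchDataHash, now a DACodecV1 method that the later codecs delegate to, keccak-hashes the concatenation of the per-chunk hashes. A sketch of verifying a batch data hash from already-computed chunk hashes (editor's illustration; verifyBatchDataHash is a hypothetical helper):

```go
// verifyBatchDataHash mirrors computeBatchDataHash: the batch data hash
// is keccak256 over the concatenated chunk hashes, in order.
func verifyBatchDataHash(chunkHashes []common.Hash, expected common.Hash) bool {
	var dataBytes []byte
	for _, h := range chunkHashes {
		dataBytes = append(dataBytes, h.Bytes()...)
	}
	return crypto.Keccak256Hash(dataBytes) == expected
}
```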
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index e60248b..efebe3f 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -40,7 +40,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
@@ -162,3 +162,11 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 
 // SetCompression enables or disables compression.
 func (o *DACodecV3) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV2{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index f4d1426..906082d 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -51,7 +51,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
@@ -337,3 +337,11 @@ func (o *DACodecV4) SetCompression(enable bool) {
 		atomic.StoreUint32(&o.enableCompress, 0)
 	}
 }
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV3{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
diff --git a/encoding/da.go b/encoding/da.go
index 61d8c03..b55f79e 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -10,7 +10,6 @@ import (
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/common/hexutil"
 	"github.com/scroll-tech/go-ethereum/core/types"
-	"github.com/scroll-tech/go-ethereum/crypto"
 	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
 )
 
@@ -450,28 +449,3 @@ func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) {
 	}
 	return uint64(len(rlpTxData)), nil
 }
-
-// computeBatchDataHash computes the data hash of the batch.
-// Note: The batch hash and batch data hash are two different hashes,
-// the former is used for identifying a batch in the contracts,
-// the latter is used in the public input to the provers.
-func computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
-	var dataBytes []byte
-	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
-
-	for _, chunk := range chunks {
-		daChunk, err := (&DACodecV1{}).NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
-		if err != nil {
-			return common.Hash{}, err
-		}
-		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
-		chunkHash, err := daChunk.Hash()
-		if err != nil {
-			return common.Hash{}, err
-		}
-		dataBytes = append(dataBytes, chunkHash.Bytes()...)
-	}
-
-	dataHash := crypto.Keccak256Hash(dataBytes)
-	return dataHash, nil
-}

From 296880e0bd6d67bcf867f3174e1ef9f8446e5525 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 21 Aug 2024 22:25:15 +0800
Subject: [PATCH 020/126] fix

---
 encoding/codecv1.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 5387f37..6235172 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -66,7 +66,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}

From c038850ebe6c9710dd53364559eba9c3ff6c093c Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 21 Aug 2024 23:38:59 +0800
Subject: [PATCH 021/126] add DABatchBase

---
 encoding/bitmap.go  |  4 ++--
 encoding/codecv0.go | 34 +++++++++++++++++---------------
 encoding/codecv1.go | 42 ++++++++++++++++++++++------------------
 encoding/codecv2.go | 42 ++++++++++++++++++++++------------------
 encoding/codecv3.go | 47 +++++++++++++++++++++++++--------------------
 encoding/codecv4.go | 45 ++++++++++++++++++++++++------------------
 encoding/dabatch.go | 36 +++++++++++++++-------------------
 7 files changed, 133 insertions(+), 117 deletions(-)

diff --git a/encoding/bitmap.go b/encoding/bitmap.go
index 7ada6d6..da4386e 100644
--- a/encoding/bitmap.go
+++ b/encoding/bitmap.go
@@ -7,8 +7,8 @@ import (
 	"github.com/scroll-tech/go-ethereum/core/types"
 )
 
-// ConstructSkippedBitmap constructs skipped L1 message bitmap of the batch.
-func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) {
+// constructSkippedBitmap constructs skipped L1 message bitmap of the batch.
+func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 7b0b5cd..ff3edb8 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -103,19 +103,21 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } daBatch := DABatchV0{ - Version: uint8(CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, + DABatchBase: DABatchBase{ + Version: uint8(CodecV0), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, } return &daBatch, nil @@ -128,13 +130,15 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[57:89]), - SkippedL1MessageBitmap: data[89:], + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[57:89]), + SkippedL1MessageBitmap: data[89:], + }, } return b, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 6235172..5308473 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -72,7 +72,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -84,16 +84,18 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV1{ - Version: uint8(CodecV1), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - blob: blob, - z: z, + DABatchBase: DABatchBase{ + Version: uint8(CodecV1), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: 
batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + blob: blob, + z: z, } return &daBatch, nil @@ -196,14 +198,16 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV1{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[89:121]), + SkippedL1MessageBitmap: data[121:], + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), } return b, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 5ad0626..b496073 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -50,7 +50,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -62,16 +62,18 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV2{ - Version: uint8(CodecV2), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - blob: blob, - z: z, + DABatchBase: DABatchBase{ + Version: uint8(CodecV2), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + blob: blob, + z: z, } return &daBatch, nil @@ -194,14 +196,16 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV2{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[89:121]), + SkippedL1MessageBitmap: data[121:], + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), } return b, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index efebe3f..70fb3c2 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -46,7 
+46,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -61,17 +61,20 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV3{ - Version: uint8(CodecV3), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, + DABatchBase: DABatchBase{ + Version: uint8(CodecV3), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + LastBlockTimestamp: lastBlock.Header.Time, + blob: blob, + z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -88,21 +91,23 @@ func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields empty. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } b := &DABatchV3{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[89:121]), + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), BlobDataProof: [2]common.Hash{ common.BytesToHash(data[129:161]), common.BytesToHash(data[161:193]), diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 906082d..2856ca2 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -57,7 +57,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -72,17 +72,20 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV4{ - Version: uint8(CodecV4), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, + DABatchBase: DABatchBase{ + Version: uint8(CodecV4), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + LastBlockTimestamp: lastBlock.Header.Time, + blob: blob, + z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -215,14 +218,16 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV4{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: 
common.BytesToHash(data[89:121]), + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), BlobDataProof: [2]common.Hash{ common.BytesToHash(data[129:161]), common.BytesToHash(data[161:193]), diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 8c00bbb..f284c86 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -11,8 +11,8 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABatch contains metadata about a batch of DAChunks. -type DABatchV0 struct { +// DABatchBase contains common metadata for all versions of DABatch +type DABatchBase struct { Version uint8 BatchIndex uint64 L1MessagePopped uint64 @@ -22,6 +22,11 @@ type DABatchV0 struct { SkippedL1MessageBitmap []byte } +// DABatchV0 contains metadata about a batch of DAChunks. +type DABatchV0 struct { + DABatchBase +} + // Encode serializes the DABatch into bytes. func (b *DABatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) @@ -58,15 +63,9 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte + DABatchBase + + BlobVersionedHash common.Hash // blob payload blob *kzg4844.Blob @@ -144,16 +143,11 @@ type DABatchV2 = DABatchV1 // DABatchV3 contains metadata about a batch of DAChunks. type DABatchV3 struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` + DABatchBase + + BlobVersionedHash common.Hash `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]common.Hash `json:"blob_data_proof"` // blob payload blob *kzg4844.Blob From 4499e2c752686c7ad5a9233eecca555d24e9b664 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 00:33:25 +0800 Subject: [PATCH 022/126] add GetCodecVersion --- encoding/codecv0.go | 4 ++-- encoding/da.go | 19 ++++++++++++++++++- go.mod | 2 +- go.sum | 4 ++-- 4 files changed, 23 insertions(+), 6 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index ff3edb8..9c7113a 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -152,7 +152,7 @@ func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) continue } size += 4 // 4 bytes payload length - txPayloadLength, err := GetTxPayloadLength(txData) + txPayloadLength, err := getTxPayloadLength(txData) if err != nil { return 0, err } @@ -172,7 +172,7 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { continue } - txPayloadLength, err := GetTxPayloadLength(txData) + txPayloadLength, err := getTxPayloadLength(txData) if err != nil { return 0, err } diff --git a/encoding/da.go b/encoding/da.go index b55f79e..e4be4d6 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -11,6 +11,7 @@ import ( "github.com/scroll-tech/go-ethereum/common/hexutil" 
"github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" ) // BLSModulus is the BLS modulus defined in EIP-4844. @@ -442,10 +443,26 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { return memoryCost } -func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { +func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return 0, err } return uint64(len(rlpTxData)), nil } + +// GetCodecVersion determines the codec version based on hain configuration, block number, and timestamp. +func GetCodecVersion(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) CodecVersion { + switch { + case startBlockNumber.Uint64() == 0 || !chainCfg.IsBernoulli(startBlockNumber): + return CodecV0 // codecv0: genesis batch or batches before Bernoulli + case !chainCfg.IsCurie(startBlockNumber): + return CodecV1 // codecv1: batches after Bernoulli and before Curie + case !chainCfg.IsDarwin(startBlockTimestamp): + return CodecV2 // codecv2: batches after Curie and before Darwin + case !chainCfg.IsDarwinV2(startBlockTimestamp): + return CodecV3 // codecv3: batches after Darwin + default: + return CodecV4 // codecv4: batches after DarwinV2 + } +} diff --git a/go.mod b/go.mod index 8d5696e..cdb9440 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/scroll-tech/da-codec go 1.21 require ( - github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4 + github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 29a3574..49f5013 100644 --- a/go.sum +++ b/go.sum @@ -72,8 +72,8 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4 h1:gheWXra3HdZsz6q+w4LrXy8ybHOO6/t6Kb/V64bR5wE= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= +github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e h1:WCJ+UzfrM0jJSirXEYjWCJ89gr5EoRb4KfKb0mo6+Wo= +github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= From 8e763ddc2c0e1f17024d88028805cee07bb35efc Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 00:56:03 +0800 Subject: [PATCH 023/126] add BlobVersionedHashes --- encoding/dabatch.go | 15 +++++++++++++++ encoding/encoding.go | 1 + 2 files changed, 16 insertions(+) diff --git a/encoding/dabatch.go b/encoding/dabatch.go index f284c86..b4b9afa 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -51,6 +51,11 @@ func (b *DABatchV0) Blob() *kzg4844.Blob { return nil } +// BlobVersionedHashes returns the blob versioned hashes of the batch. 
+func (b *DABatchV0) BlobVersionedHashes() []common.Hash { + return nil +} + // BlobBytes returns the blob bytes of the batch. func (b *DABatchV0) BlobBytes() []byte { return nil @@ -129,6 +134,11 @@ func (b *DABatchV1) Blob() *kzg4844.Blob { return b.blob } +// BlobVersionedHashes returns the blob versioned hashes of the batch. +func (b *DABatchV1) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.BlobVersionedHash} +} + // BlobBytes returns the blob bytes of the batch. func (b *DABatchV1) BlobBytes() []byte { return nil @@ -241,6 +251,11 @@ func (b *DABatchV3) Blob() *kzg4844.Blob { return b.blob } +// BlobVersionedHashes returns the blob versioned hashes of the batch. +func (b *DABatchV3) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.BlobVersionedHash} +} + // BlobBytes returns the blob bytes of the batch. func (b *DABatchV3) BlobBytes() []byte { return b.blobBytes diff --git a/encoding/encoding.go b/encoding/encoding.go index 7bbdaac..24ff3b9 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -20,6 +20,7 @@ type DABatch interface { BlobDataProofForPointEvaluation() ([]byte, error) Blob() *kzg4844.Blob BlobBytes() []byte + BlobVersionedHashes() []common.Hash } // Codec represents the interface for encoding and decoding DA-related structures. From 98d5635acc5968b28fb7eb57a14cc719e67b1c8f Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 16:08:57 +0800 Subject: [PATCH 024/126] rename encoding.go to interfaces.go --- encoding/{encoding.go => interfaces.go} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename encoding/{encoding.go => interfaces.go} (100%) diff --git a/encoding/encoding.go b/encoding/interfaces.go similarity index 100% rename from encoding/encoding.go rename to encoding/interfaces.go From bdb98f868abee7c15e93075f3cdc37f54c878949 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 23 Aug 2024 01:10:52 +0800 Subject: [PATCH 025/126] add NewDABatchWithExpectedBlobVersionedHashes --- encoding/codecv0.go | 6 ++++++ encoding/codecv1.go | 16 ++++++++++++++++ encoding/codecv2.go | 16 ++++++++++++++++ encoding/codecv3.go | 16 ++++++++++++++++ encoding/codecv4.go | 21 +++++++++++++++++++++ encoding/interfaces.go | 1 + 6 files changed, 76 insertions(+) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 9c7113a..8bf3bce 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -123,6 +123,12 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, _ []common.Hash) (DABatch, error) { + return o.NewDABatch(batch) +} + // NewDABatchFromBytes decodes the given byte slice into a DABatch. func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 5308473..426472a 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "math/big" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -101,6 +102,21 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. 
+func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index b496073..f3acfe5 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -7,6 +7,7 @@ import ( "errors" "fmt" "math/big" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -79,6 +80,21 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 70fb3c2..b786a49 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -4,6 +4,7 @@ import ( "encoding/binary" "errors" "fmt" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -85,6 +86,21 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { return (&DACodecV2{}).constructBlobPayload(chunks, useMockTxData) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 2856ca2..b34277f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -7,6 +7,7 @@ import ( "errors" "fmt" "math/big" + "reflect" "sync/atomic" "github.com/scroll-tech/go-ethereum/common" @@ -96,6 +97,26 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. 
+func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + o.SetCompression(true) + daBatch, err := o.NewDABatch(batch) + if err != nil || reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + o.SetCompression(false) + daBatch, err = o.NewDABatch(batch) + if err != nil { + return nil, err + } + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, fmt.Errorf("blob versioned hashes do not match, expected %v, got %v", hashes, daBatch.BlobVersionedHashes()) + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 24ff3b9..f87e281 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -29,6 +29,7 @@ type Codec interface { NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) + NewDABatchWithExpectedBlobVersionedHashes(*Batch, []common.Hash) (DABatch, error) EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) From 08d60a313219428d51b3691f30f3caf8f29a36d1 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 23 Aug 2024 01:31:08 +0800 Subject: [PATCH 026/126] tweak --- encoding/codecv0.go | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 8bf3bce..ed173bd 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "math" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -125,8 +126,17 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, _ []common.Hash) (DABatch, error) { - return o.NewDABatch(batch) +func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil } // NewDABatchFromBytes decodes the given byte slice into a DABatch. 
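For illustration, here is a minimal usage sketch for the NewDABatchWithExpectedBlobVersionedHashes API added in the two patches above: rebuild a DABatch from locally assembled chunk data and reject it when its blob versioned hashes differ from those observed in the L1 commit transaction. The choice of DACodecV3 and the inputs (a populated encoding.Batch plus hashes decoded from the commit transaction) are assumptions for the example, not part of the patches.

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/common"
)

// verifyBatch is an illustrative sketch: it reconstructs the DABatch for a
// locally assembled batch and fails if the resulting blob versioned hashes
// do not match the ones observed on L1.
func verifyBatch(batch *encoding.Batch, observed []common.Hash) error {
	codec := &encoding.DACodecV3{} // assumption: the codec matching the batch's fork
	if _, err := codec.NewDABatchWithExpectedBlobVersionedHashes(batch, observed); err != nil {
		return fmt.Errorf("batch %d failed versioned hash check: %w", batch.Index, err)
	}
	return nil
}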
From 2d425d8a636a9350201c962a7db85e048589e6ad Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 24 Aug 2024 13:15:42 +0800 Subject: [PATCH 027/126] fix a bug --- encoding/codecv0.go | 2 +- encoding/codecv1.go | 2 +- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/codecv4.go | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index ed173bd..c937bb0 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -132,7 +132,7 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 426472a..faf786c 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -110,7 +110,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f3acfe5..b4ced56 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -88,7 +88,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index b786a49..05c1df7 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -94,7 +94,7 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index b34277f..0dbe73f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -102,7 +102,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { o.SetCompression(true) daBatch, err := o.NewDABatch(batch) - if err != nil || reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { o.SetCompression(false) daBatch, err = o.NewDABatch(batch) if err != nil { @@ -110,7 +110,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, fmt.Errorf("blob versioned hashes do not match, expected %v, got %v", hashes, daBatch.BlobVersionedHashes()) } From c1e4a0d0af49b34856720ecebc41ae1e7ec7c0b0 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 24 Aug 2024 14:48:22 +0800 Subject: [PATCH 028/126] add more logs --- encoding/codecv0.go | 2 +- encoding/codecv1.go | 2 +- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/codecv4.go | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 
c937bb0..a0675ae 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -133,7 +133,7 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index faf786c..7785914 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -111,7 +111,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index b4ced56..a04bad3 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -89,7 +89,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 05c1df7..3bc0a37 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -95,7 +95,7 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0dbe73f..9b88c0f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -111,7 +111,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match, expected %v, got %v", hashes, daBatch.BlobVersionedHashes()) + return nil, fmt.Errorf("blob versioned hashes do not match. 
Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil From e5df84634ee0ece08581bd1069ab7dcfce199f66 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:02:53 +0800 Subject: [PATCH 029/126] add DecodeDAChunks --- encoding/codecv0.go | 42 ++++++++++++++++++++++++++++++++++++------ encoding/codecv1.go | 10 +++++++--- encoding/codecv2.go | 5 +++++ encoding/codecv3.go | 5 +++++ encoding/codecv4.go | 5 +++++ encoding/da.go | 6 ++++++ encoding/dablock.go | 6 +++--- encoding/interfaces.go | 2 ++ 8 files changed, 69 insertions(+), 12 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index a0675ae..f600bf7 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -174,7 +174,7 @@ func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) } size += txPayloadLength } - size += 60 // 60 bytes BlockContext + size += BlockContextByteSize return size, nil } @@ -197,8 +197,7 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += GetKeccak256Gas(txPayloadLength) // l2 tx hash } - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -243,9 +242,9 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil @@ -333,3 +332,34 @@ func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // SetCompression enables or disables compression. 
func (o *DACodecV0) SetCompression(enable bool) {} + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + var chunks []DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) + } + + blocks := make([]*DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + BlockContextByteSize + blocks[i] = &DABlock{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, &DAChunkV0{ + Blocks: blocks, + }) + } + return chunks, nil +} diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 7785914..dce865e 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -282,8 +282,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } } - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -303,7 +302,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(60 * len(c.Blocks)), nil + return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
@@ -436,3 +435,8 @@ func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe dataHash := crypto.Keccak256Hash(dataBytes) return dataHash, nil } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV0{}).DecodeDAChunks(bytes) +} diff --git a/encoding/codecv2.go b/encoding/codecv2.go index a04bad3..1c9934f 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -332,3 +332,8 @@ func (o *DACodecV2) SetCompression(enable bool) {} func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { return (&DACodecV1{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV1{}).DecodeDAChunks(bytes) +} diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 3bc0a37..cbbe5a7 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -191,3 +191,8 @@ func (o *DACodecV3) SetCompression(enable bool) {} func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { return (&DACodecV2{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV2{}).DecodeDAChunks(bytes) +} diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 9b88c0f..4fe10da 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -371,3 +371,8 @@ func (o *DACodecV4) SetCompression(enable bool) { func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { return (&DACodecV3{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV3{}).DecodeDAChunks(bytes) +} diff --git a/encoding/da.go b/encoding/da.go index 7abdd1a..567579a 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -18,6 +18,12 @@ var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80 // CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. const CalldataNonZeroByteGas = 16 +// BlockContextByteSize is the size of the block context in bytes. +const BlockContextByteSize = 60 + +// TxLenByteSize is the size of the transaction length in bytes. +const TxLenByteSize = 4 + // Block represents an L2 block. type Block struct { Header *types.Header diff --git a/encoding/dablock.go b/encoding/dablock.go index 7ae69ab..63dbf07 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -18,7 +18,7 @@ type DABlock struct { // Encode serializes the DABlock into a slice of bytes. func (b *DABlock) Encode() []byte { - bytes := make([]byte, 60) + bytes := make([]byte, BlockContextByteSize) binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) if b.BaseFee != nil { @@ -32,8 +32,8 @@ func (b *DABlock) Encode() []byte { // Decode populates the fields of a DABlock from a byte slice. 
func (b *DABlock) Decode(bytes []byte) error { - if len(bytes) != 60 { - return errors.New("block encoding is not 60 bytes long") + if len(bytes) != BlockContextByteSize { + return errors.New("block encoding is not BlockContextByteSize bytes long") } b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index f87e281..effbd7d 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -31,6 +31,8 @@ type Codec interface { NewDABatchFromBytes([]byte) (DABatch, error) NewDABatchWithExpectedBlobVersionedHashes(*Batch, []common.Hash) (DABatch, error) + DecodeDAChunks(chunks [][]byte) ([]DAChunk, error) + EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) From ecaca71c61f881e189b2930af280d92e4b809223 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:15:03 +0800 Subject: [PATCH 030/126] add BlockRange interface --- encoding/dachunk.go | 8 ++++++++ encoding/interfaces.go | 1 + 2 files changed, 9 insertions(+) diff --git a/encoding/dachunk.go b/encoding/dachunk.go index d4533df..6462462 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -101,6 +101,14 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { return hash, nil } +func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { + if len(c.Blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil +} + // DAChunkV1 groups consecutive DABlocks with their transactions. type DAChunkV1 DAChunkV0 diff --git a/encoding/interfaces.go b/encoding/interfaces.go index effbd7d..125b0fc 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -11,6 +11,7 @@ import ( type DAChunk interface { Encode() ([]byte, error) Hash() (common.Hash, error) + BlockRange() (uint64, uint64, error) } // DABatch contains metadata about a batch of DAChunks. From 484fa5959ab5c588460378d623a85db18c196679 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:34:17 +0800 Subject: [PATCH 031/126] fix --- encoding/dachunk.go | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/encoding/dachunk.go b/encoding/dachunk.go index 6462462..90f842a 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -101,6 +101,7 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { return hash, nil } +// BlockRange returns the block range of the DAChunk. func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { if len(c.Blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") @@ -159,6 +160,15 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { return hash, nil } +// BlockRange returns the block range of the DAChunk. +func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { + if len(c.Blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil +} + // DAChunkV2 groups consecutive DABlocks with their transactions. 
type DAChunkV2 = DAChunkV1 From f1fe4c86c1d706f32e0805940a9693b99397613b Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:59:26 +0800 Subject: [PATCH 032/126] add version check --- encoding/codecv0.go | 4 ++++ encoding/codecv1.go | 4 ++++ encoding/codecv2.go | 4 ++++ encoding/codecv3.go | 4 ++++ encoding/codecv4.go | 4 ++++ 5 files changed, 20 insertions(+) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index f600bf7..8aaa6d1 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -145,6 +145,10 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV0 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0) + } + b := &DABatchV0{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv1.go b/encoding/codecv1.go index dce865e..fe47438 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -213,6 +213,10 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV1 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) + } + b := &DABatchV1{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 1c9934f..f01ccf5 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -211,6 +211,10 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV2 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) + } + b := &DABatchV2{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv3.go b/encoding/codecv3.go index cbbe5a7..bef1272 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -113,6 +113,10 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV3 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) + } + b := &DABatchV3{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 4fe10da..0852aa4 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -238,6 +238,10 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV4 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) + } + b := &DABatchV4{ DABatchBase: DABatchBase{ Version: data[0], From 97711e21ec9486e2fe9c54cabf6e879ad6c88808 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:02:27 +0800 Subject: [PATCH 033/126] add Version --- encoding/codecv0.go | 5 +++++ encoding/codecv1.go | 5 +++++ encoding/codecv2.go | 5 +++++ encoding/codecv3.go | 5 +++++ encoding/codecv4.go | 5 +++++ encoding/interfaces.go | 2 ++ 6 files changed, 27 insertions(+) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 8aaa6d1..4f01925 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -14,6 
+14,11 @@ import ( type DACodecV0 struct{} +// Version returns the codec version. +func (o *DACodecV0) Version() CodecVersion { + return CodecV0 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/codecv1.go b/encoding/codecv1.go index fe47438..4ccb14b 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -19,6 +19,11 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 +// Version returns the codec version. +func (o *DACodecV1) Version() CodecVersion { + return CodecV1 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f01ccf5..db7d122 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -23,6 +23,11 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 +// Version returns the codec version. +func (o *DACodecV2) Version() CodecVersion { + return CodecV2 +} + // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index bef1272..489aec5 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -15,6 +15,11 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 +// Version returns the codec version. +func (o *DACodecV3) Version() CodecVersion { + return CodecV3 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0852aa4..8f49468 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -26,6 +26,11 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 +// Version returns the codec version. +func (o *DACodecV4) Version() CodecVersion { + return CodecV4 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 125b0fc..1082566 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -26,6 +26,8 @@ type DABatch interface { // Codec represents the interface for encoding and decoding DA-related structures. 
type Codec interface { + Version() CodecVersion + NewDABlock(*Block, uint64) (*DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) From 2a63797f7c00370e8f51fe3e7ca9a659a0e47390 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:06:47 +0800 Subject: [PATCH 034/126] remove DABatchBase --- encoding/codecv0.go | 32 ++++++++++++++------------------ encoding/codecv1.go | 4 ++-- encoding/codecv2.go | 4 ++-- encoding/codecv3.go | 4 ++-- encoding/codecv4.go | 4 ++-- encoding/dabatch.go | 13 ++++--------- 6 files changed, 26 insertions(+), 35 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 4f01925..36fa687 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -115,15 +115,13 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV0{ - DABatchBase: DABatchBase{ - Version: uint8(CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - }, + Version: uint8(CodecV0), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, } return &daBatch, nil @@ -155,15 +153,13 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV0{ - DABatchBase: DABatchBase{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[57:89]), - SkippedL1MessageBitmap: data[89:], - }, + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[57:89]), + SkippedL1MessageBitmap: data[89:], } return b, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 4ccb14b..4c3db78 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -90,7 +90,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV1{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV1), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -223,7 +223,7 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV1{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/codecv2.go b/encoding/codecv2.go index db7d122..f75a3a1 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -68,7 +68,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV2{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV2), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -221,7 +221,7 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV2{ - 
DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 489aec5..788fea1 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -67,7 +67,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV3{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV3), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -123,7 +123,7 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV3{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 8f49468..6f9e029 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -78,7 +78,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV4{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV4), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -248,7 +248,7 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV4{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 83a27a5..81f0358 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -11,8 +11,8 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABatchBase contains common metadata for all versions of DABatch -type DABatchBase struct { +// DABatchV0 contains metadata about a batch of DAChunks. +type DABatchV0 struct { Version uint8 BatchIndex uint64 L1MessagePopped uint64 @@ -22,11 +22,6 @@ type DABatchBase struct { SkippedL1MessageBitmap []byte } -// DABatchV0 contains metadata about a batch of DAChunks. -type DABatchV0 struct { - DABatchBase -} - // Encode serializes the DABatch into bytes. func (b *DABatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) @@ -68,7 +63,7 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { - DABatchBase + DABatchV0 BlobVersionedHash common.Hash @@ -143,7 +138,7 @@ type DABatchV2 = DABatchV1 // DABatchV3 contains metadata about a batch of DAChunks. 
type DABatchV3 struct { - DABatchBase + DABatchV0 BlobVersionedHash common.Hash `json:"blob_versioned_hash"` LastBlockTimestamp uint64 `json:"last_block_timestamp"` From 87c45377ae34e5c255ed723bee3c1398f71d94e4 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:26:51 +0800 Subject: [PATCH 035/126] add DABlock --- encoding/codecv0.go | 10 +++++----- encoding/codecv1.go | 4 ++-- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/dablock.go | 27 ++++++++++++++++++++------- encoding/dachunk.go | 6 +++--- encoding/interfaces.go | 7 +++++++ 7 files changed, 39 insertions(+), 19 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 36fa687..2a779b7 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -20,7 +20,7 @@ func (o *DACodecV0) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -38,7 +38,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := &DABlock{ + daBlock := &DABlockV0{ BlockNumber: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, @@ -52,7 +52,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []*DABlock + var blocks []DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -351,11 +351,11 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) } - blocks := make([]*DABlock, numBlocks) + blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlock{} + blocks[i] = &DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 4c3db78..f6260d3 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -25,7 +25,7 @@ func (o *DACodecV1) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) } @@ -39,7 +39,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []*DABlock + var blocks []DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f75a3a1..4b11bff 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -29,7 +29,7 @@ func (o *DACodecV2) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 788fea1..6efafae 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -21,7 +21,7 @@ func (o *DACodecV3) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/dablock.go b/encoding/dablock.go index 63dbf07..adda6a0 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,9 +6,9 @@ import ( "math/big" ) -// DABlock represents a Data Availability Block. -type DABlock struct { - BlockNumber uint64 +// DABlockV0 represents a Data Availability Block. +type DABlockV0 struct { + Number uint64 Timestamp uint64 BaseFee *big.Int GasLimit uint64 @@ -17,9 +17,9 @@ type DABlock struct { } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlock) Encode() []byte { +func (b *DABlockV0) Encode() []byte { bytes := make([]byte, BlockContextByteSize) - binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) + binary.BigEndian.PutUint64(bytes[0:], b.Number) binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) if b.BaseFee != nil { binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) @@ -31,12 +31,12 @@ func (b *DABlock) Encode() []byte { } // Decode populates the fields of a DABlock from a byte slice. -func (b *DABlock) Decode(bytes []byte) error { +func (b *DABlockV0) Decode(bytes []byte) error { if len(bytes) != BlockContextByteSize { return errors.New("block encoding is not BlockContextByteSize bytes long") } - b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) + b.Number = binary.BigEndian.Uint64(bytes[0:8]) b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) @@ -45,3 +45,16 @@ func (b *DABlock) Decode(bytes []byte) error { return nil } + +func (b *DABlockV0) BlockNumber() uint64 { + return b.Number +} + +// DABlockV1 represents a Data Availability Block. +type DABlockV1 = DABlockV0 + +// DABlockV2 represents a Data Availability Block. 
+type DABlockV2 = DABlockV1 + +// DABlockV3 represents a Data Availability Block. +type DABlockV3 = DABlockV2 diff --git a/encoding/dachunk.go b/encoding/dachunk.go index 90f842a..a90c2eb 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -14,7 +14,7 @@ import ( // DAChunk groups consecutive DABlocks with their transactions. type DAChunkV0 struct { - Blocks []*DABlock + Blocks []DABlock Transactions [][]*types.TransactionData } @@ -107,7 +107,7 @@ func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil + return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil } // DAChunkV1 groups consecutive DABlocks with their transactions. @@ -166,7 +166,7 @@ func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil + return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil } // DAChunkV2 groups consecutive DABlocks with their transactions. diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 1082566..c8cc064 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -7,6 +7,13 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +// DABlock represents a Data Availability Block. +type DABlock interface { + Encode() []byte + Decode([]byte) error + BlockNumber() uint64 +} + // DAChunk groups consecutive DABlocks with their transactions. type DAChunk interface { Encode() ([]byte, error) From 2ac7825e6100d94accaf115eb91a9850e86ea64c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:31:21 +0800 Subject: [PATCH 036/126] fixes --- encoding/codecv4.go | 2 +- encoding/interfaces.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 6f9e029..e387986 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -32,7 +32,7 @@ func (o *DACodecV4) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/interfaces.go b/encoding/interfaces.go index c8cc064..6c011b3 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -35,7 +35,7 @@ type DABatch interface { type Codec interface { Version() CodecVersion - NewDABlock(*Block, uint64) (*DABlock, error) + NewDABlock(*Block, uint64) (DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) From 8a6c35fb091ee15c823e58bee5f401debf863d8e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:34:49 +0800 Subject: [PATCH 037/126] fix --- encoding/codecv0.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 2a779b7..c675b95 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -39,7 +39,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } daBlock := &DABlockV0{ - BlockNumber: block.Header.Number.Uint64(), + Number: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, GasLimit: block.Header.GasLimit, From 83f6b627f52453f5770186ee163e3834fb92173c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:48:22 +0800 Subject: [PATCH 038/126] add CodecFromVersion and CodecFromConfig --- encoding/interfaces.go | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 6c011b3..fa78f4f 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -2,9 +2,11 @@ package encoding import ( "fmt" + "math/big" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" ) // DABlock represents a Data Availability Block. @@ -66,8 +68,8 @@ const ( CodecV4 ) -// GetCodec returns the appropriate codec for the given version. -func GetCodec(version CodecVersion) (Codec, error) { +// CodecFromVersion returns the appropriate codec for the given version. +func CodecFromVersion(version CodecVersion) (Codec, error) { switch version { case CodecV0: return &DACodecV0{}, nil @@ -83,3 +85,18 @@ func GetCodec(version CodecVersion) (Codec, error) { return nil, fmt.Errorf("unsupported codec version: %d", version) } } + +// CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
+func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec {
+	if chainCfg.IsDarwinV2(startBlockTimestamp) {
+		return &DACodecV4{}
+	} else if chainCfg.IsDarwin(startBlockTimestamp) {
+		return &DACodecV3{}
+	} else if chainCfg.IsCurie(startBlockNumber) {
+		return &DACodecV2{}
+	} else if chainCfg.IsBernoulli(startBlockNumber) {
+		return &DACodecV1{}
+	} else {
+		return &DACodecV0{}
+	}
+}

From c4a249537b1caf00216e37cbef1aaf86ea463b19 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 6 Sep 2024 00:49:27 +0800
Subject: [PATCH 039/126] remove GetCodecVersion

---
 encoding/da.go | 17 -----------------
 1 file changed, 17 deletions(-)

diff --git a/encoding/da.go b/encoding/da.go
index 567579a..b09a677 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -9,7 +9,6 @@ import (
 	"github.com/scroll-tech/go-ethereum/common/hexutil"
 	"github.com/scroll-tech/go-ethereum/core/types"
 	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
-	"github.com/scroll-tech/go-ethereum/params"
 )
 
 // BLSModulus is the BLS modulus defined in EIP-4844.
@@ -415,22 +414,6 @@ func getTxPayloadLength(txData *types.TransactionData) (uint64, error) {
 	return uint64(len(rlpTxData)), nil
 }
 
-// GetCodecVersion determines the codec version based on chain configuration, block number, and timestamp.
-func GetCodecVersion(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) CodecVersion {
-	switch {
-	case startBlockNumber.Uint64() == 0 || !chainCfg.IsBernoulli(startBlockNumber):
-		return CodecV0 // codecv0: genesis batch or batches before Bernoulli
-	case !chainCfg.IsCurie(startBlockNumber):
-		return CodecV1 // codecv1: batches after Bernoulli and before Curie
-	case !chainCfg.IsDarwin(startBlockTimestamp):
-		return CodecV2 // codecv2: batches after Curie and before Darwin
-	case !chainCfg.IsDarwinV2(startBlockTimestamp):
-		return CodecV3 // codecv3: batches after Darwin
-	default:
-		return CodecV4 // codecv4: batches after DarwinV2
-	}
-}

From 10af8e74d5360b8082e7da0c389a57f391ec0ebc Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 18 Sep 2024 21:09:36 +0800
Subject: [PATCH 040/126] fix typos

---
 encoding/da.go | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/encoding/da.go b/encoding/da.go
index b09a677..b634967 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -204,11 +204,11 @@ func (c *Chunk) NumL2Transactions() uint64 {
 
 // L2GasUsed calculates the total gas of L2 transactions in a Chunk.
 func (c *Chunk) L2GasUsed() uint64 {
-	var totalTxNum uint64
+	var totalGasUsed uint64
 	for _, block := range c.Blocks {
-		totalTxNum += block.Header.GasUsed
+		totalGasUsed += block.Header.GasUsed
 	}
-	return totalTxNum
+	return totalGasUsed
 }
 
 // StateRoot gets the state root after committing/finalizing the batch.
From 1594e0fc92e7b345363ec6a7d677a50887c9d002 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 23 Sep 2024 00:25:19 +0800 Subject: [PATCH 041/126] make Block fields internal --- encoding/codecv0.go | 18 ++++----- encoding/dablock.go | 91 ++++++++++++++++++++++++++++-------------- encoding/dachunk.go | 4 +- encoding/interfaces.go | 2 +- 4 files changed, 72 insertions(+), 43 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index c675b95..736510d 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -38,14 +38,14 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := &DABlockV0{ - Number: block.Header.Number.Uint64(), - Timestamp: block.Header.Time, - BaseFee: block.Header.BaseFee, - GasLimit: block.Header.GasLimit, - NumTransactions: uint16(numTransactions), - NumL1Messages: uint16(numL1Messages), - } + daBlock := NewDABlockImpl( + block.Header.Number.Uint64(), + block.Header.Time, + block.Header.BaseFee, + block.Header.GasLimit, + uint16(numTransactions), + uint16(numL1Messages), + ) return daBlock, nil } @@ -355,7 +355,7 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &DABlockImpl{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err diff --git a/encoding/dablock.go b/encoding/dablock.go index adda6a0..bea446c 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,55 +6,84 @@ import ( "math/big" ) -// DABlockV0 represents a Data Availability Block. -type DABlockV0 struct { - Number uint64 - Timestamp uint64 - BaseFee *big.Int - GasLimit uint64 - NumTransactions uint16 - NumL1Messages uint16 +// DABlockImpl represents a Data Availability Block. +type DABlockImpl struct { + number uint64 + timestamp uint64 + baseFee *big.Int + gasLimit uint64 + numTransactions uint16 + numL1Messages uint16 +} + +// NewDABlockImpl is a constructor function for DABlockImpl that initializes the internal fields. +func NewDABlockImpl(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockImpl { + return &DABlockImpl{ + number: number, + timestamp: timestamp, + baseFee: baseFee, + gasLimit: gasLimit, + numTransactions: numTransactions, + numL1Messages: numL1Messages, + } } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlockV0) Encode() []byte { +func (b *DABlockImpl) Encode() []byte { bytes := make([]byte, BlockContextByteSize) - binary.BigEndian.PutUint64(bytes[0:], b.Number) - binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) - if b.BaseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) + binary.BigEndian.PutUint64(bytes[0:], b.number) + binary.BigEndian.PutUint64(bytes[8:], b.timestamp) + if b.baseFee != nil { + binary.BigEndian.PutUint64(bytes[40:], b.baseFee.Uint64()) } - binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) + binary.BigEndian.PutUint64(bytes[48:], b.gasLimit) + binary.BigEndian.PutUint16(bytes[56:], b.numTransactions) + binary.BigEndian.PutUint16(bytes[58:], b.numL1Messages) return bytes } // Decode populates the fields of a DABlock from a byte slice. 
-func (b *DABlockV0) Decode(bytes []byte) error { +func (b *DABlockImpl) Decode(bytes []byte) error { if len(bytes) != BlockContextByteSize { return errors.New("block encoding is not BlockContextByteSize bytes long") } - b.Number = binary.BigEndian.Uint64(bytes[0:8]) - b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) + b.number = binary.BigEndian.Uint64(bytes[0:8]) + b.timestamp = binary.BigEndian.Uint64(bytes[8:16]) + b.baseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) + b.gasLimit = binary.BigEndian.Uint64(bytes[48:56]) + b.numTransactions = binary.BigEndian.Uint16(bytes[56:58]) + b.numL1Messages = binary.BigEndian.Uint16(bytes[58:60]) return nil } -func (b *DABlockV0) BlockNumber() uint64 { - return b.Number +// Number returns the block number. +func (b *DABlockImpl) Number() uint64 { + return b.number +} + +// Timestamp returns the block timestamp. +func (b *DABlockImpl) Timestamp() uint64 { + return b.timestamp } -// DABlockV1 represents a Data Availability Block. -type DABlockV1 = DABlockV0 +// BaseFee returns the block base fee. +func (b *DABlockImpl) BaseFee() *big.Int { + return b.baseFee +} -// DABlockV2 represents a Data Availability Block. -type DABlockV2 = DABlockV1 +// GasLimit returns the block gas limit. +func (b *DABlockImpl) GasLimit() uint64 { + return b.gasLimit +} -// DABlockV3 represents a Data Availability Block. -type DABlockV3 = DABlockV2 +// NumTransactions returns the number of transactions in the block. +func (b *DABlockImpl) NumTransactions() uint16 { + return b.numTransactions +} + +// NumL1Messages returns the number of L1 messages in the block. +func (b *DABlockImpl) NumL1Messages() uint16 { + return b.numL1Messages +} diff --git a/encoding/dachunk.go b/encoding/dachunk.go index a90c2eb..7858345 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -107,7 +107,7 @@ func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil + return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil } // DAChunkV1 groups consecutive DABlocks with their transactions. @@ -166,7 +166,7 @@ func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil + return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil } // DAChunkV2 groups consecutive DABlocks with their transactions. diff --git a/encoding/interfaces.go b/encoding/interfaces.go index fa78f4f..43c5967 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -13,7 +13,7 @@ import ( type DABlock interface { Encode() []byte Decode([]byte) error - BlockNumber() uint64 + Number() uint64 } // DAChunk groups consecutive DABlocks with their transactions. 
From 1f9facd82a2f741013734bae299aab58847b88df Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 23 Sep 2024 00:51:38 +0800 Subject: [PATCH 042/126] make chunk fields internal --- encoding/codecv0.go | 11 +++------ encoding/codecv1.go | 7 ++---- encoding/dachunk.go | 59 +++++++++++++++++++++++++-------------------- 3 files changed, 38 insertions(+), 39 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 736510d..552a95a 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -77,12 +77,9 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := DAChunkV0{ - Blocks: blocks, - Transactions: txs, - } + daChunk := NewDAChunkV0(blocks, txs) - return &daChunk, nil + return daChunk, nil } // NewDABatch creates a DABatch from the provided Batch. @@ -362,9 +359,7 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { } } - chunks = append(chunks, &DAChunkV0{ - Blocks: blocks, - }) + chunks = append(chunks, NewDAChunkV0(blocks, nil)) } return chunks, nil } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index f6260d3..beac048 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -52,12 +52,9 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := DAChunkV1{ - Blocks: blocks, - Transactions: txs, - } + daChunk := NewDAChunkV1(blocks, txs) - return &daChunk, nil + return daChunk, nil } // NewDABatch creates a DABatch from the provided Batch. diff --git a/encoding/dachunk.go b/encoding/dachunk.go index 7858345..61a03e4 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -12,32 +12,40 @@ import ( "github.com/scroll-tech/go-ethereum/crypto" ) -// DAChunk groups consecutive DABlocks with their transactions. +// DAChunkV0 groups consecutive DABlocks with their transactions. type DAChunkV0 struct { - Blocks []DABlock - Transactions [][]*types.TransactionData + blocks []DABlock + transactions [][]*types.TransactionData +} + +// NewDAChunkV0 is a constructor for DAChunkV0, initializing with blocks and transactions. +func NewDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *DAChunkV0 { + return &DAChunkV0{ + blocks: blocks, + transactions: transactions, + } } // Encode serializes the DAChunk into a slice of bytes. func (c *DAChunkV0) Encode() ([]byte, error) { - if len(c.Blocks) == 0 { + if len(c.blocks) == 0 { return nil, errors.New("number of blocks is 0") } - if len(c.Blocks) > 255 { + if len(c.blocks) > 255 { return nil, errors.New("number of blocks exceeds 1 byte") } var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + chunkBytes = append(chunkBytes, byte(len(c.blocks))) var l2TxDataBytes []byte - for _, block := range c.Blocks { + for _, block := range c.blocks { chunkBytes = append(chunkBytes, block.Encode()...) } - for _, blockTxs := range c.Transactions { + for _, blockTxs := range c.transactions { for _, txData := range blockTxs { if txData.Type == types.L1MessageTxType { continue @@ -78,7 +86,7 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { } // concatenate l1 and l2 tx hashes - for _, blockTxs := range c.Transactions { + for _, blockTxs := range c.transactions { var l1TxHashes []byte var l2TxHashes []byte for _, txData := range blockTxs { @@ -103,22 +111,30 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { // BlockRange returns the block range of the DAChunk. 
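An aside before the BlockRange hunk that follows (not part of any commit in this series): with the chunk fields unexported as well, a chunk is now built through NewDAChunkV0; passing nil transactions mirrors the DecodeDAChunks call path. A minimal sketch, assuming the package state as of this patch:

package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/da-codec/encoding"
)

func main() {
	blocks := []encoding.DABlock{
		encoding.NewDABlockImpl(100, 1700000000, big.NewInt(1), 30_000_000, 0, 0),
		encoding.NewDABlockImpl(101, 1700000003, big.NewInt(1), 30_000_000, 0, 0),
	}

	chunk := encoding.NewDAChunkV0(blocks, nil) // nil transactions, as in decoding

	first, last, err := chunk.BlockRange()
	if err != nil {
		panic(err)
	}
	fmt.Println(first, last) // 100 101
}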
func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { - if len(c.Blocks) == 0 { + if len(c.blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil } // DAChunkV1 groups consecutive DABlocks with their transactions. type DAChunkV1 DAChunkV0 +// NewDAChunkV1 is a constructor for DAChunkV1, initializing with blocks and transactions. +func NewDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *DAChunkV1 { + return &DAChunkV1{ + blocks: blocks, + transactions: transactions, + } +} + // Encode serializes the DAChunk into a slice of bytes. func (c *DAChunkV1) Encode() ([]byte, error) { var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + chunkBytes = append(chunkBytes, byte(len(c.blocks))) - for _, block := range c.Blocks { + for _, block := range c.blocks { blockBytes := block.Encode() chunkBytes = append(chunkBytes, blockBytes...) } @@ -131,14 +147,14 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { var dataBytes []byte // concatenate block contexts - for _, block := range c.Blocks { + for _, block := range c.blocks { encodedBlock := block.Encode() // only the first 58 bytes are used in the hashing process dataBytes = append(dataBytes, encodedBlock[:58]...) } // concatenate l1 tx hashes - for _, blockTxs := range c.Transactions { + for _, blockTxs := range c.transactions { for _, txData := range blockTxs { if txData.Type != types.L1MessageTxType { continue @@ -162,18 +178,9 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { // BlockRange returns the block range of the DAChunk. func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { - if len(c.Blocks) == 0 { + if len(c.blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil } - -// DAChunkV2 groups consecutive DABlocks with their transactions. -type DAChunkV2 = DAChunkV1 - -// DAChunkV3 groups consecutive DABlocks with their transactions. -type DAChunkV3 = DAChunkV2 - -// DAChunkV4 groups consecutive DABlocks with their transactions. 
-type DAChunkV4 = DAChunkV3 From 451eb68f78c2d38db8b4d1b7fa55ebd40d47d949 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 23 Sep 2024 01:55:19 +0800 Subject: [PATCH 043/126] make batch fields internal and add some tweaks --- encoding/codecv0.go | 64 +++++++------- encoding/codecv1.go | 57 ++++++------ encoding/codecv2.go | 52 ++++++----- encoding/codecv3.go | 65 +++++++------- encoding/codecv4.go | 65 +++++++------- encoding/dabatch.go | 205 +++++++++++++++++++++++++++++++------------- encoding/dablock.go | 26 +++--- 7 files changed, 304 insertions(+), 230 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 552a95a..6f30bed 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -38,13 +38,13 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockImpl( - block.Header.Number.Uint64(), - block.Header.Time, - block.Header.BaseFee, - block.Header.GasLimit, - uint16(numTransactions), - uint16(numL1Messages), + daBlock := NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages ) return daBlock, nil @@ -77,7 +77,10 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV0(blocks, txs) + daChunk := NewDAChunkV0( + blocks, // blocks + txs, // transactions + ) return daChunk, nil } @@ -111,17 +114,17 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := DABatchV0{ - Version: uint8(CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - } + daBatch := NewDABatchV0( + uint8(CodecV0), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + bitmapBytes, // skippedL1MessageBitmap + ) - return &daBatch, nil + return daBatch, nil } // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. 
@@ -149,15 +152,15 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) {
 		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0)
 	}
 
-	b := &DABatchV0{
-		Version:                data[0],
-		BatchIndex:             binary.BigEndian.Uint64(data[1:9]),
-		L1MessagePopped:        binary.BigEndian.Uint64(data[9:17]),
-		TotalL1MessagePopped:   binary.BigEndian.Uint64(data[17:25]),
-		DataHash:               common.BytesToHash(data[25:57]),
-		ParentBatchHash:        common.BytesToHash(data[57:89]),
-		SkippedL1MessageBitmap: data[89:],
-	}
+	b := NewDABatchV0(
+		data[0],                              // version
+		binary.BigEndian.Uint64(data[1:9]),   // batchIndex
+		binary.BigEndian.Uint64(data[9:17]),  // l1MessagePopped
+		binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped
+		common.BytesToHash(data[25:57]),      // dataHash
+		common.BytesToHash(data[57:89]),      // parentBatchHash
+		data[89:],                            // skippedL1MessageBitmap
+	)
 
 	return b, nil
 }
@@ -352,14 +355,17 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 		for i := 0; i < numBlocks; i++ {
 			startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte
 			endIdx := startIdx + BlockContextByteSize
-			blocks[i] = &DABlockImpl{}
+			blocks[i] = &DABlockV0{}
 			err := blocks[i].Decode(chunk[startIdx:endIdx])
 			if err != nil {
 				return nil, err
 			}
 		}
 
-		chunks = append(chunks, NewDAChunkV0(blocks, nil))
+		chunks = append(chunks, NewDAChunkV0(
+			blocks, // blocks
+			nil,    // transactions
+		))
 	}
 	return chunks, nil
 }
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index beac048..4c8ccd2 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -52,7 +52,10 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64)
 		txs = append(txs, block.Transactions)
 	}
 
-	daChunk := NewDAChunkV1(blocks, txs)
+	daChunk := NewDAChunkV1(
+		blocks, // blocks
+		txs,    // transactions
+	)
 
 	return daChunk, nil
 }
@@ -86,22 +89,20 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 		return nil, err
 	}
 
-	daBatch := DABatchV1{
-		DABatchV0: DABatchV0{
-			Version:                uint8(CodecV1),
-			BatchIndex:             batch.Index,
-			L1MessagePopped:        totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
-			TotalL1MessagePopped:   totalL1MessagePoppedAfter,
-			DataHash:               dataHash,
-			ParentBatchHash:        batch.ParentBatchHash,
-			SkippedL1MessageBitmap: bitmapBytes,
-		},
-		BlobVersionedHash: blobVersionedHash,
-		blob:              blob,
-		z:                 z,
-	}
+	daBatch := NewDABatchV1(
+		uint8(CodecV1), // version
+		batch.Index,    // batchIndex
+		totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped
+		totalL1MessagePoppedAfter, // totalL1MessagePopped
+		dataHash,                  // dataHash
+		batch.ParentBatchHash,     // parentBatchHash
+		blobVersionedHash,         // blobVersionedHash
+		bitmapBytes,               // skippedL1MessageBitmap
+		blob,                      // blob
+		z,                         // z
+	)
 
-	return &daBatch, nil
+	return daBatch, nil
 }
 
 // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch.
@@ -219,18 +220,18 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) } - b := &DABatchV1{ - DABatchV0: DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], - }, - BlobVersionedHash: common.BytesToHash(data[57:89]), - } + b := NewDABatchV1( + data[0], // version + binary.BigEndian.Uint64(data[1:9]), // batchIndex + binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped + common.BytesToHash(data[25:57]), // dataHash + common.BytesToHash(data[89:121]), // parentBatchHash + common.BytesToHash(data[57:89]), // blobVersionedHash + data[121:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ) return b, nil } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 4b11bff..3d4052b 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -67,22 +67,20 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := DABatchV2{ - DABatchV0: DABatchV0{ - Version: uint8(CodecV2), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - }, - BlobVersionedHash: blobVersionedHash, - blob: blob, - z: z, - } + daBatch := NewDABatchV1( + uint8(CodecV2), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + ) - return &daBatch, nil + return daBatch, nil } // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. 
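An aside before the next hunk (not part of any commit in this series): the V1/V2 header consumed by NewDABatchFromBytes above is fixed-size up to the trailing bitmap, 121 bytes, so the smallest valid input is an all-fixed header with an empty bitmap. A hedged sketch, assuming CodecV1 has the numeric value 1 and that Encode is available on the returned batch:

package main

import (
	"encoding/binary"
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

func main() {
	header := make([]byte, 121) // fixed fields only, empty skippedL1MessageBitmap
	header[0] = 1               // assumed numeric value of CodecV1
	binary.BigEndian.PutUint64(header[1:9], 7) // batchIndex

	batch, err := (&encoding.DACodecV1{}).NewDABatchFromBytes(header)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(batch.Encode())) // 121: the header round-trips
}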
@@ -220,18 +218,18 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) } - b := &DABatchV2{ - DABatchV0: DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], - }, - BlobVersionedHash: common.BytesToHash(data[57:89]), - } + b := NewDABatchV1( + data[0], // version + binary.BigEndian.Uint64(data[1:9]), // batchIndex + binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped + common.BytesToHash(data[25:57]), // dataHash + common.BytesToHash(data[89:121]), // parentBatchHash + common.BytesToHash(data[57:89]), // blobVersionedHash + data[121:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ) return b, nil } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6efafae..50437a8 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -66,29 +66,20 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - daBatch := DABatchV3{ - DABatchV0: DABatchV0{ - Version: uint8(CodecV3), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - }, - BlobVersionedHash: blobVersionedHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, - } - - daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() - if err != nil { - return nil, err - } - - return &daBatch, nil + return NewDABatchV2( + uint8(CodecV3), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + blobBytes, // blobBytes + ) } // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. 
@@ -122,22 +113,24 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
 		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3)
 	}
 
-	b := &DABatchV3{
-		DABatchV0: DABatchV0{
-			Version:              data[0],
-			BatchIndex:           binary.BigEndian.Uint64(data[1:9]),
-			L1MessagePopped:      binary.BigEndian.Uint64(data[9:17]),
-			TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]),
-			DataHash:             common.BytesToHash(data[25:57]),
-			ParentBatchHash:      common.BytesToHash(data[89:121]),
-		},
-		BlobVersionedHash:  common.BytesToHash(data[57:89]),
-		LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]),
-		BlobDataProof: [2]common.Hash{
+	b := NewDABatchV2WithProof(
+		data[0],                                // Version
+		binary.BigEndian.Uint64(data[1:9]),     // BatchIndex
+		binary.BigEndian.Uint64(data[9:17]),    // L1MessagePopped
+		binary.BigEndian.Uint64(data[17:25]),   // TotalL1MessagePopped
+		binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp
+		common.BytesToHash(data[25:57]),        // DataHash
+		common.BytesToHash(data[89:121]),       // ParentBatchHash
+		common.BytesToHash(data[57:89]),        // BlobVersionedHash
+		nil,                                    // skippedL1MessageBitmap
+		nil,                                    // blob
+		nil,                                    // z
+		nil,                                    // blobBytes
+		[2]common.Hash{ // BlobDataProof
 			common.BytesToHash(data[129:161]),
 			common.BytesToHash(data[161:193]),
 		},
-	}
+	)
 
 	return b, nil
 }
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index e387986..eb4af9c 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -77,29 +77,20 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 	lastChunk := batch.Chunks[len(batch.Chunks)-1]
 	lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1]
 
-	daBatch := DABatchV4{
-		DABatchV0: DABatchV0{
-			Version:                uint8(CodecV4),
-			BatchIndex:             batch.Index,
-			L1MessagePopped:        totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
-			TotalL1MessagePopped:   totalL1MessagePoppedAfter,
-			DataHash:               dataHash,
-			ParentBatchHash:        batch.ParentBatchHash,
-			SkippedL1MessageBitmap: bitmapBytes,
-		},
-		BlobVersionedHash:  blobVersionedHash,
-		LastBlockTimestamp: lastBlock.Header.Time,
-		blob:               blob,
-		z:                  z,
-		blobBytes:          blobBytes,
-	}
-
-	daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit()
-	if err != nil {
-		return nil, err
-	}
-
-	return &daBatch, nil
+	return NewDABatchV2(
+		uint8(CodecV4), // version
+		batch.Index,    // batchIndex
+		totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped
+		totalL1MessagePoppedAfter, // totalL1MessagePopped
+		lastBlock.Header.Time,     // lastBlockTimestamp
+		dataHash,                  // dataHash
+		batch.ParentBatchHash,     // parentBatchHash
+		blobVersionedHash,         // blobVersionedHash
+		bitmapBytes,               // skippedL1MessageBitmap
+		blob,                      // blob
+		z,                         // z
+		blobBytes,                 // blobBytes
+	)
 }
 
 // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch.
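An aside before the next hunk (not part of any commit in this series): NewDABatchFromBytes above and the V2 Encode further down both rely on the same fixed 193-byte header for V3/V4 batches. Collected in one place, the offsets are:

  [0:1]     version
  [1:9]     batchIndex (big-endian uint64)
  [9:17]    l1MessagePopped
  [17:25]   totalL1MessagePopped
  [25:57]   dataHash
  [57:89]   blobVersionedHash
  [89:121]  parentBatchHash
  [121:129] lastBlockTimestamp
  [129:161] blobDataProof[0]
  [161:193] blobDataProof[1]

The skipped-L1-message bitmap is not part of this fixed header, which is why the constructor call above passes nil for it.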
@@ -247,22 +238,24 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) } - b := &DABatchV4{ - DABatchV0: DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[89:121]), - }, - BlobVersionedHash: common.BytesToHash(data[57:89]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), - BlobDataProof: [2]common.Hash{ + b := NewDABatchV2WithProof( + data[0], // Version + binary.BigEndian.Uint64(data[1:9]), // BatchIndex + binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped + binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp + common.BytesToHash(data[25:57]), // DataHash + common.BytesToHash(data[89:121]), // ParentBatchHash + common.BytesToHash(data[57:89]), // BlobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // BlobDataProof common.BytesToHash(data[129:161]), common.BytesToHash(data[161:193]), }, - } + ) return b, nil } diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 81f0358..481ce5c 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -13,25 +13,38 @@ import ( // DABatchV0 contains metadata about a batch of DAChunks. type DABatchV0 struct { - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte + version uint8 + batchIndex uint64 + l1MessagePopped uint64 + totalL1MessagePopped uint64 + dataHash common.Hash + parentBatchHash common.Hash + skippedL1MessageBitmap []byte +} + +// NewDABatchV0 is a constructor for DABatchV0. +func NewDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *DABatchV0 { + return &DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + } } // Encode serializes the DABatch into bytes. 
func (b *DABatchV0) Encode() []byte { - batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.ParentBatchHash[:]) - copy(batchBytes[89:], b.SkippedL1MessageBitmap[:]) + batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) + copy(batchBytes[25:], b.dataHash[:]) + copy(batchBytes[57:], b.parentBatchHash[:]) + copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) return batchBytes } @@ -65,24 +78,40 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { type DABatchV1 struct { DABatchV0 - BlobVersionedHash common.Hash + blobVersionedHash common.Hash + blob *kzg4844.Blob + z *kzg4844.Point +} - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point +// NewDABatchV1 is a constructor for DABatchV1. +func NewDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *DABatchV1 { + return &DABatchV1{ + DABatchV0: DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } } // Encode serializes the DABatch into bytes. func (b *DABatchV1) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) + batchBytes := make([]byte, 121+len(b.skippedL1MessageBitmap)) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) + copy(batchBytes[25:], b.dataHash[:]) + copy(batchBytes[57:], b.blobVersionedHash[:]) + copy(batchBytes[89:], b.parentBatchHash[:]) + copy(batchBytes[121:], b.skippedL1MessageBitmap[:]) return batchBytes } @@ -121,7 +150,7 @@ func (b *DABatchV1) Blob() *kzg4844.Blob { // BlobVersionedHashes returns the blob versioned hashes of the batch. func (b *DABatchV1) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.BlobVersionedHash} + return []common.Hash{b.blobVersionedHash} } // BlobBytes returns the blob bytes of the batch. @@ -134,48 +163,104 @@ func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, nil } -type DABatchV2 = DABatchV1 - -// DABatchV3 contains metadata about a batch of DAChunks. -type DABatchV3 struct { +// DABatchV2 contains metadata about a batch of DAChunks. 
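An aside before the DABatchV2 definition that follows (not part of any commit in this series): the two Encode implementations above fix the header sizes at 89 bytes for V0 and 121 bytes for V1, each followed by the bitmap. A minimal sketch checking that arithmetic through the new constructors:

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/common"
)

func main() {
	bitmap := make([]byte, 32) // one 256-bit bitmap word

	v0 := encoding.NewDABatchV0(0, 12, 3, 10, common.Hash{}, common.Hash{}, bitmap)
	fmt.Println(len(v0.Encode())) // 121 = 89 + 32

	v1 := encoding.NewDABatchV1(1, 12, 3, 10, common.Hash{}, common.Hash{}, common.Hash{}, bitmap, nil, nil)
	fmt.Println(len(v1.Encode())) // 153 = 121 + 32
}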
+type DABatchV2 struct { DABatchV0 - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` + // FIXME: export correct JSON format for prover. + blobVersionedHash common.Hash + lastBlockTimestamp uint64 + blobDataProof [2]common.Hash + blob *kzg4844.Blob + z *kzg4844.Point + blobBytes []byte +} + +// NewDABatchV2 is a constructor for DABatchV2 that calls blobDataProofForPICircuit internally. +func NewDABatchV2(version uint8, + batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, + skippedL1MessageBitmap []byte, + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, +) (*DABatchV2, error) { + daBatch := &DABatchV2{ + DABatchV0: DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + lastBlockTimestamp: lastBlockTimestamp, + blob: blob, + z: z, + blobBytes: blobBytes, + } + + proof, err := daBatch.blobDataProofForPICircuit() + if err != nil { + return nil, err + } - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point + daBatch.blobDataProof = proof - // for batch task - blobBytes []byte + return daBatch, nil +} + +// NewDABatchV2WithProof is a constructor for DABatchV2 that allows directly passing blobDataProof. +func NewDABatchV2WithProof(version uint8, + batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, + skippedL1MessageBitmap []byte, + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, + blobDataProof [2]common.Hash, // Accept blobDataProof directly +) *DABatchV2 { + return &DABatchV2{ + DABatchV0: DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + lastBlockTimestamp: lastBlockTimestamp, + blob: blob, + z: z, + blobBytes: blobBytes, + blobDataProof: blobDataProof, // Set blobDataProof directly + } } // Encode serializes the DABatch into bytes. 
-func (b *DABatchV3) Encode() []byte { +func (b *DABatchV2) Encode() []byte { batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:9], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.totalL1MessagePopped) + copy(batchBytes[25:57], b.dataHash[:]) + copy(batchBytes[57:89], b.blobVersionedHash[:]) + copy(batchBytes[89:121], b.parentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.lastBlockTimestamp) + copy(batchBytes[129:161], b.blobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.blobDataProof[1].Bytes()) return batchBytes } // Hash computes the hash of the serialized DABatch. -func (b *DABatchV3) Hash() common.Hash { +func (b *DABatchV2) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *DABatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -200,7 +285,7 @@ func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -222,18 +307,16 @@ func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatchV3) Blob() *kzg4844.Blob { +func (b *DABatchV2) Blob() *kzg4844.Blob { return b.blob } // BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *DABatchV3) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.BlobVersionedHash} +func (b *DABatchV2) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.blobVersionedHash} } // BlobBytes returns the blob bytes of the batch. -func (b *DABatchV3) BlobBytes() []byte { +func (b *DABatchV2) BlobBytes() []byte { return b.blobBytes } - -type DABatchV4 = DABatchV3 diff --git a/encoding/dablock.go b/encoding/dablock.go index bea446c..baa7d44 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,8 +6,8 @@ import ( "math/big" ) -// DABlockImpl represents a Data Availability Block. -type DABlockImpl struct { +// DABlockV0 represents a Data Availability Block. +type DABlockV0 struct { number uint64 timestamp uint64 baseFee *big.Int @@ -16,9 +16,9 @@ type DABlockImpl struct { numL1Messages uint16 } -// NewDABlockImpl is a constructor function for DABlockImpl that initializes the internal fields. 
-func NewDABlockImpl(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockImpl {
-	return &DABlockImpl{
+// NewDABlockV0 is a constructor function for DABlockV0 that initializes the internal fields.
+func NewDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockV0 {
+	return &DABlockV0{
 		number:          number,
 		timestamp:       timestamp,
 		baseFee:         baseFee,
@@ -29,7 +29,7 @@ func NewDABlockImpl(number uint64, timestamp uint64, baseFee *big.Int, gasLimit
 }
 
 // Encode serializes the DABlock into a slice of bytes.
-func (b *DABlockImpl) Encode() []byte {
+func (b *DABlockV0) Encode() []byte {
 	bytes := make([]byte, BlockContextByteSize)
 	binary.BigEndian.PutUint64(bytes[0:], b.number)
 	binary.BigEndian.PutUint64(bytes[8:], b.timestamp)
@@ -43,7 +43,7 @@ func (b *DABlockImpl) Encode() []byte {
 }
 
 // Decode populates the fields of a DABlock from a byte slice.
-func (b *DABlockImpl) Decode(bytes []byte) error {
+func (b *DABlockV0) Decode(bytes []byte) error {
 	if len(bytes) != BlockContextByteSize {
 		return errors.New("block encoding is not BlockContextByteSize bytes long")
 	}
@@ -59,31 +59,31 @@ func (b *DABlockImpl) Decode(bytes []byte) error {
 }
 
 // Number returns the block number.
-func (b *DABlockImpl) Number() uint64 {
+func (b *DABlockV0) Number() uint64 {
 	return b.number
 }
 
 // Timestamp returns the block timestamp.
-func (b *DABlockImpl) Timestamp() uint64 {
+func (b *DABlockV0) Timestamp() uint64 {
 	return b.timestamp
 }
 
 // BaseFee returns the block base fee.
-func (b *DABlockImpl) BaseFee() *big.Int {
+func (b *DABlockV0) BaseFee() *big.Int {
 	return b.baseFee
 }
 
 // GasLimit returns the block gas limit.
-func (b *DABlockImpl) GasLimit() uint64 {
+func (b *DABlockV0) GasLimit() uint64 {
 	return b.gasLimit
 }
 
 // NumTransactions returns the number of transactions in the block.
-func (b *DABlockImpl) NumTransactions() uint16 {
+func (b *DABlockV0) NumTransactions() uint16 {
 	return b.numTransactions
 }
 
 // NumL1Messages returns the number of L1 messages in the block.
-func (b *DABlockImpl) NumL1Messages() uint16 {
+func (b *DABlockV0) NumL1Messages() uint16 {
 	return b.numL1Messages
 }

From 955f375e23003ba5b448006e1ec18233c370130f Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Mon, 23 Sep 2024 02:16:24 +0800
Subject: [PATCH 044/126] add JSONFromBytes

---
 encoding/codecv0.go    |  6 ++++++
 encoding/codecv1.go    |  6 ++++++
 encoding/codecv2.go    |  6 ++++++
 encoding/codecv3.go    | 16 ++++++++++++++++
 encoding/codecv4.go    |  5 +++++
 encoding/dabatch.go    | 37 ++++++++++++++++++++++++++++++++++++-
 encoding/interfaces.go |  3 ++-
 7 files changed, 77 insertions(+), 2 deletions(-)

diff --git a/encoding/codecv0.go b/encoding/codecv0.go
index 6f30bed..87a5a01 100644
--- a/encoding/codecv0.go
+++ b/encoding/codecv0.go
@@ -369,3 +369,9 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	}
 	return chunks, nil
 }
+
+// JSONFromBytes for CodecV0 returns empty values.
+func (c *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) {
+	// DACodecV0 doesn't need this, so just return empty values
+	return nil, nil
+}
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 4c8ccd2..a8ab0ea 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -447,3 +447,9 @@ func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV0{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes for CodecV1 returns empty values.
+func (c *DACodecV1) JSONFromBytes(data []byte) ([]byte, error) {
+	// DACodecV1 doesn't need this, so just return empty values
+	return nil, nil
+}
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index 3d4052b..4519fe0 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -344,3 +344,9 @@ func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV1{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes for CodecV2 returns empty values.
+func (c *DACodecV2) JSONFromBytes(data []byte) ([]byte, error) {
+	// DACodecV2 doesn't need this, so just return empty values
+	return nil, nil
+}
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index 50437a8..ab0e288 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -2,6 +2,7 @@ package encoding
 
 import (
 	"encoding/binary"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"reflect"
@@ -198,3 +199,18 @@ func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV2{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON.
+func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) {
+	batch, err := o.NewDABatchFromBytes(data)
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err)
+	}
+
+	jsonBytes, err := json.Marshal(batch)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal DABatchV2 to JSON: %w", err)
+	}
+
+	return jsonBytes, nil
+}
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index eb4af9c..9f77171 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -378,3 +378,8 @@ func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV3{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON.
+func (o *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) {
+	return (&DACodecV3{}).JSONFromBytes(data)
+}
diff --git a/encoding/dabatch.go b/encoding/dabatch.go
index 481ce5c..d23690e 100644
--- a/encoding/dabatch.go
+++ b/encoding/dabatch.go
@@ -3,6 +3,7 @@ package encoding
 import (
 	"encoding/binary"
 	"encoding/hex"
+	"encoding/json"
 	"errors"
 	"fmt"
 
@@ -167,7 +168,6 @@ func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) {
 type DABatchV2 struct {
 	DABatchV0
 
-	// FIXME: export correct JSON format for prover.
 	blobVersionedHash  common.Hash
 	lastBlockTimestamp uint64
 	blobDataProof      [2]common.Hash
@@ -320,3 +320,38 @@ func (b *DABatchV2) BlobBytes() []byte {
 	return b.blobBytes
 }
+
+// MarshalJSON implements the custom JSON serialization for DABatchV2.
+// This method is designed to provide prover with batch info in snake_case format.
+func (b *DABatchV2) MarshalJSON() ([]byte, error) { + type daBatchV2JSON struct { + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash string `json:"data_hash"` + ParentBatchHash string `json:"parent_batch_hash"` + SkippedL1MessageBitmap string `json:"skipped_l1_message_bitmap"` + BlobVersionedHash string `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobBytes string `json:"blob_bytes"` + BlobDataProof [2]string `json:"blob_data_proof"` + } + + return json.Marshal(&daBatchV2JSON{ + Version: b.version, + BatchIndex: b.batchIndex, + L1MessagePopped: b.l1MessagePopped, + TotalL1MessagePopped: b.totalL1MessagePopped, + DataHash: b.dataHash.Hex(), + ParentBatchHash: b.parentBatchHash.Hex(), + SkippedL1MessageBitmap: common.Bytes2Hex(b.skippedL1MessageBitmap), + BlobVersionedHash: b.blobVersionedHash.Hex(), + LastBlockTimestamp: b.lastBlockTimestamp, + BlobBytes: common.Bytes2Hex(b.blobBytes), + BlobDataProof: [2]string{ + b.blobDataProof[0].Hex(), + b.blobDataProof[1].Hex(), + }, + }) +} diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 43c5967..fd133e9 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -54,7 +54,8 @@ type Codec interface { EstimateBatchL1CommitGas(*Batch) (uint64, error) EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error) - SetCompression(enable bool) // only used for codecv4 + SetCompression(enable bool) + JSONFromBytes([]byte) ([]byte, error) } // CodecVersion represents the version of the codec. From f73c63e89ffb75828b4b37002bc788173f959a3e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 30 Sep 2024 17:06:14 +0800 Subject: [PATCH 045/126] fix a typo --- encoding/codecv3.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index ab0e288..c348905 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -209,7 +209,7 @@ func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { jsonBytes, err := json.Marshal(batch) if err != nil { - return nil, fmt.Errorf("failed to marshal DABatchV2 to JSON: %w", err) + return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) } return jsonBytes, nil From cf9f08431fe10dc1963338f808fc934f4f86c6a5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 1 Oct 2024 16:17:41 +0800 Subject: [PATCH 046/126] use register mode --- encoding/bitmap.go | 4 +- encoding/{ => codecv0}/codecv0.go | 136 +++---- encoding/{ => codecv1}/codecv1.go | 184 ++++++---- encoding/{ => codecv2}/codecv2.go | 293 ++++++++++++--- encoding/codecv3.go | 216 ----------- encoding/codecv3/codecv3.go | 574 ++++++++++++++++++++++++++++++ encoding/{ => codecv4}/codecv4.go | 312 +++++++++++++--- encoding/da.go | 3 +- encoding/interfaces.go | 47 +-- 9 files changed, 1310 insertions(+), 459 deletions(-) rename encoding/{ => codecv0}/codecv0.go (65%) rename encoding/{ => codecv1}/codecv1.go (64%) rename encoding/{ => codecv2}/codecv2.go (51%) delete mode 100644 encoding/codecv3.go create mode 100644 encoding/codecv3/codecv3.go rename encoding/{ => codecv4}/codecv4.go (51%) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index da4386e..7ada6d6 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -7,8 +7,8 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" ) -// constructSkippedBitmap constructs skipped L1 message bitmap of the batch. 
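An aside before the rest of the bitmap.go hunk (not part of any commit in this series): a minimal sketch of the snake_case document that MarshalJSON above emits. NewDABatchV2WithProof is used because it accepts a nil blob, so no KZG material is needed; the hash and proof values below are just zero placeholders:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/common"
)

func main() {
	b := encoding.NewDABatchV2WithProof(
		3, 1234, 0, 42, 1700000000, // version, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp
		common.Hash{}, common.Hash{}, common.Hash{}, // dataHash, parentBatchHash, blobVersionedHash
		nil, nil, nil, nil, // skippedL1MessageBitmap, blob, z, blobBytes
		[2]common.Hash{}, // blobDataProof
	)

	out, err := json.Marshal(b) // dispatches to the custom MarshalJSON
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// {"version":3,"batch_index":1234,"l1_message_popped":0,"total_l1_message_popped":42,...}
}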
-func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { +// ConstructSkippedBitmap constructs skipped L1 message bitmap of the batch. +func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int diff --git a/encoding/codecv0.go b/encoding/codecv0/codecv0.go similarity index 65% rename from encoding/codecv0.go rename to encoding/codecv0/codecv0.go index 87a5a01..c8fcd63 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -1,4 +1,4 @@ -package encoding +package codecv0 import ( "encoding/binary" @@ -7,6 +7,7 @@ import ( "math" "reflect" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -14,13 +15,20 @@ import ( type DACodecV0 struct{} +// init registers the DACodecV0 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV0, func() encoding.Codec { + return &DACodecV0{} + }) +} + // Version returns the codec version. -func (o *DACodecV0) Version() CodecVersion { - return CodecV0 +func (o *DACodecV0) Version() encoding.CodecVersion { + return encoding.CodecV0 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV0) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -38,7 +46,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := encoding.NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -51,8 +59,8 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []DABlock +func (o *DACodecV0) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + var blocks []encoding.DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -77,7 +85,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV0( + daChunk := encoding.NewDAChunkV0( blocks, // blocks txs, // transactions ) @@ -86,7 +94,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // NewDABatch creates a DABatch from the provided Batch. 
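An aside before the NewDABatch hunk that follows (not part of any commit in this series): under register mode each codec package registers a factory from its init, so a consumer imports the codec packages for side effects and then resolves a Codec by version. Only RegisterCodec is visible in this series; the lookup function named below is an assumption for illustration:

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"

	// blank imports run each codec's init, which calls encoding.RegisterCodec
	_ "github.com/scroll-tech/da-codec/encoding/codecv0"
	_ "github.com/scroll-tech/da-codec/encoding/codecv1"
)

func main() {
	// hypothetical registry accessor; the name CodecFromVersion is assumed
	codec, err := encoding.CodecFromVersion(encoding.CodecV0)
	if err != nil {
		panic(err)
	}
	fmt.Println(codec.Version()) // CodecV0
}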
-func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV0) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // compute batch data hash var dataBytes []byte totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore @@ -109,14 +117,14 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } - daBatch := NewDABatchV0( - uint8(CodecV0), // version - batch.Index, // batchIndex + daBatch := encoding.NewDABatchV0( + uint8(encoding.CodecV0), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -129,7 +137,7 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -143,16 +151,16 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV0) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) < 89 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV0 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0) + if encoding.CodecVersion(data[0]) != encoding.CodecV0 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV0) } - b := NewDABatchV0( + b := encoding.NewDABatchV0( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -166,25 +174,25 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { var size uint64 for _, txData := range b.Transactions { if txData.Type == types.L1MessageTxType { continue } size += 4 // 4 bytes payload length - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := encoding.GetTxPayloadLength(txData) if err != nil { return 0, err } size += txPayloadLength } - size += BlockContextByteSize + size += encoding.BlockContextByteSize return size, nil } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
-func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -193,16 +201,16 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { continue } - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := encoding.GetTxPayloadLength(txData) if err != nil { return 0, err } - total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash } - total += CalldataNonZeroByteGas * BlockContextByteSize + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -211,17 +219,17 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, block := range c.Blocks { blockL1CommitCalldataSize, err := o.EstimateBlockL1CommitCalldataSize(block) @@ -234,7 +242,7 @@ func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
-func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { var totalTxNum uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -247,35 +255,35 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * encoding.BlockContextByteSize // numBlocks of BlockContext in chunk - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -289,21 +297,21 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 
32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) @@ -316,22 +324,22 @@ func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { +func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { return true, nil } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { +func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { return true, nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { return 0, 0, nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
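An aside before the next hunk (not part of any commit in this series): the bitmap term above accounts for one 32-byte word per 256 L1 messages. For example, 257 popped messages need two words: 32*(257+255)/256 = 64 bytes. Since Go evaluates 32*(n+255)/256 left to right in integer arithmetic, the term can deviate slightly from the exact 32*ceil(n/256) (at n = 256 it gives 63 rather than 32), in line with the "approximately" in these estimators' doc comments.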
-func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { return 0, 0, nil } @@ -339,30 +347,30 @@ func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, func (o *DACodecV0) SetCompression(enable bool) {} // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - var chunks []DAChunk +func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk for _, chunk := range bytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) } - blocks := make([]DABlock, numBlocks) + blocks := make([]encoding.DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err } } - chunks = append(chunks, NewDAChunkV0( + chunks = append(chunks, encoding.NewDAChunkV0( blocks, // blocks nil, // transactions )) diff --git a/encoding/codecv1.go b/encoding/codecv1/codecv1.go similarity index 64% rename from encoding/codecv1.go rename to encoding/codecv1/codecv1.go index a8ab0ea..30a4373 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -1,13 +1,15 @@ -package encoding +package codecv1 import ( "crypto/sha256" "encoding/binary" "errors" "fmt" + "math" "math/big" "reflect" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -16,21 +18,54 @@ import ( type DACodecV1 struct{} +// init registers the DACodecV1 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV1, func() encoding.Codec { + return &DACodecV1{} + }) +} + // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 // Version returns the codec version. -func (o *DACodecV1) Version() CodecVersion { - return CodecV1 +func (o *DACodecV1) Version() encoding.CodecVersion { + return encoding.CodecV1 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV1) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (o *DACodecV1) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -39,7 +74,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []DABlock + var blocks []encoding.DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -52,7 +87,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV1( + daChunk := encoding.NewDAChunkV1( blocks, // blocks txs, // transactions ) @@ -61,7 +96,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // NewDABatch creates a DABatch from the provided Batch. 
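A usage sketch for the constructors above, with invented header values and no transactions; something like it could live in this package's tests (imports match this file plus math/big):

    // exampleDAChunkV1 builds a one-block chunk and returns its hash.
    // Illustrative only: the header fields are made up.
    func exampleDAChunkV1() (common.Hash, error) {
    	header := &types.Header{
    		Number:   big.NewInt(42),
    		Time:     1700000000,
    		BaseFee:  big.NewInt(1000000000),
    		GasLimit: 10000000,
    	}
    	chunk := &encoding.Chunk{
    		Blocks: []*encoding.Block{{Header: header}},
    	}
    	daChunk, err := (&DACodecV1{}).NewDAChunk(chunk, 0 /* totalL1MessagePoppedBefore */)
    	if err != nil {
    		return common.Hash{}, err
    	}
    	return daChunk.Hash()
    }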
-func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV1) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv1MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -78,7 +113,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -89,9 +124,9 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV1( - uint8(CodecV2), // version - batch.Index, // batchIndex + daBatch := encoding.NewDABatchV1( + uint8(encoding.CodecV1), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -107,7 +142,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -121,7 +156,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. 
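The sizes that constructBlobPayload allocates are fixed by the chunk-count constant; plugging in Codecv1MaxNumChunks = 15 makes the layout concrete (plain arithmetic, nothing assumed beyond the code that follows):

    package main

    import "fmt"

    func main() {
    	const maxNumChunks = 15 // Codecv1MaxNumChunks
    	// metadata: num_chunks (2 bytes) + one 4-byte size slot per chunk
    	metadataLength := 2 + maxNumChunks*4
    	// preimage: 1 metadata hash + one hash per chunk slot + 1 blob versioned hash
    	challengePreimageLen := (1 + maxNumChunks + 1) * 32
    	fmt.Println(metadataLength)       // 62
    	fmt.Println(challengePreimageLen) // 544
    }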
-func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv1MaxNumChunks*4 @@ -150,7 +185,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, err } @@ -181,7 +216,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -198,7 +233,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -211,16 +246,16 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV1) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV1 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) + if encoding.CodecVersion(data[0]) != encoding.CodecV1 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV1) } - b := NewDABatchV1( + b := encoding.NewDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -237,17 +272,17 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { +func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } - return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. 
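One subtlety in the challenge-point code above: big.Int.Bytes() returns a big-endian slice with leading zeros stripped, so the reduced point must be right-aligned into the fixed 32-byte kzg4844.Point. A standalone illustration:

    package main

    import (
    	"fmt"
    	"math/big"
    )

    func main() {
    	// big.Int.Bytes() drops leading zero bytes, so a value shorter than
    	// 32 bytes has to be copied into the tail of the array.
    	point := new(big.Int).SetUint64(0x0102)
    	pb := point.Bytes() // [0x01 0x02], length 2
    	var z [32]byte
    	copy(z[32-len(pb):], pb)
    	fmt.Printf("%x\n", z) // 30 zero bytes, then 0102
    }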
-func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { +func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { @@ -257,10 +292,10 @@ func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { } batchDataSize += chunkDataSize } - return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } -func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { +func (o *DACodecV1) chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { for _, tx := range block.Transactions { @@ -268,7 +303,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { continue } - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return 0, err } @@ -279,7 +314,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (o *DACodecV1) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -289,7 +324,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } } - total += CalldataNonZeroByteGas * BlockContextByteSize + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -298,22 +333,22 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(BlockContextByteSize * len(c.Blocks)), nil +func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
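GetMemoryExpansionCost(36) above prices the 36-byte staticcall and delegatecall frames. Its body is not shown in this diff; the sketch below assumes it follows the standard EVM memory expansion formula, 3 gas per 32-byte word plus a quadratic words*words/512 term:

    package main

    import "fmt"

    // memoryExpansionCost is an assumption about what
    // encoding.GetMemoryExpansionCost computes (Yellow Paper formula).
    func memoryExpansionCost(byteSize uint64) uint64 {
    	words := (byteSize + 31) / 32
    	return 3*words + words*words/512
    }

    func main() {
    	fmt.Println(memoryExpansionCost(36))   // 6: 2 words, quadratic term rounds to 0
    	fmt.Println(memoryExpansionCost(4096)) // 416: 128 words -> 384 + 32
    }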
-func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (o *DACodecV1) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -326,34 +361,34 @@ func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (o *DACodecV1) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -367,8 +402,8 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 
32*(totalL1MessagePoppedInChunk+255)/256)
 
 		var totalL1CommitCalldataSize uint64
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -376,14 +411,14 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 			return 0, err
 		}
 		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
 	}
 
 	return totalL1CommitGas, nil
 }
 
 // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately.
-func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) {
+func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) {
 	var totalL1CommitCalldataSize uint64
 	for _, chunk := range b.Chunks {
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -396,22 +431,22 @@ func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error)
 }
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
-func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) {
+func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) {
 	return true, nil
 }
 
 // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch.
-func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) {
+func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) {
 	return true, nil
 }
 
 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
+func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
+func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
@@ -422,7 +457,7 @@ func (o *DACodecV1) SetCompression(enable bool) {}
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
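Both per-chunk terms above size the skippedL1MessageBitmap with 32*(n+255)/256, a ceiling division that reserves one 32-byte word per 256 L1 messages (one bit per message). A few worked values:

    package main

    import "fmt"

    func main() {
    	bitmapLen := func(n uint64) uint64 { return 32 * ((n + 255) / 256) }
    	fmt.Println(bitmapLen(0), bitmapLen(1), bitmapLen(256), bitmapLen(257)) // 0 32 32 64
    }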
-func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (o *DACodecV1) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore @@ -444,8 +479,35 @@ func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe } // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV0{}).DecodeDAChunks(bytes) +func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil } // JSONFromBytes for CodecV1 returns empty values. diff --git a/encoding/codecv2.go b/encoding/codecv2/codecv2.go similarity index 51% rename from encoding/codecv2.go rename to encoding/codecv2/codecv2.go index 4519fe0..3ce1cd4 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -1,4 +1,4 @@ -package encoding +package codecv2 import ( "crypto/sha256" @@ -6,6 +6,7 @@ import ( "encoding/hex" "errors" "fmt" + "math" "math/big" "reflect" @@ -15,31 +16,91 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) type DACodecV2 struct{} +// init registers the DACodecV2 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV2, func() encoding.Codec { + return &DACodecV2{} + }) +} + // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV2) Version() CodecVersion { - return CodecV2 +func (o *DACodecV2) Version() encoding.CodecVersion { + return encoding.CodecV2 } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. 
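DecodeDAChunks expects each input slice to be a one-byte block count followed by that many fixed-size block contexts. A round-trip sketch with invented values, assuming DABlockV0 also exposes an Encode() counterpart to the Decode call used above:

    // exampleDecodeDAChunksV1 serializes one block context and decodes it back.
    func exampleDecodeDAChunksV1() ([]encoding.DAChunk, error) {
    	daBlock := encoding.NewDABlockV0(
    		42,                     // number
    		1700000000,             // timestamp
    		big.NewInt(1000000000), // baseFee
    		10000000,               // gasLimit
    		0,                      // numTransactions
    		0,                      // numL1Messages
    	)
    	raw := append([]byte{1}, daBlock.Encode()...) // numBlocks = 1, then one block context
    	return (&DACodecV1{}).DecodeDAChunks([][]byte{raw})
    }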
-func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - return (&DACodecV1{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var blocks []encoding.DABlock + var txs [][]*types.TransactionData + + for _, block := range chunk.Blocks { + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := encoding.NewDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil } // NewDABatch creates a DABatch from the provided encoding.Batch. 
-func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV2) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv2MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -56,7 +117,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -67,9 +128,9 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV1( - uint8(CodecV2), // version - batch.Index, // batchIndex + daBatch := encoding.NewDABatchV1( + uint8(encoding.CodecV2), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -85,7 +146,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -99,7 +160,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv2MaxNumChunks*4 @@ -128,7 +189,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -167,7 +228,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. 
- if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -179,7 +240,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -196,7 +257,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -209,16 +270,16 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV2) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV2 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) + if encoding.CodecVersion(data[0]) != encoding.CodecV2 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV2) } - b := NewDABatchV1( + b := encoding.NewDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -235,8 +296,8 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) +func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -244,12 +305,12 @@ func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
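MakeBlobCanonical is the step that turns the compressed payload into a valid 4844 blob. Its body is not in this diff, so the sketch below is an assumption about the packing it performs: 31 payload bytes per 32-byte field element, with the leading byte of each element kept zero so every element is a canonical BLS field element. That packing is also why the hard payload cap used later in this patch is 4096*31 = 126976 bytes.

    // makeBlobCanonicalSketch is a sketch only; the real helper lives in the
    // encoding package (needs the errors and kzg4844 imports).
    func makeBlobCanonicalSketch(data []byte) (*kzg4844.Blob, error) {
    	if len(data) > 126976 { // 4096 field elements * 31 usable bytes each
    		return nil, errors.New("data exceeds blob capacity")
    	}
    	var blob kzg4844.Blob
    	for from := 0; from < len(data); from += 31 {
    		to := from + 31
    		if to > len(data) {
    			to = len(data)
    		}
    		// byte 0 of each 32-byte element stays zero
    		copy(blob[(from/31)*32+1:], data[from:to])
    	}
    	return &blob, nil
    }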
-func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) +func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -257,13 +318,13 @@ func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) +func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -275,7 +336,7 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error if len(batchBytes) <= 131072 { return true, nil } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -284,8 +345,8 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) +func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -297,7 +358,7 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error if len(batchBytes) <= 131072 { return true, nil } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -305,28 +366,120 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
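The 131072 threshold in both checks above is 128 KiB: payloads at or below it skip the probe entirely and count as compatible, and a false result signals the caller to repartition rather than fail. A hypothetical caller:

    // ensureChunkCompatible distinguishes operational errors from an
    // incompatible-but-valid payload (hypothetical helper, not in this patch).
    func ensureChunkCompatible(c *encoding.Chunk) error {
    	compatible, err := (&DACodecV2{}).CheckChunkCompressedDataCompatibility(c)
    	if err != nil {
    		return err // constructing or compressing the payload failed
    	}
    	if !compatible {
    		return errors.New("chunk should be split: compressed payload incompatible")
    	}
    	return nil
    }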
-func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return (&DACodecV1{}).EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - return (&DACodecV1{}).EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { - return (&DACodecV1{}).EstimateBlockL1CommitGas(b) +func (o *DACodecV2) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + + // sload + total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue + total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + + return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - return (&DACodecV1{}).EstimateChunkL1CommitGas(c) +func (o *DACodecV2) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
-	return (&DACodecV1{}).EstimateBatchL1CommitGas(b)
+func (o *DACodecV2) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+	var totalL1CommitGas uint64
+
+	// Add extra gas costs
+	totalL1CommitGas += 100000                          // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100                        // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000                           // 1 time sstore
+	totalL1CommitGas += 21000                           // base fee for tx
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+
+	// adjusting gas:
+	// add 1 time cold sload (2100 gas) for L1MessageQueue
+	// add 1 time cold address access (2600 gas) for L1MessageQueue
+	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
+	totalL1CommitGas += (2100 + 2600 - 100 - 100)
+	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+
+	// adjust batch data hash gas cost
+	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+
+	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
+
+	for _, chunk := range b.Chunks {
+		chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitGas += chunkL1CommitGas
+
+		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
+		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
+
+		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+
+		var totalL1CommitCalldataSize uint64
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+	}
+
+	return totalL1CommitGas, nil
 }
 
 // SetCompression enables or disables compression.
@@ -336,13 +489,57 @@ func (o *DACodecV2) SetCompression(enable bool) {}
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
-	return (&DACodecV1{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+func (o *DACodecV2) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+	}
+
+	dataHash := crypto.Keccak256Hash(dataBytes)
+	return dataHash, nil
 }
 
 // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk
-func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
-	return (&DACodecV1{}).DecodeDAChunks(bytes)
+func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) {
+	var chunks []encoding.DAChunk
+	for _, chunk := range bytes {
+		if len(chunk) < 1 {
+			return nil, fmt.Errorf("invalid chunk, length is less than 1")
+		}
+
+		numBlocks := int(chunk[0])
+		if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize {
+			return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize)
+		}
+
+		blocks := make([]encoding.DABlock, numBlocks)
+		for i := 0; i < numBlocks; i++ {
+			startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte
+			endIdx := startIdx + encoding.BlockContextByteSize
+			blocks[i] = &encoding.DABlockV0{}
+			err := blocks[i].Decode(chunk[startIdx:endIdx])
+			if err != nil {
+				return nil, err
+			}
+		}
+
+		chunks = append(chunks, encoding.NewDAChunkV1(
+			blocks, // blocks
+			nil,    // transactions
+		))
+	}
+	return chunks, nil
 }
 
 // JSONFromBytes for CodecV2 returns empty values.
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
deleted file mode 100644
index c348905..0000000
--- a/encoding/codecv3.go
+++ /dev/null
@@ -1,216 +0,0 @@
-package encoding
-
-import (
-	"encoding/binary"
-	"encoding/json"
-	"errors"
-	"fmt"
-	"reflect"
-
-	"github.com/scroll-tech/go-ethereum/common"
-	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
-)
-
-type DACodecV3 struct{}
-
-// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain.
-const Codecv3MaxNumChunks = 45
-
-// Version returns the codec version.
-func (o *DACodecV3) Version() CodecVersion {
-	return CodecV3
-}
-
-// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before.
-func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) {
-	return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore)
-}
-
-// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before.
-func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) {
-	return (&DACodecV2{}).NewDAChunk(chunk, totalL1MessagePoppedBefore)
-}
-
-// NewDABatch creates a DABatch from the provided Batch.
-func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { - // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > Codecv3MaxNumChunks { - return nil, errors.New("too many chunks in batch") - } - - if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") - } - - if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { - return nil, errors.New("too few blocks in last chunk of the batch") - } - - // batch data hash - dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) - if err != nil { - return nil, err - } - - lastChunk := batch.Chunks[len(batch.Chunks)-1] - lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - - return NewDABatchV2( - uint8(CodecV3), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // totalL1MessagePopped - lastBlock.Header.Time, // lastBlockTimestamp - dataHash, // dataHash - batch.ParentBatchHash, // parentBatchHash - blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap - blob, // blob - z, // z - blobBytes, // blobBytes - ) -} - -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. -func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := o.NewDABatch(batch) - if err != nil { - return nil, err - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - -// constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return (&DACodecV2{}).constructBlobPayload(chunks, useMockTxData) -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
-func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) != 193 { - return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) - } - - if CodecVersion(data[0]) != CodecV3 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) - } - - b := NewDABatchV2WithProof( - data[0], // Version - binary.BigEndian.Uint64(data[1:9]), // BatchIndex - binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped - binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp - common.BytesToHash(data[25:57]), // DataHash - common.BytesToHash(data[89:121]), // ParentBatchHash - common.BytesToHash(data[57:89]), // BlobVersionedHash - nil, // skippedL1MessageBitmap - nil, // blob - nil, // z - nil, // blobBytes - [2]common.Hash{ // BlobDataProof - common.BytesToHash(data[129:161]), - common.BytesToHash(data[161:193]), - }, - ) - - return b, nil -} - -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - return (&DACodecV2{}).EstimateChunkL1CommitBatchSizeAndBlobSize(c) -} - -// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - return (&DACodecV2{}).EstimateBatchL1CommitBatchSizeAndBlobSize(b) -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - return (&DACodecV2{}).CheckChunkCompressedDataCompatibility(c) -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - return (&DACodecV2{}).CheckBatchCompressedDataCompatibility(b) -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return (&DACodecV2{}).EstimateChunkL1CommitCalldataSize(c) -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - return (&DACodecV2{}).EstimateBatchL1CommitCalldataSize(b) -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - chunkL1CommitGas, err := (&DACodecV2{}).EstimateChunkL1CommitGas(c) - if err != nil { - return 0, err - } - return chunkL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { - batchL1CommitGas, err := (&DACodecV2{}).EstimateBatchL1CommitGas(b) - if err != nil { - return 0, err - } - return batchL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. -} - -// SetCompression enables or disables compression. 
-func (o *DACodecV3) SetCompression(enable bool) {} - -// computeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV2{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV2{}).DecodeDAChunks(bytes) -} - -// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. -func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { - batch, err := o.NewDABatchFromBytes(data) - if err != nil { - return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) - } - - jsonBytes, err := json.Marshal(batch) - if err != nil { - return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) - } - - return jsonBytes, nil -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go new file mode 100644 index 0000000..23a66b7 --- /dev/null +++ b/encoding/codecv3/codecv3.go @@ -0,0 +1,574 @@ +package codecv3 + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "math" + "math/big" + "reflect" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/zstd" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" +) + +type DACodecV3 struct{} + +// init registers the DACodecV3 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV3, func() encoding.Codec { + return &DACodecV3{} + }) +} + +// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv3MaxNumChunks = 45 + +// Version returns the codec version. +func (o *DACodecV3) Version() encoding.CodecVersion { + return encoding.CodecV3 +} + +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV3) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil +} + +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
+func (o *DACodecV3) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var blocks []encoding.DABlock + var txs [][]*types.TransactionData + + for _, block := range chunk.Blocks { + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := encoding.NewDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil +} + +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV3) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > Codecv3MaxNumChunks { + return nil, errors.New("too many chunks in batch") + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("too few chunks in batch") + } + + if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { + return nil, errors.New("too few blocks in last chunk of the batch") + } + + // batch data hash + dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // skipped L1 messages bitmap + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // blob payload + blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) + if err != nil { + return nil, err + } + + lastChunk := batch.Chunks[len(batch.Chunks)-1] + lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + + return encoding.NewDABatchV2( + uint8(encoding.CodecV3), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + blobBytes, // blobBytes + ) +} + +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) + } + + return daBatch, nil +} + +// constructBlobPayload constructs the 4844 blob payload. 
+func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
+	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
+	metadataLength := 2 + Codecv3MaxNumChunks*4
+
+	// batchBytes represents the raw (un-compressed and un-padded) blob payload
+	batchBytes := make([]byte, metadataLength)
+
+	// challenge digest preimage
+	// 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash
+	challengePreimage := make([]byte, (1+Codecv3MaxNumChunks+1)*32)
+
+	// the chunk data hash used for calculating the challenge preimage
+	var chunkDataHash common.Hash
+
+	// blob metadata: num_chunks
+	binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks)))
+
+	// encode blob metadata and L2 transactions,
+	// and simultaneously also build challenge preimage
+	for chunkID, chunk := range chunks {
+		currentChunkStartIndex := len(batchBytes)
+
+		for _, block := range chunk.Blocks {
+			for _, tx := range block.Transactions {
+				if tx.Type == types.L1MessageTxType {
+					continue
+				}
+
+				// encode L2 txs into blob payload
+				rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData)
+				if err != nil {
+					return nil, common.Hash{}, nil, nil, err
+				}
+				batchBytes = append(batchBytes, rlpTxData...)
+			}
+		}
+
+		// blob metadata: chunki_size
+		if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 {
+			binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize))
+		}
+
+		// challenge: compute chunk data hash
+		chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:])
+		copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
+	}
+
+	// if we have fewer than Codecv3MaxNumChunks chunks, the rest
+	// of the blob metadata is correctly initialized to 0,
+	// but we need to add padding to the challenge preimage
+	for chunkID := len(chunks); chunkID < Codecv3MaxNumChunks; chunkID++ {
+		// use the last chunk's data hash as padding
+		copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
+	}
+
+	// challenge: compute metadata hash
+	hash := crypto.Keccak256Hash(batchBytes[0:metadataLength])
+	copy(challengePreimage[0:], hash[:])
+
+	// blobBytes represents the compressed blob payload (batchBytes)
+	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
+	if err != nil {
+		return nil, common.Hash{}, nil, nil, err
+	}
+
+	// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
+	if !useMockTxData && len(batchBytes) > 131072 {
+		// Check compressed data compatibility.
+ if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, err + } + } + + if len(blobBytes) > 126976 { + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := encoding.MakeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+Codecv3MaxNumChunks)*32:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + start := 32 - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, blobBytes, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. +func (o *DACodecV3) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { + if len(data) != 193 { + return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + } + + if encoding.CodecVersion(data[0]) != encoding.CodecV3 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV3) + } + + b := encoding.NewDABatchV2WithProof( + data[0], // Version + binary.BigEndian.Uint64(data[1:9]), // BatchIndex + binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped + binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp + common.BytesToHash(data[25:57]), // DataHash + common.BytesToHash(data[89:121]), // ParentBatchHash + common.BytesToHash(data[57:89]), // BlobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // BlobDataProof + common.BytesToHash(data[129:161]), + common.BytesToHash(data[161:193]), + }, + ) + + return b, nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv3MaxNumChunks) + if err != nil { + return 0, 0, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
+func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + if err != nil { + return 0, 0, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv3MaxNumChunks) + if err != nil { + return false, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. + if len(batchBytes) <= 131072 { + return true, nil + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + if err != nil { + return false, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. + if len(batchBytes) <= 131072 { + return true, nil + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
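For scale: under the standard EVM memory-expansion rule (3 gas per 32-byte word plus a quadratic term that is zero at these sizes), GetMemoryExpansionCost(36) works out to 3 * 2 = 6 gas, so each L1 message in the block estimate below contributes roughly 2100 + 100 + 100 + 6 + 100 + 100 + 100 + 6 = 2612 gas on top of the block-context calldata term. A compressed restatement of that sum, with the memory term stated as an assumption:

```go
// perL1MessageCommitGas restates the per-message terms of the block gas
// estimate; memExp36 assumes the standard EVM memory expansion cost for
// 36 bytes (2 words, 3 gas each, quadratic term zero).
func perL1MessageCommitGas() uint64 {
	const memExp36 = 6
	return 2100 + // cold sload in L1MessageQueue
		100 + 100 + // staticcall plus warm address access
		memExp36 + // staticcall to proxy
		100 + 100 + 100 + // read admin, read impl, access impl
		memExp36 // delegatecall to impl
}
```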
+func (o *DACodecV3) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + + // sload + total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue + total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + + return total, nil +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func (o *DACodecV3) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + + return totalL1CommitGas, nil +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
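The (totalL1MessagePoppedInChunk + 255) / 256 terms in the batch estimate below are ceiling division: the skipped-L1-message bitmap packs one bit per popped message into 32-byte words. A worked example, matching the formula in the code:

```go
// skippedBitmapBytes computes the bitmap size charged per chunk: one bit
// per popped L1 message, packed into 32-byte words.
// For n = 300: (300 + 255) / 256 = 2 words, i.e. 64 bytes.
func skippedBitmapBytes(n uint64) uint64 {
	return 32 * ((n + 255) / 256)
}
```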
+func (o *DACodecV3) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+	var totalL1CommitGas uint64
+
+	// Add extra gas costs
+	totalL1CommitGas += 100000                          // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100                        // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000                           // 1 time sstore
+	totalL1CommitGas += 21000                           // base fee for tx
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+
+	// adjusting gas:
+	// add 1 time cold sload (2100 gas) for L1MessageQueue
+	// add 1 time cold address access (2600 gas) for L1MessageQueue
+	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
+	totalL1CommitGas += (2100 + 2600 - 100 - 100)
+	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part) + 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+
+	// adjust batch data hash gas cost
+	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+
+	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
+
+	for _, chunk := range b.Chunks {
+		chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitGas += chunkL1CommitGas
+
+		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
+		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
+
+		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+
+		var totalL1CommitCalldataSize uint64
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+	}
+
+	totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call.
+
+	return totalL1CommitGas, nil
+}
+
+// SetCompression is a no-op: codecv3 always compresses the blob payload.
+func (o *DACodecV3) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV3) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+ } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil +} + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil +} + +// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. +func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := o.NewDABatchFromBytes(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) + } + + return jsonBytes, nil +} diff --git a/encoding/codecv4.go b/encoding/codecv4/codecv4.go similarity index 51% rename from encoding/codecv4.go rename to encoding/codecv4/codecv4.go index 9f77171..bffeb9c 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -1,11 +1,13 @@ -package encoding +package codecv4 import ( "crypto/sha256" "encoding/binary" "encoding/hex" + "encoding/json" "errors" "fmt" + "math" "math/big" "reflect" "sync/atomic" @@ -16,6 +18,7 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) @@ -23,26 +26,85 @@ type DACodecV4 struct { enableCompress uint32 } +// init registers the DACodecV4 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV4, func() encoding.Codec { + return &DACodecV4{} + }) +} + // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV4) Version() CodecVersion { - return CodecV4 +func (o *DACodecV4) Version() encoding.CodecVersion { + return encoding.CodecV4 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
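The math.MaxUint16 guards in NewDABlock below exist because the two transaction counters are serialized as uint16 in the fixed-size block context. The exact field layout is not shown in this diff; the sketch below is one plausible 60-byte packing, given only to make the width constraints concrete:

```go
import (
	"encoding/binary"
	"math/big"
)

// packBlockContext is an illustrative sketch of a fixed-size block context
// with uint16 transaction counters; the field order and the 60-byte size
// are assumptions, not taken from this patch.
func packBlockContext(number, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTxs, numL1Msgs uint16) []byte {
	buf := make([]byte, 60)
	binary.BigEndian.PutUint64(buf[0:8], number)
	binary.BigEndian.PutUint64(buf[8:16], timestamp)
	if baseFee != nil {
		baseFee.FillBytes(buf[16:48]) // 32-byte big-endian base fee
	}
	binary.BigEndian.PutUint64(buf[48:56], gasLimit)
	binary.BigEndian.PutUint16(buf[56:58], numTxs)
	binary.BigEndian.PutUint16(buf[58:60], numL1Msgs)
	return buf
}
```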
-func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV4) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - return (&DACodecV3{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) +func (o *DACodecV4) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var blocks []encoding.DABlock + var txs [][]*types.TransactionData + + for _, block := range chunk.Blocks { + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := encoding.NewDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil } // NewDABatch creates a DABatch from the provided Batch. 
-func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV4) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -63,7 +125,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -77,9 +139,9 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - return NewDABatchV2( - uint8(CodecV3), // version - batch.Index, // batchIndex + return encoding.NewDABatchV2( + uint8(encoding.CodecV4), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped lastBlock.Header.Time, // lastBlockTimestamp @@ -95,7 +157,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { o.SetCompression(true) daBatch, err := o.NewDABatch(batch) if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { @@ -114,7 +176,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv4MaxNumChunks*4 @@ -143,7 +205,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -183,7 +245,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } if !useMockTxData { // Check compressed data compatibility. 
- if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -199,7 +261,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -216,7 +278,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -229,16 +291,16 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV4) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV4 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) + if encoding.CodecVersion(data[0]) != encoding.CodecV4 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV4) } - b := NewDABatchV2WithProof( + b := encoding.NewDABatchV2WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped @@ -261,8 +323,8 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) +func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -276,12 +338,12 @@ func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
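The extra "1 +" byte in the v4 size estimates is an envelope flag in front of the blob payload: it records whether the bytes that follow are the zstd-compressed batch or the raw batch. A sketch of the corresponding reader, with the decompressor passed in since only the compressor (zstd.CompressScrollBatchBytes) appears in this codebase, and with the 0/1 flag semantics stated as an assumption:

```go
import (
	"errors"
	"fmt"
)

// decodeBlobEnvelope is an illustrative reader for the one-byte envelope
// implied by the estimates above; it is not part of the patch.
func decodeBlobEnvelope(blobBytes []byte, decompress func([]byte) ([]byte, error)) ([]byte, error) {
	if len(blobBytes) < 1 {
		return nil, errors.New("empty blob payload")
	}
	payload := blobBytes[1:]
	switch blobBytes[0] {
	case 0: // raw batch payload
		return payload, nil
	case 1: // zstd-compressed batch payload
		return decompress(payload)
	default:
		return nil, fmt.Errorf("unknown envelope flag: %d", blobBytes[0])
	}
}
```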
-func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) +func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -295,12 +357,12 @@ func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) +func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -308,7 +370,7 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error if err != nil { return false, err } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -316,8 +378,8 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) +func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -325,7 +387,7 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error if err != nil { return false, err } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -333,23 +395,125 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return (&DACodecV3{}).EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
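Since codecv1 moved transaction data into the blob, commit calldata here consists of block contexts only: the batch total below is the sum over chunks of encoding.BlockContextByteSize times the chunk's block count, so, for example, a batch of two chunks with 5 and 7 blocks needs 12 * encoding.BlockContextByteSize bytes of calldata.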
-func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - return (&DACodecV3{}).EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. +func (o *DACodecV4) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + + // sload + total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue + total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + + return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - return (&DACodecV3{}).EstimateChunkL1CommitGas(c) +func (o *DACodecV4) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + + return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
-	return (&DACodecV3{}).EstimateBatchL1CommitGas(b)
+func (o *DACodecV4) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+	var totalL1CommitGas uint64
+
+	// Add extra gas costs
+	totalL1CommitGas += 100000                          // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100                        // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000                           // 1 time sstore
+	totalL1CommitGas += 21000                           // base fee for tx
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+
+	// adjusting gas:
+	// add 1 time cold sload (2100 gas) for L1MessageQueue
+	// add 1 time cold address access (2600 gas) for L1MessageQueue
+	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
+	totalL1CommitGas += (2100 + 2600 - 100 - 100)
+	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part) + 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+
+	// adjust batch data hash gas cost
+	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+
+	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
+
+	for _, chunk := range b.Chunks {
+		chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitGas += chunkL1CommitGas
+
+		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
+		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
+
+		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+
+		var totalL1CommitCalldataSize uint64
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+	}
+
+	totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call.
+
+	return totalL1CommitGas, nil
 }
 
 // isCompressEnabled checks if compression is enabled.
@@ -370,16 +534,70 @@ func (o *DACodecV4) SetCompression(enable bool) {
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
-	return (&DACodecV3{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+func (o *DACodecV4) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+ } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil } // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV3{}).DecodeDAChunks(bytes) +func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil } // JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. func (o *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { - return (&DACodecV3{}).JSONFromBytes(data) + batch, err := o.NewDABatchFromBytes(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) + } + + return jsonBytes, nil } diff --git a/encoding/da.go b/encoding/da.go index b634967..66f34f1 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -406,7 +406,8 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { return memoryCost } -func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { +// GetTxPayloadLength calculates the length of the transaction payload. +func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return 0, err diff --git a/encoding/interfaces.go b/encoding/interfaces.go index fd133e9..ad25c06 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -69,35 +69,42 @@ const ( CodecV4 ) -// CodecFromVersion returns the appropriate codec for the given version. -func CodecFromVersion(version CodecVersion) (Codec, error) { - switch version { - case CodecV0: - return &DACodecV0{}, nil - case CodecV1: - return &DACodecV1{}, nil - case CodecV2: - return &DACodecV2{}, nil - case CodecV3: - return &DACodecV3{}, nil - case CodecV4: - return &DACodecV4{}, nil - default: +// MyCodecGen is a map that stores codec generator functions for each version. +var MyCodecGen = make(map[CodecVersion]func() Codec) + +// RegisterCodec registers a codec generator function for a specific version. +func RegisterCodec(version CodecVersion, codecGenFunc func() Codec) { + MyCodecGen[version] = codecGenFunc +} + +// getCodec retrieves a Codec instance for the specified version. +// It returns an error if the version is not supported. 
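A usage sketch for the registration pattern above (a later patch in this series removes it again): importing a codec package purely for its init side effect registers its generator, after which the version lookup can construct it.

```go
package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	_ "github.com/scroll-tech/da-codec/encoding/codecv4" // registers CodecV4 via init
)

func main() {
	codec, err := encoding.CodecFromVersion(encoding.CodecV4)
	if err != nil {
		panic(err)
	}
	fmt.Println(codec.Version()) // 4
}
```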
+func getCodec(version CodecVersion) (Codec, error) { + codecGen, ok := MyCodecGen[version] + if !ok { return nil, fmt.Errorf("unsupported codec version: %d", version) } + return codecGen(), nil +} + +// CodecFromVersion returns the appropriate codec for the given version. +func CodecFromVersion(version CodecVersion) (Codec, error) { + return getCodec(version) } // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. -func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { +func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) (Codec, error) { + var version CodecVersion if chainCfg.IsDarwinV2(startBlockTimestamp) { - return &DACodecV4{} + version = CodecV4 } else if chainCfg.IsDarwin(startBlockTimestamp) { - return &DACodecV3{} + version = CodecV3 } else if chainCfg.IsCurie(startBlockNumber) { - return &DACodecV2{} + version = CodecV2 } else if chainCfg.IsBernoulli(startBlockNumber) { - return &DACodecV1{} + version = CodecV1 } else { - return &DACodecV0{} + version = CodecV0 } + return getCodec(version) } From 7eb1dc042238399e9fb788e861a4fc0c298f7790 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 4 Oct 2024 03:47:28 +0800 Subject: [PATCH 047/126] fix CI --- encoding/codecv0/codecv0.go | 3 ++- encoding/codecv1/codecv1.go | 3 ++- encoding/codecv3/codecv3.go | 5 +++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0/codecv0.go index 991dd16..7a792c9 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -7,11 +7,12 @@ import ( "math" "reflect" - "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + + "github.com/scroll-tech/da-codec/encoding" ) type DACodecV0 struct{} diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 96a8a19..252b60d 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -9,11 +9,12 @@ import ( "math/big" "reflect" - "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + + "github.com/scroll-tech/da-codec/encoding" ) type DACodecV1 struct{} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index 4dcda1e..16d11b8 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -11,13 +11,14 @@ import ( "math/big" "reflect" - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/zstd" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/zstd" ) type DACodecV3 struct{} From 91ac897e28fc2fb6108d9129e42e25d83c05f9fe Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 4 Oct 2024 18:00:20 +0800 Subject: [PATCH 048/126] remove register mode --- encoding/{codecv0 => }/codecv0.go | 157 ++++++++++++++-------------- encoding/{codecv1 => }/codecv1.go | 151 +++++++++++++-------------- 
encoding/{codecv2 => }/codecv2.go | 164 ++++++++++++++---------------- encoding/{codecv3 => }/codecv3.go | 158 ++++++++++++++-------------- encoding/{codecv4 => }/codecv4.go | 160 ++++++++++++++--------------- encoding/interfaces.go | 47 ++++----- 6 files changed, 394 insertions(+), 443 deletions(-) rename encoding/{codecv0 => }/codecv0.go (66%) rename encoding/{codecv1 => }/codecv1.go (72%) rename encoding/{codecv2 => }/codecv2.go (71%) rename encoding/{codecv3 => }/codecv3.go (74%) rename encoding/{codecv4 => }/codecv4.go (74%) diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0.go similarity index 66% rename from encoding/codecv0/codecv0.go rename to encoding/codecv0.go index 7a792c9..c207819 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0.go @@ -1,4 +1,4 @@ -package codecv0 +package encoding import ( "encoding/binary" @@ -11,26 +11,17 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" ) type DACodecV0 struct{} -// init registers the DACodecV0 with the encoding package. -func init() { - encoding.RegisterCodec(encoding.CodecV0, func() encoding.Codec { - return &DACodecV0{} - }) -} - // Version returns the codec version. -func (o *DACodecV0) Version() encoding.CodecVersion { - return encoding.CodecV0 +func (o *DACodecV0) Version() CodecVersion { + return CodecV0 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -48,7 +39,7 @@ func (o *DACodecV0) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := encoding.NewDABlockV0( + daBlock := NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -61,8 +52,8 @@ func (o *DACodecV0) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV0) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { - var blocks []encoding.DABlock +func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + var blocks []DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -87,7 +78,7 @@ func (o *DACodecV0) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore txs = append(txs, block.Transactions) } - daChunk := encoding.NewDAChunkV0( + daChunk := NewDAChunkV0( blocks, // blocks txs, // transactions ) @@ -95,24 +86,24 @@ func (o *DACodecV0) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore return daChunk, nil } -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*encoding.DAChunkRawTx. -func (o *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*encoding.DAChunkRawTx, error) { - var chunks []*encoding.DAChunkRawTx +// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. 
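The decoder below undoes a simple framing: in codecv0, each L2 transaction travels in calldata as a 4-byte big-endian length prefix (TxLenByteSize) followed by its RLP encoding. The inverse, as an illustrative helper that is not part of the patch:

```go
import "encoding/binary"

// appendFramedTx frames one RLP-encoded transaction the way
// DecodeDAChunksRawTx expects: 4-byte big-endian length, then the payload.
func appendFramedTx(dst, rlpTx []byte) []byte {
	var lenBuf [4]byte
	binary.BigEndian.PutUint32(lenBuf[:], uint32(len(rlpTx)))
	dst = append(dst, lenBuf[:]...)
	return append(dst, rlpTx...)
}
```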
+func (o *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) {
+	var chunks []*DAChunkRawTx
 	for _, chunk := range chunkBytes {
 		if len(chunk) < 1 {
 			return nil, fmt.Errorf("invalid chunk, length is less than 1")
 		}
 
 		numBlocks := int(chunk[0])
-		if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize {
-			return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize)
+		if len(chunk) < 1+numBlocks*BlockContextByteSize {
+			return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize)
 		}
 
-		blocks := make([]encoding.DABlock, numBlocks)
+		blocks := make([]DABlock, numBlocks)
 		for i := 0; i < numBlocks; i++ {
-			startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte
-			endIdx := startIdx + encoding.BlockContextByteSize
-			blocks[i] = &encoding.DABlockV0{}
+			startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte
+			endIdx := startIdx + BlockContextByteSize
+			blocks[i] = &DABlockV0{}
 			err := blocks[i].Decode(chunk[startIdx:endIdx])
 			if err != nil {
 				return nil, err
@@ -120,32 +111,32 @@ func (o *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*encoding.DAChun
 		}
 
 		var transactions []types.Transactions
-		currentIndex := 1 + numBlocks*encoding.BlockContextByteSize
+		currentIndex := 1 + numBlocks*BlockContextByteSize
 		for _, block := range blocks {
 			var blockTransactions types.Transactions
 			// ignore L1 msg transactions from the block, consider only L2 transactions
 			txNum := int(block.NumTransactions() - block.NumL1Messages())
 			for i := 0; i < txNum; i++ {
-				if len(chunk) < currentIndex+encoding.TxLenByteSize {
-					return nil, fmt.Errorf("chunk size doesn't match, next tx size is less then 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+encoding.TxLenByteSize, i)
+				if len(chunk) < currentIndex+TxLenByteSize {
+					return nil, fmt.Errorf("chunk size doesn't match, next tx size is less than 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+TxLenByteSize, i)
 				}
-				txLen := int(binary.BigEndian.Uint32(chunk[currentIndex : currentIndex+encoding.TxLenByteSize]))
-				if len(chunk) < currentIndex+encoding.TxLenByteSize+txLen {
-					return nil, fmt.Errorf("chunk size doesn't match with next tx length, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+encoding.TxLenByteSize+txLen, i)
+				txLen := int(binary.BigEndian.Uint32(chunk[currentIndex : currentIndex+TxLenByteSize]))
+				if len(chunk) < currentIndex+TxLenByteSize+txLen {
+					return nil, fmt.Errorf("chunk size doesn't match with next tx length, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+TxLenByteSize+txLen, i)
 				}
-				txData := chunk[currentIndex+encoding.TxLenByteSize : currentIndex+encoding.TxLenByteSize+txLen]
+				txData := chunk[currentIndex+TxLenByteSize : currentIndex+TxLenByteSize+txLen]
 				tx := &types.Transaction{}
 				err := tx.UnmarshalBinary(txData)
 				if err != nil {
 					return nil, fmt.Errorf("failed to unmarshal tx, pos of tx in chunk bytes: %d. tx num without l1 msgs: %d, err: %w", currentIndex, i, err)
 				}
 				blockTransactions = append(blockTransactions, tx)
-				currentIndex += encoding.TxLenByteSize + txLen
+				currentIndex += TxLenByteSize + txLen
 			}
 			transactions = append(transactions, blockTransactions)
 		}
 
-		chunks = append(chunks, &encoding.DAChunkRawTx{
+		chunks = append(chunks, &DAChunkRawTx{
 			Blocks:       blocks,
 			Transactions: transactions,
 		})
@@ -154,12 +145,12 @@ func (o *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*encoding.DAChun
 }
 
 // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks
-func (o *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*encoding.DAChunkRawTx) error {
+func (o *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error {
 	return nil
 }
 
 // NewDABatch creates a DABatch from the provided Batch.
-func (o *DACodecV0) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) {
+func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) {
 	// compute batch data hash
 	var dataBytes []byte
 	totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore
@@ -182,14 +173,14 @@ func (o *DACodecV0) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error)
 	dataHash := crypto.Keccak256Hash(dataBytes)
 
 	// skipped L1 messages bitmap
-	bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
-	daBatch := encoding.NewDABatchV0(
-		uint8(encoding.CodecV0), // version
-		batch.Index,             // batchIndex
+	daBatch := NewDABatchV0(
+		uint8(CodecV0), // version
+		batch.Index,    // batchIndex
 		totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped
 		totalL1MessagePoppedAfter, // totalL1MessagePopped
 		dataHash,                  // dataHash
@@ -202,7 +193,7 @@ func (o *DACodecV0) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error)
 
 // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch.
 // It also checks if the blob versioned hashes are as expected.
-func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) {
+func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) {
 	daBatch, err := o.NewDABatch(batch)
 	if err != nil {
 		return nil, err
@@ -216,16 +207,16 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Ba
 }
 
 // NewDABatchFromBytes decodes the given byte slice into a DABatch.
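For reference, the layout that NewDABatchFromBytes below reads back is a fixed 89-byte prefix followed by the variable-length skipped-L1-message bitmap. A sketch of the matching encoder, with illustrative parameter names:

```go
import "encoding/binary"

// encodeV0BatchHeader mirrors the offsets NewDABatchFromBytes decodes:
// version (1) | batchIndex (8) | l1MessagePopped (8) | totalL1MessagePopped (8)
// | dataHash (32) | parentBatchHash (32) | skippedL1MessageBitmap (variable).
func encodeV0BatchHeader(version byte, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash [32]byte, skippedL1MessageBitmap []byte) []byte {
	buf := make([]byte, 89, 89+len(skippedL1MessageBitmap))
	buf[0] = version
	binary.BigEndian.PutUint64(buf[1:9], batchIndex)
	binary.BigEndian.PutUint64(buf[9:17], l1MessagePopped)
	binary.BigEndian.PutUint64(buf[17:25], totalL1MessagePopped)
	copy(buf[25:57], dataHash[:])
	copy(buf[57:89], parentBatchHash[:])
	return append(buf, skippedL1MessageBitmap...)
}
```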
-func (o *DACodecV0) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { +func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } - if encoding.CodecVersion(data[0]) != encoding.CodecV0 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV0) + if CodecVersion(data[0]) != CodecV0 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0) } - b := encoding.NewDABatchV0( + b := NewDABatchV0( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -239,25 +230,25 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 for _, txData := range b.Transactions { if txData.Type == types.L1MessageTxType { continue } size += 4 // 4 bytes payload length - txPayloadLength, err := encoding.GetTxPayloadLength(txData) + txPayloadLength, err := GetTxPayloadLength(txData) if err != nil { return 0, err } size += txPayloadLength } - size += encoding.BlockContextByteSize + size += BlockContextByteSize return size, nil } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV0) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -266,16 +257,16 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) continue } - txPayloadLength, err := encoding.GetTxPayloadLength(txData) + txPayloadLength, err := GetTxPayloadLength(txData) if err != nil { return 0, err } - total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += GetKeccak256Gas(txPayloadLength) // l2 tx hash } - total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -284,17 +275,17 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + 
total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, block := range c.Blocks { blockL1CommitCalldataSize, err := o.EstimateBlockL1CommitCalldataSize(block) @@ -307,7 +298,7 @@ func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64 } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV0) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalTxNum uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -320,35 +311,35 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * encoding.BlockContextByteSize // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV0) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -362,21 +353,21 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) @@ -389,22 +380,22 @@ func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64 } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
-func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { +func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { return true, nil } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { +func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { return true, nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { return 0, 0, nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { return 0, 0, nil } diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1.go similarity index 72% rename from encoding/codecv1/codecv1.go rename to encoding/codecv1.go index 252b60d..f7f70ef 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1.go @@ -1,4 +1,4 @@ -package codecv1 +package encoding import ( "crypto/sha256" @@ -13,29 +13,20 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" ) type DACodecV1 struct{} -// init registers the DACodecV1 with the encoding package. -func init() { - encoding.RegisterCodec(encoding.CodecV1, func() encoding.Codec { - return &DACodecV1{} - }) -} - // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 // Version returns the codec version. -func (o *DACodecV1) Version() encoding.CodecVersion { - return encoding.CodecV1 +func (o *DACodecV1) Version() CodecVersion { + return CodecV1 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV1) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -53,7 +44,7 @@ func (o *DACodecV1) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := encoding.NewDABlockV0( + daBlock := NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -66,7 +57,7 @@ func (o *DACodecV1) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
-func (o *DACodecV1) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { +func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -75,7 +66,7 @@ func (o *DACodecV1) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []encoding.DABlock + var blocks []DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -88,7 +79,7 @@ func (o *DACodecV1) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore txs = append(txs, block.Transactions) } - daChunk := encoding.NewDAChunkV1( + daChunk := NewDAChunkV1( blocks, // blocks txs, // transactions ) @@ -98,23 +89,23 @@ func (o *DACodecV1) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. // Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawTx, error) { - var chunks []*encoding.DAChunkRawTx +func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { + var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + if len(chunk) < 1+numBlocks*BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) } - blocks := make([]encoding.DABlock, numBlocks) + blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + encoding.BlockContextByteSize - blocks[i] = &encoding.DABlockV0{} + startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + BlockContextByteSize + blocks[i] = &DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -123,7 +114,7 @@ func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT var transactions []types.Transactions - chunks = append(chunks, &encoding.DAChunkRawTx{ + chunks = append(chunks, &DAChunkRawTx{ Blocks: blocks, Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. }) @@ -132,13 +123,13 @@ func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*encoding.DAChunkRawTx) error { - batchBytes := encoding.BytesFromBlobCanonical(blob) - return encoding.DecodeTxsFromBytes(batchBytes[:], chunks, Codecv1MaxNumChunks) +func (o *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + batchBytes := BytesFromBlobCanonical(blob) + return DecodeTxsFromBytes(batchBytes[:], chunks, Codecv1MaxNumChunks) } // NewDABatch creates a DABatch from the provided Batch. 
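DecodeDAChunksRawTx above walks a flat layout: one numBlocks byte, then numBlocks fixed-size block contexts, with Transactions deliberately left empty until DecodeTxsFromBlob fills them from the blob. A sketch of the same bounds logic, with the context size taken as a parameter (the concrete value is the encoding package's BlockContextByteSize constant, not restated here):

    import "fmt"

    // parseChunkLayout splits a raw chunk into its block-context slices,
    // mirroring the bounds checks in DecodeDAChunksRawTx above.
    func parseChunkLayout(chunk []byte, blockContextSize int) ([][]byte, error) {
    	if len(chunk) < 1 {
    		return nil, fmt.Errorf("invalid chunk, length is less than 1")
    	}
    	numBlocks := int(chunk[0])
    	if len(chunk) < 1+numBlocks*blockContextSize {
    		return nil, fmt.Errorf("chunk size doesn't match numBlocks: got %d bytes, want at least %d", len(chunk), 1+numBlocks*blockContextSize)
    	}
    	contexts := make([][]byte, 0, numBlocks)
    	for i := 0; i < numBlocks; i++ {
    		start := 1 + i*blockContextSize // add 1 to skip the numBlocks byte
    		contexts = append(contexts, chunk[start:start+blockContextSize])
    	}
    	return contexts, nil
    }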
-func (o *DACodecV1) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { +func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv1MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -155,7 +146,7 @@ func (o *DACodecV1) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -166,9 +157,9 @@ func (o *DACodecV1) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) return nil, err } - daBatch := encoding.NewDABatchV1( - uint8(encoding.CodecV1), // version - batch.Index, // batchIndex + daBatch := NewDABatchV1( + uint8(CodecV1), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -184,7 +175,7 @@ func (o *DACodecV1) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { +func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -198,7 +189,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Ba } // constructBlobPayload constructs the 4844 blob payload. 
-func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv1MaxNumChunks*4 @@ -227,7 +218,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, err } @@ -258,7 +249,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -275,7 +266,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -288,16 +279,16 @@ func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV1) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { +func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - if encoding.CodecVersion(data[0]) != encoding.CodecV1 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV1) + if CodecVersion(data[0]) != CodecV1 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) } - b := encoding.NewDABatchV1( + b := NewDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -314,17 +305,17 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { } // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } - return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. 
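EstimateChunkL1CommitBlobSize above wraps metadataSize + chunkDataSize in CalculatePaddedBlobSize. A plausible reading of that helper, assuming the canonical blob form produced by MakeBlobCanonical stores 31 payload bytes per 32-byte BLS field element (the high byte left zero so every element stays below the BLS modulus); the authoritative definition lives in the encoding package:

    // paddedBlobSize: payload length rounded up to whole field elements,
    // at 31 usable bytes per 32-byte element (assumption, see note above).
    func paddedBlobSize(dataLen uint64) uint64 {
    	nFieldElements := (dataLen + 30) / 31 // ceil(dataLen / 31)
    	return nFieldElements * 32
    }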
-func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { @@ -334,10 +325,10 @@ func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, er } batchDataSize += chunkDataSize } - return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } -func (o *DACodecV1) chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { for _, tx := range block.Transactions { @@ -345,7 +336,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) continue } - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return 0, err } @@ -356,7 +347,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV1) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -366,7 +357,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) } } - total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -375,22 +366,22 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { - return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil +func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
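GetMemoryExpansionCost(36) appears once per proxy staticcall and once per delegatecall in the block-gas estimate above. If the helper follows the standard EVM memory-expansion rule (an assumption; the patch does not show its body), each such call works out to 6 gas:

    // memoryExpansionCost: 3 gas per 32-byte word plus words^2/512.
    // For 36 bytes -> 2 words -> 3*2 + 2*2/512 = 6 gas.
    func memoryExpansionCost(byteLen uint64) uint64 {
    	words := (byteLen + 31) / 32
    	return 3*words + words*words/512
    }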
-func (o *DACodecV1) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -403,34 +394,34 @@ func (o *DACodecV1) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV1) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { +func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -444,8 +435,8 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += GetKeccak256Gas(89 + 
32*(totalL1MessagePoppedInChunk+255)/256)
 
 		var totalL1CommitCalldataSize uint64
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -453,14 +444,14 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 			return 0, err
 		}
 		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
 	}
 
 	return totalL1CommitGas, nil
 }
 
 // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately.
-func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) {
+func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) {
 	var totalL1CommitCalldataSize uint64
 	for _, chunk := range b.Chunks {
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -473,22 +464,22 @@ func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64
 }
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
-func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) {
+func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) {
 	return true, nil
 }
 
 // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch.
-func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) {
+func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) {
 	return true, nil
 }
 
 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
+func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
+func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
@@ -499,7 +490,7 @@ func (o *DACodecV1) SetCompression(enable bool) {}
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func (o *DACodecV1) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2.go similarity index 71% rename from encoding/codecv2/codecv2.go rename to encoding/codecv2.go index 2fe7cae..fbf6a5f 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2.go @@ -1,4 +1,4 @@ -package codecv2 +package encoding import ( "crypto/sha256" @@ -16,29 +16,21 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) type DACodecV2 struct{} -// init registers the DACodecV2 with the encoding package. -func init() { - encoding.RegisterCodec(encoding.CodecV2, func() encoding.Codec { - return &DACodecV2{} - }) -} - // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV2) Version() encoding.CodecVersion { - return encoding.CodecV2 +func (o *DACodecV2) Version() CodecVersion { + return CodecV2 } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func (o *DACodecV2) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -56,7 +48,7 @@ func (o *DACodecV2) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := encoding.NewDABlockV0( + daBlock := NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -68,8 +60,8 @@ func (o *DACodecV2) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore return daBlock, nil } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func (o *DACodecV2) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
+func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -78,7 +70,7 @@ func (o *DACodecV2) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []encoding.DABlock + var blocks []DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -91,7 +83,7 @@ func (o *DACodecV2) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore txs = append(txs, block.Transactions) } - daChunk := encoding.NewDAChunkV1( + daChunk := NewDAChunkV1( blocks, // blocks txs, // transactions ) @@ -101,23 +93,23 @@ func (o *DACodecV2) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. // Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawTx, error) { - var chunks []*encoding.DAChunkRawTx +func (o *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { + var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + if len(chunk) < 1+numBlocks*BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) } - blocks := make([]encoding.DABlock, numBlocks) + blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + encoding.BlockContextByteSize - blocks[i] = &encoding.DABlockV0{} + startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + BlockContextByteSize + blocks[i] = &DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -126,7 +118,7 @@ func (o *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT var transactions []types.Transactions - chunks = append(chunks, &encoding.DAChunkRawTx{ + chunks = append(chunks, &DAChunkRawTx{ Blocks: blocks, Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. 
}) @@ -135,19 +127,19 @@ func (o *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*encoding.DAChunkRawTx) error { - compressedBytes := encoding.BytesFromBlobCanonical(blob) +func (o *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + compressedBytes := BytesFromBlobCanonical(blob) magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := encoding.DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) + batchBytes, err := DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) if err != nil { return err } - return encoding.DecodeTxsFromBytes(batchBytes, chunks, Codecv2MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, Codecv2MaxNumChunks) } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func (o *DACodecV2) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv2MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -164,7 +156,7 @@ func (o *DACodecV2) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -175,9 +167,9 @@ func (o *DACodecV2) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) return nil, err } - daBatch := encoding.NewDABatchV1( - uint8(encoding.CodecV2), // version - batch.Index, // batchIndex + daBatch := NewDABatchV1( + uint8(CodecV2), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -193,7 +185,7 @@ func (o *DACodecV2) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { +func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -207,7 +199,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Ba } // constructBlobPayload constructs the 4844 blob payload. 
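DecodeTxsFromBlob above re-attaches the 4-byte zstd frame magic (0x28 0xb5 0x2f 0xfd) that the blob encoding strips, then hands the result to DecompressScrollBlobToBatch. Purely as an illustration of that re-framing (the real helper additionally enforces Scroll-specific framing and size limits), a plain zstd decode using the widely used klauspost/compress package might look like this:

    import "github.com/klauspost/compress/zstd"

    // decompressReframed restores the omitted zstd magic and decompresses.
    // Illustration only; not a substitute for DecompressScrollBlobToBatch.
    func decompressReframed(compressedBytes []byte) ([]byte, error) {
    	framed := append([]byte{0x28, 0xb5, 0x2f, 0xfd}, compressedBytes...)
    	dec, err := zstd.NewReader(nil)
    	if err != nil {
    		return nil, err
    	}
    	defer dec.Close()
    	return dec.DecodeAll(framed, nil)
    }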
-func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv2MaxNumChunks*4 @@ -236,7 +228,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -275,7 +267,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -287,7 +279,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -304,7 +296,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -317,16 +309,16 @@ func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV2) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { +func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - if encoding.CodecVersion(data[0]) != encoding.CodecV2 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV2) + if CodecVersion(data[0]) != CodecV2 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) } - b := encoding.NewDABatchV1( + b := NewDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -343,8 +335,8 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
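The metadata section that constructBlobPayload reserves first is 2 + Codecv2MaxNumChunks*4 bytes: a two-byte chunk count followed by one four-byte size slot per possible chunk, unused slots left zero. A sketch under the assumption of big-endian integers (the byte order is not visible in this hunk):

    import "encoding/binary"

    // buildBlobMetadata lays out num_chunks plus fixed uint32 size slots.
    // Big-endian encoding is an assumption here.
    func buildBlobMetadata(chunkSizes []uint32, maxNumChunks int) []byte {
    	meta := make([]byte, 2+4*maxNumChunks)
    	binary.BigEndian.PutUint16(meta[0:2], uint16(len(chunkSizes)))
    	for i, size := range chunkSizes {
    		binary.BigEndian.PutUint32(meta[2+4*i:], size)
    	}
    	return meta
    }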
-func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv2MaxNumChunks) +func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -352,12 +344,12 @@ func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) +func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -365,13 +357,13 @@ func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv2MaxNumChunks) +func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -383,7 +375,7 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bo if len(batchBytes) <= 131072 { return true, nil } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -392,8 +384,8 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bo // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. 
-func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) +func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -405,7 +397,7 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bo if len(batchBytes) <= 131072 { return true, nil } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -413,12 +405,12 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bo } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { - return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil +func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) @@ -431,7 +423,7 @@ func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64 } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
-func (o *DACodecV2) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -441,7 +433,7 @@ func (o *DACodecV2) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) } } - total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -450,17 +442,17 @@ func (o *DACodecV2) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV2) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -473,34 +465,34 @@ func (o *DACodecV2) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
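Summing the per-message terms in EstimateBlockL1CommitGas above, and taking GetMemoryExpansionCost(36) = 6 under the standard EVM memory rule (an assumption), each L1 message is charged roughly 2100 + 100 + 100 + 6 + 100 + 100 + 100 + 6 = 2612 gas, on top of the fixed CalldataNonZeroByteGas * BlockContextByteSize charge for the block context itself.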
-func (o *DACodecV2) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 	var totalL1CommitGas uint64
 
 	// Add extra gas costs
-	totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
-	totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch
-	totalL1CommitGas += 20000 // 1 time sstore
-	totalL1CommitGas += 21000 // base fee for tx
-	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+	totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000 // 1 time sstore
+	totalL1CommitGas += 21000 // base fee for tx
+	totalL1CommitGas += CalldataNonZeroByteGas // version in calldata
 
 	// adjusting gas:
 	// add 1 time cold sload (2100 gas) for L1MessageQueue
 	// add 1 time cold address access (2600 gas) for L1MessageQueue
 	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
 	totalL1CommitGas += (2100 + 2600 - 100 - 100)
-	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
-	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+	totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
 
 	// adjust batch data hash gas cost
-	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+	totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks)))
 
 	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
 
@@ -514,8 +506,8 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
 		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
 
-		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
-		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+		totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
 
 		var totalL1CommitCalldataSize uint64
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -523,7 +515,7 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 			return 0, err
 		}
 		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
 	}
 
 	return totalL1CommitGas, nil
@@ -536,7 +528,7 @@ func (o *DACodecV2) SetCompression(enable bool) {}
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func (o *DACodecV2) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3.go similarity index 74% rename from encoding/codecv3/codecv3.go rename to encoding/codecv3.go index 16d11b8..70fb756 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3.go @@ -1,4 +1,4 @@ -package codecv3 +package encoding import ( "crypto/sha256" @@ -17,29 +17,21 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) type DACodecV3 struct{} -// init registers the DACodecV3 with the encoding package. -func init() { - encoding.RegisterCodec(encoding.CodecV3, func() encoding.Codec { - return &DACodecV3{} - }) -} - // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV3) Version() encoding.CodecVersion { - return encoding.CodecV3 +func (o *DACodecV3) Version() CodecVersion { + return CodecV3 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -57,7 +49,7 @@ func (o *DACodecV3) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := encoding.NewDABlockV0( + daBlock := NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -70,7 +62,7 @@ func (o *DACodecV3) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV3) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { +func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -79,7 +71,7 @@ func (o *DACodecV3) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []encoding.DABlock + var blocks []DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -92,7 +84,7 @@ func (o *DACodecV3) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore txs = append(txs, block.Transactions) } - daChunk := encoding.NewDAChunkV1( + daChunk := NewDAChunkV1( blocks, // blocks txs, // transactions ) @@ -102,23 +94,23 @@ func (o *DACodecV3) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. 
// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawTx, error) { - var chunks []*encoding.DAChunkRawTx +func (o *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { + var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + if len(chunk) < 1+numBlocks*BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) } - blocks := make([]encoding.DABlock, numBlocks) + blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + encoding.BlockContextByteSize - blocks[i] = &encoding.DABlockV0{} + startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + BlockContextByteSize + blocks[i] = &DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -127,7 +119,7 @@ func (o *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT var transactions []types.Transactions - chunks = append(chunks, &encoding.DAChunkRawTx{ + chunks = append(chunks, &DAChunkRawTx{ Blocks: blocks, Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. }) @@ -136,19 +128,19 @@ func (o *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*encoding.DAChunkRawTx) error { - compressedBytes := encoding.BytesFromBlobCanonical(blob) +func (o *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + compressedBytes := BytesFromBlobCanonical(blob) magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := encoding.DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) + batchBytes, err := DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) if err != nil { return err } - return encoding.DecodeTxsFromBytes(batchBytes, chunks, Codecv3MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, Codecv3MaxNumChunks) } // NewDABatch creates a DABatch from the provided Batch. 
-func (o *DACodecV3) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { +func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv3MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -169,7 +161,7 @@ func (o *DACodecV3) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -183,9 +175,9 @@ func (o *DACodecV3) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - return encoding.NewDABatchV2( - uint8(encoding.CodecV3), // version - batch.Index, // batchIndex + return NewDABatchV2( + uint8(CodecV3), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped lastBlock.Header.Time, // lastBlockTimestamp @@ -201,7 +193,7 @@ func (o *DACodecV3) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { +func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -215,7 +207,7 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Ba } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv3MaxNumChunks*4 @@ -244,7 +236,7 @@ func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -283,7 +275,7 @@ func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. 
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -295,7 +287,7 @@ func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -312,7 +304,7 @@ func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -325,16 +317,16 @@ func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. -func (o *DACodecV3) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { +func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - if encoding.CodecVersion(data[0]) != encoding.CodecV3 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV3) + if CodecVersion(data[0]) != CodecV3 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) } - b := encoding.NewDABatchV2WithProof( + b := NewDABatchV2WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped @@ -357,8 +349,8 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv3MaxNumChunks) +func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) if err != nil { return 0, 0, err } @@ -366,12 +358,12 @@ func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
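NewDABatchFromBytes for V3 insists on an exact 193-byte header, unlike V1 and V2, which accept at least 121 bytes plus a trailing skipped-message bitmap. The hunk pins down the prefix: data[0] is the version byte, data[1:9] the big-endian batch index, and data[9:17] the L1-messages-popped count; the remaining fields passed to NewDABatchV2WithProof (total messages popped, data hash, blob versioned hash, parent batch hash, last block timestamp, and the blob data proof) account for the other 176 bytes, at offsets not shown in this excerpt.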
-func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) +func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) if err != nil { return 0, 0, err } @@ -379,12 +371,12 @@ func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv3MaxNumChunks) +func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) if err != nil { return false, err } @@ -396,7 +388,7 @@ func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bo if len(batchBytes) <= 131072 { return true, nil } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -404,8 +396,8 @@ func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bo } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) +func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) if err != nil { return false, err } @@ -417,7 +409,7 @@ func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bo if len(batchBytes) <= 131072 { return true, nil } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -425,12 +417,12 @@ func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bo } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { - return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil +func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
-func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) @@ -443,7 +435,7 @@ func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64 } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV3) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -453,7 +445,7 @@ func (o *DACodecV3) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) } } - total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -462,17 +454,17 @@ func (o *DACodecV3) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV3) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -485,10 +477,10 @@ func (o *DACodecV3) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. @@ -496,26 +488,26 @@ func (o *DACodecV3) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV3) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 	var totalL1CommitGas uint64
 
 	// Add extra gas costs
-	totalL1CommitGas += 100000                          // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
-	totalL1CommitGas += 4 * 2100                        // 4 one-time cold sload for commitBatch
-	totalL1CommitGas += 20000                           // 1 time sstore
-	totalL1CommitGas += 21000                           // base fee for tx
-	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+	totalL1CommitGas += 100000                 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100               // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000                  // 1 time sstore
+	totalL1CommitGas += 21000                  // base fee for tx
+	totalL1CommitGas += CalldataNonZeroByteGas // version in calldata
 
 	// adjusting gas:
 	// add 1 time cold sload (2100 gas) for L1MessageQueue
 	// add 1 time cold address access (2600 gas) for L1MessageQueue
 	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
 	totalL1CommitGas += (2100 + 2600 - 100 - 100)
-	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
-	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+	totalL1CommitGas += GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
 
 	// adjust batch data hash gas cost
-	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+	totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks)))
 
 	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
 
@@ -529,8 +521,8 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
 		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
 
-		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
-		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+		totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
 
 		var totalL1CommitCalldataSize uint64
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -538,7 +530,7 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 			return 0, err
 		}
 		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
 	}
 
 	totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call.
@@ -553,7 +545,7 @@ func (o *DACodecV3) SetCompression(enable bool) {}
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers. 
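// --- Worked example, not part of the patch. The bitmap terms above charge
// for the skipped-L1-message bitmap, intended as one 256-bit (32-byte) word
// per 256 popped messages. Note that Go evaluates 32 * (n + 255) / 256 left
// to right, i.e. (32*(n+255))/256 = (n+255)/8, which is at least the
// word-aligned length, a safe overestimate for an estimator documented as
// approximate. The explicitly grouped, word-aligned quantity is:
func skippedL1MessageBitmapBytes(numL1MessagesPopped uint64) uint64 {
	words := (numL1MessagesPopped + 255) / 256 // ceil(n / 256) bitmap words
	return 32 * words                          // e.g. n = 300: 2 words, 64 bytes
}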
-func (o *DACodecV3) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4.go similarity index 74% rename from encoding/codecv4/codecv4.go rename to encoding/codecv4.go index 07c7ce9..799ca50 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4.go @@ -1,4 +1,4 @@ -package codecv4 +package encoding import ( "crypto/sha256" @@ -18,7 +18,6 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) @@ -26,23 +25,16 @@ type DACodecV4 struct { enableCompress uint32 } -// init registers the DACodecV4 with the encoding package. -func init() { - encoding.RegisterCodec(encoding.CodecV4, func() encoding.Codec { - return &DACodecV4{} - }) -} - // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV4) Version() encoding.CodecVersion { - return encoding.CodecV4 +func (o *DACodecV4) Version() CodecVersion { + return CodecV4 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV4) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -60,7 +52,7 @@ func (o *DACodecV4) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := encoding.NewDABlockV0( + daBlock := NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -73,7 +65,7 @@ func (o *DACodecV4) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV4) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { +func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -82,7 +74,7 @@ func (o *DACodecV4) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []encoding.DABlock + var blocks []DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -95,7 +87,7 @@ func (o *DACodecV4) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore txs = append(txs, block.Transactions) } - daChunk := encoding.NewDAChunkV1( + daChunk := NewDAChunkV1( blocks, // blocks txs, // transactions ) @@ -105,23 +97,23 @@ func (o *DACodecV4) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. 
// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawTx, error) { - var chunks []*encoding.DAChunkRawTx +func (o *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { + var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + if len(chunk) < 1+numBlocks*BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) } - blocks := make([]encoding.DABlock, numBlocks) + blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + encoding.BlockContextByteSize - blocks[i] = &encoding.DABlockV0{} + startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + BlockContextByteSize + blocks[i] = &DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -130,7 +122,7 @@ func (o *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT var transactions []types.Transactions - chunks = append(chunks, &encoding.DAChunkRawTx{ + chunks = append(chunks, &DAChunkRawTx{ Blocks: blocks, Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. }) @@ -139,24 +131,24 @@ func (o *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*encoding.DAChunkRawT } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*encoding.DAChunkRawTx) error { - rawBytes := encoding.BytesFromBlobCanonical(blob) +func (o *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + rawBytes := BytesFromBlobCanonical(blob) // if first byte is 1 - data compressed, 0 - not compressed if rawBytes[0] == 0x1 { magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := encoding.DecompressScrollBlobToBatch(append(magics, rawBytes[1:]...)) + batchBytes, err := DecompressScrollBlobToBatch(append(magics, rawBytes[1:]...)) if err != nil { return err } - return encoding.DecodeTxsFromBytes(batchBytes, chunks, Codecv4MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, Codecv4MaxNumChunks) } else { - return encoding.DecodeTxsFromBytes(rawBytes[1:], chunks, Codecv4MaxNumChunks) + return DecodeTxsFromBytes(rawBytes[1:], chunks, Codecv4MaxNumChunks) } } // NewDABatch creates a DABatch from the provided Batch. 
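// --- Minimal sketch, not part of the patch, of the envelope rule applied
// by DecodeTxsFromBlob above: byte 0 of the canonical blob bytes is a flag,
// 0x1 for a zstd-compressed payload stored without its 4-byte frame magic
// (re-attached before decompression) and 0x0 for an uncompressed payload.
// The function name is illustrative.
func splitBlobEnvelope(rawBytes []byte) (compressed bool, payload []byte) {
	if len(rawBytes) == 0 {
		return false, nil
	}
	if rawBytes[0] == 0x1 {
		zstdMagic := []byte{0x28, 0xb5, 0x2f, 0xfd}
		return true, append(zstdMagic, rawBytes[1:]...)
	}
	return false, rawBytes[1:]
}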
-func (o *DACodecV4) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { +func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -177,7 +169,7 @@ func (o *DACodecV4) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -191,9 +183,9 @@ func (o *DACodecV4) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - return encoding.NewDABatchV2( - uint8(encoding.CodecV4), // version - batch.Index, // batchIndex + return NewDABatchV2( + uint8(CodecV4), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped lastBlock.Header.Time, // lastBlockTimestamp @@ -209,7 +201,7 @@ func (o *DACodecV4) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { +func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { o.SetCompression(true) daBatch, err := o.NewDABatch(batch) if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { @@ -228,7 +220,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Ba } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv4MaxNumChunks*4 @@ -257,7 +249,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -297,7 +289,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } if !useMockTxData { // Check compressed data compatibility. 
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -313,7 +305,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -330,7 +322,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -343,16 +335,16 @@ func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV4) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { +func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - if encoding.CodecVersion(data[0]) != encoding.CodecV4 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV4) + if CodecVersion(data[0]) != CodecV4 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) } - b := encoding.NewDABatchV2WithProof( + b := NewDABatchV2WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped @@ -375,8 +367,8 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv4MaxNumChunks) +func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -390,12 +382,12 @@ func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
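// --- Back-of-the-envelope check, not part of the patch, of the 193-byte
// length guard in NewDABatchFromBytes above. Only the first three field
// widths are visible in this diff; the remainder are inferred from the V2
// batch header and should be read as an assumption:
// version(1) + batchIndex(8) + l1MessagePopped(8) + totalL1MessagePopped(8)
// + dataHash(32) + blobVersionedHash(32) + parentBatchHash(32)
// + lastBlockTimestamp(8) + blobDataProof(2*32) = 193.
const assumedDABatchV2HeaderLength = 1 + 8 + 8 + 8 + 32 + 32 + 32 + 8 + 2*32 // = 193 bytes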
-func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) +func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -409,12 +401,12 @@ func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv4MaxNumChunks) +func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -422,7 +414,7 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bo if err != nil { return false, err } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -430,8 +422,8 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bo } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) +func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -439,7 +431,7 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bo if err != nil { return false, err } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -447,12 +439,12 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bo } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { - return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil +func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
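// --- Illustrative sketch, not part of the patch, of the sizing rule shared
// by the two estimators above: the estimated blob payload always reserves
// one leading byte for the compression flag, followed by either the
// zstd-compressed batch or the raw batch bytes, before CalculatePaddedBlobSize
// pads it into the blob's field-element layout. The helper name is
// hypothetical.
func estimatedBlobEnvelopeLength(enableCompress bool, compressedLen, rawLen uint64) uint64 {
	if enableCompress {
		return 1 + compressedLen // flag byte + compressed batch bytes
	}
	return 1 + rawLen // flag byte + uncompressed batch bytes
}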
-func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) @@ -465,7 +457,7 @@ func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64 } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV4) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -475,7 +467,7 @@ func (o *DACodecV4) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) } } - total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -484,17 +476,17 @@ func (o *DACodecV4) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV4) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -507,10 +499,10 @@ func (o *DACodecV4) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. @@ -518,26 +510,26 @@ func (o *DACodecV4) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV4) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 	var totalL1CommitGas uint64
 
 	// Add extra gas costs
-	totalL1CommitGas += 100000                          // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
-	totalL1CommitGas += 4 * 2100                        // 4 one-time cold sload for commitBatch
-	totalL1CommitGas += 20000                           // 1 time sstore
-	totalL1CommitGas += 21000                           // base fee for tx
-	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+	totalL1CommitGas += 100000                 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100               // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000                  // 1 time sstore
+	totalL1CommitGas += 21000                  // base fee for tx
+	totalL1CommitGas += CalldataNonZeroByteGas // version in calldata
 
 	// adjusting gas:
 	// add 1 time cold sload (2100 gas) for L1MessageQueue
 	// add 1 time cold address access (2600 gas) for L1MessageQueue
 	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
 	totalL1CommitGas += (2100 + 2600 - 100 - 100)
-	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
-	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+	totalL1CommitGas += GetKeccak256Gas(89 + 32)           // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
 
 	// adjust batch data hash gas cost
-	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+	totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks)))
 
 	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
 
@@ -551,8 +543,8 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
 		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
 
-		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
-		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+		totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
 
 		var totalL1CommitCalldataSize uint64
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -560,7 +552,7 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error)
 			return 0, err
 		}
 		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
 	}
 
 	totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call.
@@ -586,7 +578,7 @@ func (o *DACodecV4) SetCompression(enable bool) {
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers. 
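// --- Arithmetic check, not part of the patch, of the constant terms in
// EstimateBatchL1CommitGas above: 100000 (proxy/admin ops) + 4*2100 (cold
// sloads) + 20000 (sstore) + 21000 (tx base fee) + (2100 + 2600 - 100 - 100)
// (cold-vs-warm L1MessageQueue adjustment) = 153900 gas of fixed overhead,
// before the per-chunk terms and the single 50000-gas point-evaluation call.
const assumedFixedCommitOverheadGas = 100000 + 4*2100 + 20000 + 21000 + (2100 + 2600 - 100 - 100) // = 153900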
-func (o *DACodecV4) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 02b6e2c..8d21461 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -72,42 +72,35 @@ const ( CodecV4 ) -// MyCodecGen is a map that stores codec generator functions for each version. -var MyCodecGen = make(map[CodecVersion]func() Codec) - -// RegisterCodec registers a codec generator function for a specific version. -func RegisterCodec(version CodecVersion, codecGenFunc func() Codec) { - MyCodecGen[version] = codecGenFunc -} - -// getCodec retrieves a Codec instance for the specified version. -// It returns an error if the version is not supported. -func getCodec(version CodecVersion) (Codec, error) { - codecGen, ok := MyCodecGen[version] - if !ok { - return nil, fmt.Errorf("unsupported codec version: %d", version) - } - return codecGen(), nil -} - // CodecFromVersion returns the appropriate codec for the given version. func CodecFromVersion(version CodecVersion) (Codec, error) { - return getCodec(version) + switch version { + case CodecV0: + return &DACodecV0{}, nil + case CodecV1: + return &DACodecV1{}, nil + case CodecV2: + return &DACodecV2{}, nil + case CodecV3: + return &DACodecV3{}, nil + case CodecV4: + return &DACodecV4{}, nil + default: + return nil, fmt.Errorf("unsupported codec version: %v", version) + } } // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. -func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) (Codec, error) { - var version CodecVersion +func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { if chainCfg.IsDarwinV2(startBlockTimestamp) { - version = CodecV4 + return &DACodecV4{} } else if chainCfg.IsDarwin(startBlockTimestamp) { - version = CodecV3 + return &DACodecV3{} } else if chainCfg.IsCurie(startBlockNumber) { - version = CodecV2 + return &DACodecV2{} } else if chainCfg.IsBernoulli(startBlockNumber) { - version = CodecV1 + return &DACodecV1{} } else { - version = CodecV0 + return &DACodecV0{} } - return getCodec(version) } From e6c8965dc0d708973dcf64e56e2cb5073b3fb86a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sun, 6 Oct 2024 00:14:15 +0800 Subject: [PATCH 049/126] add common functions --- encoding/da.go | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/encoding/da.go b/encoding/da.go index 7e54800..7b6a1fa 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -12,6 +12,7 @@ import ( "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" ) // BLSModulus is the BLS modulus defined in EIP-4844. @@ -559,3 +560,48 @@ func DecodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks i } return nil } + +// GetHardforkName returns the name of the hardfork active at the given block height and timestamp. 
+func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string { + if !config.IsBernoulli(new(big.Int).SetUint64(blockHeight)) { + return "homestead" + } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { + return "bernoulli" + } else if !config.IsDarwin(blockTimestamp) { + return "curie" + } else if !config.IsDarwinV2(blockTimestamp) { + return "darwin" + } else { + return "darwinV2" + } +} + +// GetCodecVersion returns the encoding codec version for the given block height and timestamp. +func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uint64) CodecVersion { + if !config.IsBernoulli(new(big.Int).SetUint64(blockHeight)) { + return CodecV0 + } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { + return CodecV1 + } else if !config.IsDarwin(blockTimestamp) { + return CodecV2 + } else if !config.IsDarwinV2(blockTimestamp) { + return CodecV3 + } else { + return CodecV4 + } +} + +// GetMaxChunksPerBatch returns the maximum number of chunks allowed per batch for the given block height and timestamp. +func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestamp uint64) uint64 { + if !config.IsBernoulli(new(big.Int).SetUint64(blockHeight)) { + return 15 + } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { + return 15 + } else if !config.IsDarwin(blockTimestamp) { + return 45 + } else if !config.IsDarwinV2(blockTimestamp) { + return 45 + } else { + return 45 + } +} From ba853e3447d6e5da5bb9be17569b0d5ee2ec735e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sun, 6 Oct 2024 00:58:08 +0800 Subject: [PATCH 050/126] add EstimateBlockL1CommitCalldataSize --- encoding/codecv1.go | 5 +++++ encoding/codecv2.go | 5 +++++ encoding/codecv3.go | 5 +++++ encoding/codecv4.go | 5 +++++ encoding/interfaces.go | 1 + 5 files changed, 21 insertions(+) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index f7f70ef..c203525 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -375,6 +375,11 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { return total, nil } +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. +func (o *DACodecV1) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { + return BlockContextByteSize, nil +} + // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index fbf6a5f..3b0c149 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -404,6 +404,11 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error return true, nil } +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. +func (o *DACodecV2) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { + return BlockContextByteSize, nil +} + // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
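// --- Summary, not part of the patch, of the fork mapping implemented by
// GetHardforkName, GetCodecVersion and GetMaxChunksPerBatch above, written
// as an illustrative table; the values are taken straight from those
// helpers.
var assumedForkCodecTable = []struct {
	fork      string
	codec     CodecVersion
	maxChunks uint64
}{
	{"homestead", CodecV0, 15}, // pre-Bernoulli
	{"bernoulli", CodecV1, 15},
	{"curie", CodecV2, 45},
	{"darwin", CodecV3, 45},
	{"darwinV2", CodecV4, 45},
}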
func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 70fb756..1af557f 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -416,6 +416,11 @@ func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error return true, nil } +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. +func (o *DACodecV3) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { + return BlockContextByteSize, nil +} + // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 799ca50..6b5a91f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -438,6 +438,11 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error return true, nil } +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. +func (o *DACodecV4) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { + return BlockContextByteSize, nil +} + // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 8d21461..1a59ad9 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -52,6 +52,7 @@ type Codec interface { EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) CheckBatchCompressedDataCompatibility(*Batch) (bool, error) + EstimateBlockL1CommitCalldataSize(*Block) (uint64, error) EstimateChunkL1CommitCalldataSize(*Chunk) (uint64, error) EstimateChunkL1CommitGas(*Chunk) (uint64, error) EstimateBatchL1CommitGas(*Batch) (uint64, error) From aca0bef2163802a7e028e669d6c46782b67ff0c8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sun, 6 Oct 2024 01:20:14 +0800 Subject: [PATCH 051/126] add dabatch interfaces --- encoding/dabatch.go | 30 ++++++++++++++++++++++++++++++ encoding/interfaces.go | 2 ++ 2 files changed, 32 insertions(+) diff --git a/encoding/dabatch.go b/encoding/dabatch.go index d23690e..b145a68 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -75,6 +75,16 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, nil } +// Version returns the version of the DABatch. +func (b *DABatchV0) Version() uint8 { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *DABatchV0) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { DABatchV0 @@ -164,6 +174,16 @@ func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, nil } +// Version returns the version of the DABatch. +func (b *DABatchV1) Version() uint8 { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. 
+func (b *DABatchV1) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + // DABatchV2 contains metadata about a batch of DAChunks. type DABatchV2 struct { DABatchV0 @@ -355,3 +375,13 @@ func (b *DABatchV2) MarshalJSON() ([]byte, error) { }, }) } + +// Version returns the version of the DABatch. +func (b *DABatchV2) Version() uint8 { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *DABatchV2) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 1a59ad9..53af758 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -33,6 +33,8 @@ type DABatch interface { Blob() *kzg4844.Blob BlobBytes() []byte BlobVersionedHashes() []common.Hash + Version() uint8 + SkippedL1MessageBitmap() []byte } // Codec represents the interface for encoding and decoding DA-related structures. From 95d2bc79b7e8fe084fbe5571e41830d3b6636dcf Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 7 Oct 2024 03:23:42 +0800 Subject: [PATCH 052/126] update interface implementations --- encoding/codecv1.go | 44 ++++++++++++++++++-------------------------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index c203525..9c45cf8 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -304,30 +304,6 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { - metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length - chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) - if err != nil { - return 0, err - } - return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil -} - -// EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. -func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { - metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) - var batchDataSize uint64 - for _, c := range b.Chunks { - chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) - if err != nil { - return 0, err - } - batchDataSize += chunkDataSize - } - return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil -} - func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { @@ -480,12 +456,28 @@ func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - return 0, 0, nil + metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) + batchDataSize, err := o.chunkL1CommitBlobDataSize(c) + if err != nil { + return 0, 0, err + } + blobSize := CalculatePaddedBlobSize(metadataSize + batchDataSize) + return blobSize, blobSize, nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
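// --- Illustrative use, not part of the patch, of the DABatch interface
// additions above: with Version and SkippedL1MessageBitmap promoted to the
// interface, callers can inspect a decoded batch without type-asserting a
// concrete DABatchVx. The function name is hypothetical.
func describeBatch(b DABatch) (version uint8, bitmapWords int) {
	return b.Version(), len(b.SkippedL1MessageBitmap()) / 32 // 32 bytes per 256-bit word
}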
func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - return 0, 0, nil + metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) + var batchDataSize uint64 + for _, c := range b.Chunks { + chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) + if err != nil { + return 0, 0, err + } + batchDataSize += chunkDataSize + } + blobSize := CalculatePaddedBlobSize(metadataSize + batchDataSize) + return blobSize, blobSize, nil } // SetCompression enables or disables compression. From 75812c4c0ba14957a0fcfd66629a9562a3a437c5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 7 Oct 2024 04:09:38 +0800 Subject: [PATCH 053/126] add data hash --- encoding/dabatch.go | 15 +++++++++++++++ encoding/interfaces.go | 1 + 2 files changed, 16 insertions(+) diff --git a/encoding/dabatch.go b/encoding/dabatch.go index b145a68..2ad38fa 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -85,6 +85,11 @@ func (b *DABatchV0) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } +// DataHash returns the data hash of the DABatch. +func (b *DABatchV0) DataHash() common.Hash { + return b.dataHash +} + // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { DABatchV0 @@ -184,6 +189,11 @@ func (b *DABatchV1) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } +// DataHash returns the data hash of the DABatch. +func (b *DABatchV1) DataHash() common.Hash { + return b.dataHash +} + // DABatchV2 contains metadata about a batch of DAChunks. type DABatchV2 struct { DABatchV0 @@ -385,3 +395,8 @@ func (b *DABatchV2) Version() uint8 { func (b *DABatchV2) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } + +// DataHash returns the data hash of the DABatch. +func (b *DABatchV2) DataHash() common.Hash { + return b.dataHash +} diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 53af758..8e43fd8 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -29,6 +29,7 @@ type DAChunk interface { type DABatch interface { Encode() []byte Hash() common.Hash + DataHash() common.Hash BlobDataProofForPointEvaluation() ([]byte, error) Blob() *kzg4844.Blob BlobBytes() []byte From 78588e478cd47f599db08c34a0781ccc0c89ae22 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 7 Oct 2024 17:52:58 +0800 Subject: [PATCH 054/126] fix codecv3 & codecv4 estimate gas --- encoding/codecv3.go | 16 +++++++++++++--- encoding/codecv4.go | 16 +++++++++++++--- 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 1af557f..6a60305 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -468,8 +468,8 @@ func (o *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { return total, nil } -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +// estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. +func (o *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -492,6 +492,16 @@ func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { return totalL1CommitGas, nil } +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
+func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + totalL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(c) + if err != nil { + return 0, err + } + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + return totalL1CommitGas, nil +} + // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 @@ -530,7 +540,7 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) var totalL1CommitCalldataSize uint64 - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) if err != nil { return 0, err } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 6b5a91f..ee3d38b 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -490,8 +490,8 @@ func (o *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { return total, nil } -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +// estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. +func (o *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -514,6 +514,16 @@ func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { return totalL1CommitGas, nil } +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + totalL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(c) + if err != nil { + return 0, err + } + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + return totalL1CommitGas, nil +} + // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
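// --- Sketch, not part of the patch, of the accounting this refactor works
// toward (and which the follow-up "fix" commits below complete): the
// 50000-gas point-evaluation precompile call is paid once per commit, so a
// batch estimate should sum the per-chunk gas WITHOUT the precompile term
// and add 50000 once at the end. The helper is hypothetical.
func assumedBatchPointEvalGas(chunkGasWithoutPointEval []uint64) uint64 {
	var total uint64
	for _, g := range chunkGasWithoutPointEval {
		total += g // per-chunk estimate, excluding the precompile call
	}
	total += 50000 // point-evaluation precompile, charged once per batch
	return total
}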
func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 @@ -552,7 +562,7 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) var totalL1CommitCalldataSize uint64 - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) if err != nil { return 0, err } From 45548e1d35fe07ffc24e86aacd8d92000fcf740b Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 7 Oct 2024 20:43:39 +0800 Subject: [PATCH 055/126] fix --- encoding/codecv3.go | 8 ++------ encoding/codecv4.go | 8 ++------ 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6a60305..de7c389 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -482,13 +482,10 @@ func (o *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. - return totalL1CommitGas, nil } @@ -549,7 +546,6 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. - return totalL1CommitGas, nil } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index ee3d38b..32ba2e4 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -504,13 +504,10 @@ func (o *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. - return totalL1CommitGas, nil } @@ -571,7 +568,6 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. 
- return totalL1CommitGas, nil } From 4469219611eb2ddee83a22cc32c37b8dd67369dd Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 7 Oct 2024 21:27:51 +0800 Subject: [PATCH 056/126] fix bugs --- encoding/codecv1.go | 4 +--- encoding/codecv2.go | 4 +--- encoding/codecv3.go | 8 +++----- encoding/codecv4.go | 8 +++----- 4 files changed, 8 insertions(+), 16 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 9c45cf8..86d5241 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -419,13 +419,11 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - var totalL1CommitCalldataSize uint64 chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) } return totalL1CommitGas, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 3b0c149..af686ce 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -514,13 +514,11 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - var totalL1CommitCalldataSize uint64 chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) } return totalL1CommitGas, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index de7c389..2de23aa 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -524,7 +524,7 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) if err != nil { return 0, err } @@ -536,13 +536,11 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - var totalL1CommitCalldataSize uint64 - chunkL1CommitCalldataSize, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) } totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. 
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index 32ba2e4..2954814 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -546,7 +546,7 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
 
 	for _, chunk := range b.Chunks {
-		chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk)
+		chunkL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk)
 		if err != nil {
 			return 0, err
 		}
@@ -558,13 +558,11 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 		totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
 		totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
 
-		var totalL1CommitCalldataSize uint64
-		chunkL1CommitCalldataSize, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk)
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
 		if err != nil {
 			return 0, err
 		}
-		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize)
 	}
 
 	totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call.

From 6934f3dd22744976375d74aa1d83ddc5f46918e9 Mon Sep 17 00:00:00 2001
From: colinlyguo <colinlyguo@scroll.io>
Date: Mon, 7 Oct 2024 21:44:01 +0800
Subject: [PATCH 057/126] tweak

---
 encoding/codecv0.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/encoding/codecv0.go b/encoding/codecv0.go
index c207819..4b670b9 100644
--- a/encoding/codecv0.go
+++ b/encoding/codecv0.go
@@ -356,11 +356,11 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 		totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
 		totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
 
-		totalL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
 		if err != nil {
 			return 0, err
 		}
-		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize)
 	}
 
 	return totalL1CommitGas, nil

From 0079225045ccfbff30822131a886fda350a404d2 Mon Sep 17 00:00:00 2001
From: colinlyguo <colinlyguo@scroll.io>
Date: Mon, 7 Oct 2024 22:45:55 +0800
Subject: [PATCH 058/126] add CheckChunkCompressedDataCompatibility & CheckBatchCompressedDataCompatibility

---
 encoding/da.go | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/encoding/da.go b/encoding/da.go
index 7b6a1fa..1b6b047 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -605,3 +605,21 @@ func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestam
 		return 45
 	}
 }
+
+// CheckChunkCompressedDataCompatibility checks compressed data compatibility of a batch built from a single chunk.
+func CheckChunkCompressedDataCompatibility(chunk *Chunk, codecVersion CodecVersion) (bool, error) {
+	codec, err := CodecFromVersion(codecVersion)
+	if err != nil {
+		return false, fmt.Errorf("failed to get codec from version: %w", err)
+	}
+	return codec.CheckChunkCompressedDataCompatibility(chunk)
+}
+
+// CheckBatchCompressedDataCompatibility checks compressed data compatibility of a batch.
+func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersion CodecVersion) (bool, error) { + codec, err := CodecFromVersion(codecVersion) + if err != nil { + return false, fmt.Errorf("failed to get codec from version: %w", err) + } + return codec.CheckBatchCompressedDataCompatibility(batch) +} From 3f774d2c32fbde4cacd6f77fedd9c950c5511a7f Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 7 Oct 2024 23:07:35 +0800 Subject: [PATCH 059/126] fix --- encoding/codecv0.go | 2 +- encoding/codecv2.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 4b670b9..4a87738 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -402,7 +402,7 @@ func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // SetCompression enables or disables compression. func (o *DACodecV0) SetCompression(enable bool) {} -// JSONFromBytes for CodecV1 returns empty values. +// JSONFromBytes for CodecV0 returns empty values. func (c *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) { // DACodecV0 doesn't need this, so just return empty values return nil, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index af686ce..dd8a1b8 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -552,7 +552,7 @@ func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe return dataHash, nil } -// JSONFromBytes for CodecV1 returns empty values. +// JSONFromBytes for CodecV2 returns empty values. func (c *DACodecV2) JSONFromBytes(data []byte) ([]byte, error) { // DACodecV2 doesn't need this, so just return empty values return nil, nil From 669b454fad33f927f6d1af4085937cce9364edd5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 8 Oct 2024 01:25:57 +0800 Subject: [PATCH 060/126] add BlobDataProofForPointEvaluation --- encoding/dabatch.go | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 2ad38fa..cb0ec8d 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -176,7 +176,24 @@ func (b *DABatchV1) BlobBytes() []byte { // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + return BlobDataProofFromValues(*b.z, y, commitment, proof), nil } // Version returns the version of the DABatch. 
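The proof added above is the input that on-chain verification forwards to the EIP-4844 point-evaluation precompile: it packs (z, y, commitment, proof), where y = p(z) for the polynomial p encoded by the blob, and the contract prepends the blob versioned hash before calling the precompile. A hedged usage sketch follows; the import path matches this repository, but the package name is hypothetical and the daBatch value is assumed to come from a codec's NewDABatch:

package batchutil // hypothetical helper package, for illustration only

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

// printBlobDataProof is a sketch only: it assumes the DABatch interface
// exposes BlobDataProofForPointEvaluation, as DABatchV1 implements above.
func printBlobDataProof(daBatch encoding.DABatch) error {
	blobDataProof, err := daBatch.BlobDataProofForPointEvaluation()
	if err != nil {
		return fmt.Errorf("failed to compute blob data proof: %w", err)
	}
	// Expected layout: 32-byte z || 32-byte y || 48-byte KZG commitment ||
	// 48-byte KZG proof, i.e. 160 bytes in total.
	fmt.Printf("blob data proof: %d bytes\n", len(blobDataProof))
	return nil
}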
From 09127a535cb720d0bbdc597d102bb3af37cc8fb2 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 01:52:14 +0800 Subject: [PATCH 061/126] add nil check in NewDAChunk --- encoding/codecv1.go | 4 ++++ encoding/codecv2.go | 4 ++++ encoding/codecv3.go | 4 ++++ encoding/codecv4.go | 4 ++++ 4 files changed, 16 insertions(+) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 86d5241..a5c03fc 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -58,6 +58,10 @@ func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index dd8a1b8..858c1eb 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -62,6 +62,10 @@ func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 2de23aa..c0cc3d6 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -63,6 +63,10 @@ func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 2954814..6bcaa35 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -66,6 +66,10 @@ func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } From 1c519d6feadffd059342566e5232db0f440079a8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 15:07:03 +0800 Subject: [PATCH 062/126] make some util functions internal --- encoding/codecv0.go | 20 ++++++++++---------- encoding/codecv1.go | 18 +++++++++--------- encoding/codecv2.go | 26 +++++++++++++------------- encoding/codecv3.go | 26 +++++++++++++------------- encoding/codecv4.go | 26 +++++++++++++------------- encoding/da.go | 22 +++++++++++----------- 6 files changed, 69 insertions(+), 69 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 4a87738..0842e4c 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -237,7 +237,7 @@ func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) continue } size += 4 // 4 bytes payload length - txPayloadLength, err := GetTxPayloadLength(txData) + txPayloadLength, err := getTxPayloadLength(txData) if err != nil { return 0, err } @@ -257,13 +257,13 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { continue } - txPayloadLength, err := GetTxPayloadLength(txData) + txPayloadLength, err := getTxPayloadLength(txData) if err != nil { return 0, err } total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero total += CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += getKeccak256Gas(txPayloadLength) // l2 tx hash } total += CalldataNonZeroByteGas * BlockContextByteSize @@ -275,11 +275,11 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy total += 100 * numL1Messages // read admin in proxy total += 100 * numL1Messages // read impl in proxy total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } @@ -315,7 +315,7 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } @@ -335,11 +335,11 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 
32 (1 skippedL1MessageBitmap) totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -354,13 +354,13 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) } return totalL1CommitGas, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index a5c03fc..35db016 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -346,11 +346,11 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy total += 100 * numL1Messages // read admin in proxy total += 100 * numL1Messages // read impl in proxy total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } @@ -382,7 +382,7 @@ func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } @@ -402,11 +402,11 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -421,13 +421,13 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - 
totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) } return totalL1CommitGas, nil @@ -463,7 +463,7 @@ func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, if err != nil { return 0, 0, err } - blobSize := CalculatePaddedBlobSize(metadataSize + batchDataSize) + blobSize := calculatePaddedBlobSize(metadataSize + batchDataSize) return blobSize, blobSize, nil } @@ -478,7 +478,7 @@ func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } batchDataSize += chunkDataSize } - blobSize := CalculatePaddedBlobSize(metadataSize + batchDataSize) + blobSize := calculatePaddedBlobSize(metadataSize + batchDataSize) return blobSize, blobSize, nil } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 858c1eb..0bf14b5 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -340,7 +340,7 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -348,12 +348,12 @@ func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -361,13 +361,13 @@ func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -389,7 +389,7 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -451,11 +451,11 @@ func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy total += 100 * numL1Messages // read admin in proxy total += 100 * numL1Messages // read impl in proxy total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } @@ -477,7 +477,7 @@ func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } @@ -497,11 +497,11 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -516,13 +516,13 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) } return totalL1CommitGas, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index c0cc3d6..eac9c28 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -354,7 +354,7 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size 
and compressed blob size for a single chunk. func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) if err != nil { return 0, 0, err } @@ -362,12 +362,12 @@ func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) if err != nil { return 0, 0, err } @@ -375,12 +375,12 @@ func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) if err != nil { return false, err } @@ -401,7 +401,7 @@ func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) if err != nil { return false, err } @@ -463,11 +463,11 @@ func (o *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy total += 100 * numL1Messages // read admin in proxy total += 100 * numL1Messages // read impl in proxy total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } @@ -488,7 +488,7 @@ func (o *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui numBlocks := uint64(len(c.Blocks)) totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } @@ -519,11 +519,11 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -538,13 +538,13 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) } totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 6bcaa35..fee3b76 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -372,7 +372,7 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -386,12 +386,12 @@ func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -405,12 +405,12 @@ func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -427,7 +427,7 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -485,11 +485,11 @@ func (o *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy total += 100 * numL1Messages // read admin in proxy total += 100 * numL1Messages // read impl in proxy total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } @@ -510,7 +510,7 @@ func (o *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui numBlocks := uint64(len(c.Blocks)) totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } @@ -541,11 +541,11 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -560,13 +560,13 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(chunkL1CommitCalldataSize) + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) } totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. 
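The helpers renamed in this commit are defined in encoding/da.go, covered by the next hunk. One of them, calculatePaddedBlobSize, encodes the blob packing rule used throughout these estimators: each 32-byte word of a blob carries only 31 payload bytes, with the leading byte left zero so every word is a canonical field element. A minimal standalone check of the rounding, with hypothetical data sizes:

package main

import "fmt"

// Same rounding as calculatePaddedBlobSize in encoding/da.go: round the data
// size up to whole 31-byte groups, each stored in a 32-byte blob word.
func paddedBlobSize(dataSize uint64) uint64 {
	padded := (dataSize / 31) * 32
	if dataSize%31 != 0 {
		padded += 32
	}
	return padded
}

func main() {
	fmt.Println(paddedBlobSize(31))   // 32: exactly one 31-byte group
	fmt.Println(paddedBlobSize(32))   // 64: one extra byte forces a second group
	fmt.Println(paddedBlobSize(4096)) // 4256: 132 full groups plus a partial one
}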
diff --git a/encoding/da.go b/encoding/da.go index 1b6b047..93a3332 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -385,9 +385,9 @@ func DecompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) { return res, nil } -// CalculatePaddedBlobSize calculates the required size on blob storage +// calculatePaddedBlobSize calculates the required size on blob storage // where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { +func calculatePaddedBlobSize(dataSize uint64) uint64 { paddedSize := (dataSize / 31) * 32 if dataSize%31 != 0 { @@ -397,9 +397,9 @@ func CalculatePaddedBlobSize(dataSize uint64) uint64 { return paddedSize } -// ConstructBatchPayloadInBlob constructs the batch payload. +// constructBatchPayloadInBlob constructs the batch payload. // This function is only used in compressed batch payload length estimation. -func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { +func constructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -436,20 +436,20 @@ func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, return batchBytes, nil } -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) +// getKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. +func getKeccak256Gas(size uint64) uint64 { + return getMemoryExpansionCost(size) + 30 + 6*((size+31)/32) } -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { +// getMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. +func getMemoryExpansionCost(memoryByteSize uint64) uint64 { memorySizeWord := (memoryByteSize + 31) / 32 memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) return memoryCost } -// GetTxPayloadLength calculates the length of the transaction payload. -func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { +// getTxPayloadLength calculates the length of the transaction payload. +func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return 0, err From d2350ffcd5444389902274f46ed0f670abad8039 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 15:29:06 +0800 Subject: [PATCH 063/126] remove GetMaxChunksPerBatch --- encoding/codecv0.go | 8 ++++++++ encoding/codecv1.go | 5 +++++ encoding/codecv2.go | 5 +++++ encoding/codecv3.go | 5 +++++ encoding/codecv4.go | 5 +++++ encoding/da.go | 15 --------------- encoding/interfaces.go | 1 + 7 files changed, 29 insertions(+), 15 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 0842e4c..40d8f34 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -15,11 +15,19 @@ import ( type DACodecV0 struct{} +// Codecv0MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv0MaxNumChunks = 15 + // Version returns the codec version. func (o *DACodecV0) Version() CodecVersion { return CodecV0 } +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. 
+func (o *DACodecV0) MaxNumChunksPerBatch() uint64 { + return Codecv0MaxNumChunks +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 35db016..7efa304 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -25,6 +25,11 @@ func (o *DACodecV1) Version() CodecVersion { return CodecV1 } +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (o *DACodecV1) MaxNumChunksPerBatch() uint64 { + return Codecv1MaxNumChunks +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 0bf14b5..50a2186 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -29,6 +29,11 @@ func (o *DACodecV2) Version() CodecVersion { return CodecV2 } +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (o *DACodecV2) MaxNumChunksPerBatch() uint64 { + return Codecv2MaxNumChunks +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/codecv3.go b/encoding/codecv3.go index eac9c28..2f5081e 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -30,6 +30,11 @@ func (o *DACodecV3) Version() CodecVersion { return CodecV3 } +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (o *DACodecV3) MaxNumChunksPerBatch() uint64 { + return Codecv3MaxNumChunks +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/codecv4.go b/encoding/codecv4.go index fee3b76..ff9bdfc 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -33,6 +33,11 @@ func (o *DACodecV4) Version() CodecVersion { return CodecV4 } +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (o *DACodecV4) MaxNumChunksPerBatch() uint64 { + return Codecv4MaxNumChunks +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/da.go b/encoding/da.go index 93a3332..0a239b1 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -591,21 +591,6 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin } } -// GetMaxChunksPerBatch returns the maximum number of chunks allowed per batch for the given block height and timestamp. 
-func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestamp uint64) uint64 { - if !config.IsBernoulli(new(big.Int).SetUint64(blockHeight)) { - return 15 - } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { - return 15 - } else if !config.IsDarwin(blockTimestamp) { - return 45 - } else if !config.IsDarwinV2(blockTimestamp) { - return 45 - } else { - return 45 - } -} - // CheckChunkCompressedDataCompatibility checks compressed data compatibility of a batch built by a single chunk. func CheckChunkCompressedDataCompatibility(chunk *Chunk, codecVersion CodecVersion) (bool, error) { codec, err := CodecFromVersion(codecVersion) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 8e43fd8..1ac75c4 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -41,6 +41,7 @@ type DABatch interface { // Codec represents the interface for encoding and decoding DA-related structures. type Codec interface { Version() CodecVersion + MaxNumChunksPerBatch() uint64 NewDABlock(*Block, uint64) (DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) From b885af8bf214f59696d548cfca98d04bc85a65bd Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 15:39:03 +0800 Subject: [PATCH 064/126] fix CI --- encoding/codecv0.go | 6 +++--- encoding/codecv1.go | 24 ++++++++++++------------ encoding/codecv2.go | 28 ++++++++++++++-------------- encoding/codecv3.go | 28 ++++++++++++++-------------- encoding/codecv4.go | 30 +++++++++++++++--------------- encoding/da.go | 4 ++-- 6 files changed, 60 insertions(+), 60 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 40d8f34..edd06c0 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -15,8 +15,8 @@ import ( type DACodecV0 struct{} -// Codecv0MaxNumChunks is the maximum number of chunks that a batch can contain. -const Codecv0MaxNumChunks = 15 +// codecv0MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv0MaxNumChunks = 15 // Version returns the codec version. func (o *DACodecV0) Version() CodecVersion { @@ -25,7 +25,7 @@ func (o *DACodecV0) Version() CodecVersion { // MaxNumChunksPerBatch returns the maximum number of chunks per batch. func (o *DACodecV0) MaxNumChunksPerBatch() uint64 { - return Codecv0MaxNumChunks + return codecv0MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 7efa304..ae0ed1c 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -17,8 +17,8 @@ import ( type DACodecV1 struct{} -// Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. -const Codecv1MaxNumChunks = 15 +// codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv1MaxNumChunks = 15 // Version returns the codec version. func (o *DACodecV1) Version() CodecVersion { @@ -27,7 +27,7 @@ func (o *DACodecV1) Version() CodecVersion { // MaxNumChunksPerBatch returns the maximum number of chunks per batch. func (o *DACodecV1) MaxNumChunksPerBatch() uint64 { - return Codecv1MaxNumChunks + return codecv1MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
@@ -134,13 +134,13 @@ func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (o *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { batchBytes := BytesFromBlobCanonical(blob) - return DecodeTxsFromBytes(batchBytes[:], chunks, Codecv1MaxNumChunks) + return DecodeTxsFromBytes(batchBytes[:], chunks, codecv1MaxNumChunks) } // NewDABatch creates a DABatch from the provided Batch. func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > Codecv1MaxNumChunks { + if len(batch.Chunks) > codecv1MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -200,14 +200,14 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + Codecv1MaxNumChunks*4 + metadataLength := 2 + codecv1MaxNumChunks*4 // the raw (un-padded) blob payload blobBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+Codecv1MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+codecv1MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -245,10 +245,10 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than Codecv1MaxNumChunks chunks, the rest + // if we have fewer than codecv1MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < Codecv1MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < codecv1MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -271,7 +271,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+Codecv1MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+codecv1MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -463,7 +463,7 @@ func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) + metadataSize := uint64(2 + 4*codecv1MaxNumChunks) batchDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, 0, err @@ -474,7 +474,7 @@ func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) + metadataSize := uint64(2 + 4*codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 50a2186..f493504 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -21,8 +21,8 @@ import ( type DACodecV2 struct{} -// Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. -const Codecv2MaxNumChunks = 45 +// codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv2MaxNumChunks = 45 // Version returns the codec version. func (o *DACodecV2) Version() CodecVersion { @@ -31,7 +31,7 @@ func (o *DACodecV2) Version() CodecVersion { // MaxNumChunksPerBatch returns the maximum number of chunks per batch. func (o *DACodecV2) MaxNumChunksPerBatch() uint64 { - return Codecv2MaxNumChunks + return codecv2MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. @@ -144,13 +144,13 @@ func (o *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, Codecv2MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, codecv2MaxNumChunks) } // NewDABatch creates a DABatch from the provided Batch. func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > Codecv2MaxNumChunks { + if len(batch.Chunks) > codecv2MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -210,14 +210,14 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash // constructBlobPayload constructs the 4844 blob payload. 
func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + Codecv2MaxNumChunks*4 + metadataLength := 2 + codecv2MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+Codecv2MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+codecv2MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -255,10 +255,10 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than Codecv2MaxNumChunks chunks, the rest + // if we have fewer than codecv2MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < Codecv2MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < codecv2MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -301,7 +301,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+Codecv2MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+codecv2MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -345,7 +345,7 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) if err != nil { return 0, 0, err } @@ -358,7 +358,7 @@ func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) if err != nil { return 0, 0, err } @@ -372,7 +372,7 @@ func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. 
func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) if err != nil { return false, err } @@ -394,7 +394,7 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) if err != nil { return false, err } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 2f5081e..0430f9d 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -22,8 +22,8 @@ import ( type DACodecV3 struct{} -// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. -const Codecv3MaxNumChunks = 45 +// codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv3MaxNumChunks = 45 // Version returns the codec version. func (o *DACodecV3) Version() CodecVersion { @@ -32,7 +32,7 @@ func (o *DACodecV3) Version() CodecVersion { // MaxNumChunksPerBatch returns the maximum number of chunks per batch. func (o *DACodecV3) MaxNumChunksPerBatch() uint64 { - return Codecv3MaxNumChunks + return codecv3MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. @@ -145,13 +145,13 @@ func (o *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, Codecv3MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, codecv3MaxNumChunks) } // NewDABatch creates a DABatch from the provided Batch. func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > Codecv3MaxNumChunks { + if len(batch.Chunks) > codecv3MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -218,14 +218,14 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash // constructBlobPayload constructs the 4844 blob payload. 
func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + Codecv3MaxNumChunks*4 + metadataLength := 2 + codecv3MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+Codecv3MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+codecv3MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -263,10 +263,10 @@ func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than Codecv2MaxNumChunks chunks, the rest + // if we have fewer than codecv3MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < Codecv3MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < codecv3MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -309,7 +309,7 @@ func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+Codecv3MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+codecv3MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -359,7 +359,7 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) if err != nil { return 0, 0, err } @@ -372,7 +372,7 @@ func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) if err != nil { return 0, 0, err } @@ -385,7 +385,7 @@ func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) if err != nil { return false, err } @@ -406,7 +406,7 @@ func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) if err != nil { return false, err } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index ff9bdfc..2b5f2b5 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,8 +25,8 @@ type DACodecV4 struct { enableCompress uint32 } -// Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. -const Codecv4MaxNumChunks = 45 +// codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv4MaxNumChunks = 45 // Version returns the codec version. func (o *DACodecV4) Version() CodecVersion { @@ -35,7 +35,7 @@ func (o *DACodecV4) Version() CodecVersion { // MaxNumChunksPerBatch returns the maximum number of chunks per batch. func (o *DACodecV4) MaxNumChunksPerBatch() uint64 { - return Codecv4MaxNumChunks + return codecv4MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. @@ -150,16 +150,16 @@ func (o *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, Codecv4MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, codecv4MaxNumChunks) } else { - return DecodeTxsFromBytes(rawBytes[1:], chunks, Codecv4MaxNumChunks) + return DecodeTxsFromBytes(rawBytes[1:], chunks, codecv4MaxNumChunks) } } // NewDABatch creates a DABatch from the provided Batch. func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > Codecv4MaxNumChunks { + if len(batch.Chunks) > codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -231,14 +231,14 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash // constructBlobPayload constructs the 4844 blob payload. 
func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + Codecv4MaxNumChunks*4 + metadataLength := 2 + codecv4MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+Codecv4MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+codecv4MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -276,10 +276,10 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than Codecv4MaxNumChunks chunks, the rest + // if we have fewer than codecv4MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < Codecv4MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < codecv4MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -327,7 +327,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+Codecv4MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+codecv4MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -377,7 +377,7 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) if err != nil { return 0, 0, err } @@ -396,7 +396,7 @@ func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) if err != nil { return 0, 0, err } @@ -415,7 +415,7 @@ func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
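The codecv4 estimators above compress only when enabled and then pad. A condensed sketch of that flow: compressScroll and paddedSize are stand-ins for zstd.CompressScrollBatchBytes and the codec's padding helper, and counting one byte for the envelope flag is this sketch's assumption.

package main

import "fmt"

// estimateBlobSize sketches the estimate flow above: optionally compress the
// batch payload, count one byte for the envelope flag, then pad to the blob's
// field-element layout.
func estimateBlobSize(batchBytes []byte, compress bool,
	compressScroll func([]byte) ([]byte, error),
	paddedSize func(uint64) uint64) (uint64, error) {
	blobBytesLength := uint64(len(batchBytes))
	if compress {
		blobBytes, err := compressScroll(batchBytes)
		if err != nil {
			return 0, err
		}
		blobBytesLength = uint64(len(blobBytes))
	}
	return paddedSize(1 + blobBytesLength), nil
}

func main() {
	pad := func(n uint64) uint64 { return ((n + 30) / 31) * 32 } // placeholder padding rule
	size, err := estimateBlobSize(make([]byte, 100), false, nil, pad)
	fmt.Println(size, err) // 128 <nil>
}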
func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) if err != nil { return false, err } @@ -432,7 +432,7 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) if err != nil { return false, err } diff --git a/encoding/da.go b/encoding/da.go index 0a239b1..538de67 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -399,9 +399,9 @@ func calculatePaddedBlobSize(dataSize uint64) uint64 { // constructBatchPayloadInBlob constructs the batch payload. // This function is only used in compressed batch payload length estimation. -func constructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { +func constructBatchPayloadInBlob(chunks []*Chunk, codec Codec) ([]byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + codec.MaxNumChunksPerBatch()*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) From 986850d727f596b38a8accaf5f313d2f41e8e601 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 15:46:04 +0800 Subject: [PATCH 065/126] change receiver mark from o to d --- encoding/codecv0.go | 56 +++++++++++++------------- encoding/codecv1.go | 68 +++++++++++++++---------------- encoding/codecv2.go | 70 ++++++++++++++++---------------- encoding/codecv3.go | 78 ++++++++++++++++++------------------ encoding/codecv4.go | 98 ++++++++++++++++++++++----------------------- 5 files changed, 185 insertions(+), 185 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index edd06c0..54c98cc 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -19,17 +19,17 @@ type DACodecV0 struct{} const codecv0MaxNumChunks = 15 // Version returns the codec version. -func (o *DACodecV0) Version() CodecVersion { +func (d *DACodecV0) Version() CodecVersion { return CodecV0 } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (o *DACodecV0) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV0) MaxNumChunksPerBatch() uint64 { return codecv0MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (d *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -60,7 +60,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
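The encoding/da.go hunk above is the pivot of this refactor: constructBatchPayloadInBlob now receives the codec itself and asks it for MaxNumChunksPerBatch(), instead of each caller threading a version-specific constant through. A trimmed-down sketch of the decoupling, with the interface reduced to the one method the helper needs:

package main

import "fmt"

// Codec is trimmed to the single method the shared helper needs.
type Codec interface {
	MaxNumChunksPerBatch() uint64
}

type codecV0 struct{}

func (codecV0) MaxNumChunksPerBatch() uint64 { return 15 }

type codecV2 struct{}

func (codecV2) MaxNumChunksPerBatch() uint64 { return 45 }

// metadataLength mirrors the "2 + MaxNumChunks*4" computation in
// constructBatchPayloadInBlob, now derived from the codec itself.
func metadataLength(c Codec) uint64 {
	return 2 + c.MaxNumChunksPerBatch()*4
}

func main() {
	fmt.Println(metadataLength(codecV0{}), metadataLength(codecV2{})) // 62 182
}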
-func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { var blocks []DABlock var txs [][]*types.TransactionData @@ -77,7 +77,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } for _, block := range chunk.Blocks { - b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -95,7 +95,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -func (o *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { var chunks []*DAChunkRawTx for _, chunk := range chunkBytes { if len(chunk) < 1 { @@ -153,19 +153,19 @@ func (o *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, e } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { return nil } // NewDABatch creates a DABatch from the provided Batch. -func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { +func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // compute batch data hash var dataBytes []byte totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore for _, chunk := range batch.Chunks { // build data hash - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return nil, err } @@ -201,8 +201,8 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := o.NewDABatch(batch) +func (d *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := d.NewDABatch(batch) if err != nil { return nil, err } @@ -215,7 +215,7 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } @@ -238,7 +238,7 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. 
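NewDABatch above accumulates each DAChunk's hash into dataBytes and hashes the concatenation (keccak256, per the computeBatchDataHash comments elsewhere in this series). A standalone sketch of that accumulation pattern; chunk hashes are supplied directly here, whereas in the codec they come from the DAChunk itself.

package main

import (
	"fmt"

	"golang.org/x/crypto/sha3"
)

// batchDataHash shows the pattern above: append each chunk's hash to a running
// buffer and keccak256 the concatenation.
func batchDataHash(chunkHashes [][32]byte) [32]byte {
	keccak := sha3.NewLegacyKeccak256()
	for _, h := range chunkHashes {
		keccak.Write(h[:])
	}
	var out [32]byte
	copy(out[:], keccak.Sum(nil))
	return out
}

func main() {
	fmt.Printf("%x\n", batchDataHash([][32]byte{{0x01}, {0x02}}))
}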
-func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (d *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 for _, txData := range b.Transactions { if txData.Type == types.L1MessageTxType { @@ -256,7 +256,7 @@ func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (d *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -293,10 +293,10 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (d *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, block := range c.Blocks { - blockL1CommitCalldataSize, err := o.EstimateBlockL1CommitCalldataSize(block) + blockL1CommitCalldataSize, err := d.EstimateBlockL1CommitCalldataSize(block) if err != nil { return 0, err } @@ -306,12 +306,12 @@ func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (d *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalTxNum uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalTxNum += uint64(len(block.Transactions)) - blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -328,7 +328,7 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs @@ -352,7 +352,7 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) if err != nil { return 0, err } @@ -364,7 +364,7 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -375,10 +375,10 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
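The gas terms above repeatedly use `(totalL1MessagePoppedInChunk + 255) / 256`, which is just ceiling division: the skipped-L1-message bitmap occupies one 32-byte word per 256 messages. A worked check:

package main

import "fmt"

// skippedBitmapBytes captures the recurring term above:
// 32 * ((n + 255) / 256) is ceil(n/256) bitmap words of 32 bytes each.
func skippedBitmapBytes(n uint64) uint64 {
	return 32 * ((n + 255) / 256)
}

func main() {
	for _, n := range []uint64{0, 1, 256, 257} {
		fmt.Println(n, skippedBitmapBytes(n)) // 0, 32, 32, 64
	}
}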
-func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (d *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -388,27 +388,27 @@ func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { +func (d *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { return true, nil } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { +func (d *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { return true, nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { +func (d *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { return 0, 0, nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { +func (d *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { return 0, 0, nil } // SetCompression enables or disables compression. -func (o *DACodecV0) SetCompression(enable bool) {} +func (d *DACodecV0) SetCompression(enable bool) {} // JSONFromBytes for CodecV0 returns empty values. func (c *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) { diff --git a/encoding/codecv1.go b/encoding/codecv1.go index ae0ed1c..a9b4925 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -21,17 +21,17 @@ type DACodecV1 struct{} const codecv1MaxNumChunks = 15 // Version returns the codec version. -func (o *DACodecV1) Version() CodecVersion { +func (d *DACodecV1) Version() CodecVersion { return CodecV1 } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (o *DACodecV1) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV1) MaxNumChunksPerBatch() uint64 { return codecv1MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (d *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -62,7 +62,7 @@ func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
-func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if chunk == nil { return nil, errors.New("chunk is nil") } @@ -79,7 +79,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) var txs [][]*types.TransactionData for _, block := range chunk.Blocks { - b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -98,7 +98,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. // Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { @@ -132,13 +132,13 @@ func (o *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { batchBytes := BytesFromBlobCanonical(blob) return DecodeTxsFromBytes(batchBytes[:], chunks, codecv1MaxNumChunks) } // NewDABatch creates a DABatch from the provided Batch. -func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { +func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > codecv1MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -149,7 +149,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -161,7 +161,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -184,8 +184,8 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := o.NewDABatch(batch) +func (d *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := d.NewDABatch(batch) if err != nil { return nil, err } @@ -198,7 +198,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. 
-func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + codecv1MaxNumChunks*4 @@ -288,7 +288,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } @@ -313,7 +313,7 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { +func (d *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { for _, tx := range block.Transactions { @@ -332,7 +332,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -361,22 +361,22 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV1) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (d *DACodecV1) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { return BlockContextByteSize, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (d *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -392,7 +392,7 @@ func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs @@ -416,7 +416,7 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) if err != nil { return 0, err } @@ -428,7 +428,7 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -439,10 +439,10 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (d *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -452,19 +452,19 @@ func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { +func (d *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { return true, nil } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { +func (d *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { return true, nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { +func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { metadataSize := uint64(2 + 4*codecv1MaxNumChunks) - batchDataSize, err := o.chunkL1CommitBlobDataSize(c) + batchDataSize, err := d.chunkL1CommitBlobDataSize(c) if err != nil { return 0, 0, err } @@ -473,11 +473,11 @@ func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
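These codecv1 estimators return calculatePaddedBlobSize(metadataSize + batchDataSize); the helper's body is not shown in this series. Assuming the canonical EIP-4844 packing of 31 payload bytes per 32-byte field element, a plausible version looks like the sketch below; the exact padding rule is this sketch's assumption.

package main

import "fmt"

// paddedBlobSize is a sketch under the stated assumption: each 32-byte blob
// field element carries 31 payload bytes, so dataSize bytes occupy
// ceil(dataSize/31) field elements.
func paddedBlobSize(dataSize uint64) uint64 {
	fieldElements := (dataSize + 30) / 31 // ceiling division
	return fieldElements * 32
}

func main() {
	fmt.Println(paddedBlobSize(62), paddedBlobSize(63)) // 64 96
}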
-func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { +func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { metadataSize := uint64(2 + 4*codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { - chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) + chunkDataSize, err := d.chunkL1CommitBlobDataSize(c) if err != nil { return 0, 0, err } @@ -488,18 +488,18 @@ func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } // SetCompression enables or disables compression. -func (o *DACodecV1) SetCompression(enable bool) {} +func (d *DACodecV1) SetCompression(enable bool) {} // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. -func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (d *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore for _, chunk := range chunks { - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return common.Hash{}, err } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f493504..8b318a4 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -25,17 +25,17 @@ type DACodecV2 struct{} const codecv2MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV2) Version() CodecVersion { +func (d *DACodecV2) Version() CodecVersion { return CodecV2 } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (o *DACodecV2) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV2) MaxNumChunksPerBatch() uint64 { return codecv2MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (d *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -66,7 +66,7 @@ func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (d *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if chunk == nil { return nil, errors.New("chunk is nil") } @@ -83,7 +83,7 @@ func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) var txs [][]*types.TransactionData for _, block := range chunk.Blocks { - b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -102,7 +102,7 @@ func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx.
// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { @@ -136,7 +136,7 @@ func (o *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { compressedBytes := BytesFromBlobCanonical(blob) magics := []byte{0x28, 0xb5, 0x2f, 0xfd} @@ -148,7 +148,7 @@ func (o *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx } // NewDABatch creates a DABatch from the provided Batch. -func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { +func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > codecv2MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -159,7 +159,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -171,7 +171,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, _, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -194,8 +194,8 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := o.NewDABatch(batch) +func (d *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := d.NewDABatch(batch) if err != nil { return nil, err } @@ -208,7 +208,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + codecv2MaxNumChunks*4 @@ -318,7 +318,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
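DecodeTxsFromBlob above verifies the zstd frame magic before decompressing; the four bytes {0x28, 0xb5, 0x2f, 0xfd} are the little-endian encoding of the zstd magic number 0xFD2FB528. A standalone version of the check:

package main

import (
	"bytes"
	"fmt"
)

// hasZstdMagic mirrors the check above: a zstd frame starts with the
// little-endian encoding of the magic number 0xFD2FB528.
func hasZstdMagic(blobPayload []byte) bool {
	magic := []byte{0x28, 0xb5, 0x2f, 0xfd}
	return len(blobPayload) >= 4 && bytes.Equal(blobPayload[:4], magic)
}

func main() {
	fmt.Println(hasZstdMagic([]byte{0x28, 0xb5, 0x2f, 0xfd, 0x00})) // true
	fmt.Println(hasZstdMagic([]byte{0x00, 0x01}))                   // false
}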
-func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } @@ -344,8 +344,8 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) +func (d *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { return 0, 0, err } @@ -357,8 +357,8 @@ func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) +func (d *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) if err != nil { return 0, 0, err } @@ -371,8 +371,8 @@ func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) +func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { return false, err } @@ -393,8 +393,8 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) +func (d *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) if err != nil { return false, err } @@ -414,20 +414,20 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV2) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (d *DACodecV2) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { return BlockContextByteSize, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
-func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (d *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (d *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -437,7 +437,7 @@ func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (d *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -466,12 +466,12 @@ func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (d *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -487,7 +487,7 @@ func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (d *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs @@ -511,7 +511,7 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) if err != nil { return 0, err } @@ -523,7 +523,7 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -534,18 +534,18 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } // SetCompression enables or disables compression. -func (o *DACodecV2) SetCompression(enable bool) {} +func (d *DACodecV2) SetCompression(enable bool) {} // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers.
-func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (d *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore for _, chunk := range chunks { - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return common.Hash{}, err } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 0430f9d..8367fc9 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -26,17 +26,17 @@ type DACodecV3 struct{} const codecv3MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV3) Version() CodecVersion { +func (d *DACodecV3) Version() CodecVersion { return CodecV3 } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (o *DACodecV3) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV3) MaxNumChunksPerBatch() uint64 { return codecv3MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (d *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -67,7 +67,7 @@ func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (d *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if chunk == nil { return nil, errors.New("chunk is nil") } @@ -84,7 +84,7 @@ func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) var txs [][]*types.TransactionData for _, block := range chunk.Blocks { - b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -103,7 +103,7 @@ func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. // Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { @@ -137,7 +137,7 @@ func (o *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { compressedBytes := BytesFromBlobCanonical(blob) magics := []byte{0x28, 0xb5, 0x2f, 0xfd} @@ -149,7 +149,7 @@ func (o *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx } // NewDABatch creates a DABatch from the provided Batch. 
-func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { +func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > codecv3MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -164,7 +164,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -176,7 +176,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -202,8 +202,8 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := o.NewDABatch(batch) +func (d *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := d.NewDABatch(batch) if err != nil { return nil, err } @@ -216,7 +216,7 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (d *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + codecv3MaxNumChunks*4 @@ -326,7 +326,7 @@ func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. -func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } @@ -358,8 +358,8 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) +func (d *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { return 0, 0, err } @@ -371,8 +371,8 @@ func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
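All of the payload builders in this series share the same metadata header: num_chunks (2 bytes) followed by a 4-byte size per chunk slot, giving the `2 + MaxNumChunks*4` length seen above. A sketch of writing that header; big-endian byte order is an assumption of this sketch.

package main

import (
	"encoding/binary"
	"fmt"
)

// writeMetadata lays out num_chunks (2 bytes) followed by one 4-byte size per
// chunk slot, matching the "2 + MaxNumChunks*4" metadata length above.
func writeMetadata(chunkSizes []uint32, maxNumChunks int) []byte {
	meta := make([]byte, 2+maxNumChunks*4)
	binary.BigEndian.PutUint16(meta[0:2], uint16(len(chunkSizes)))
	for i, size := range chunkSizes {
		binary.BigEndian.PutUint32(meta[2+4*i:], size)
	}
	return meta
}

func main() {
	meta := writeMetadata([]uint32{412, 5863}, 45)
	fmt.Println(len(meta)) // 182; unused slots stay zero
}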
-func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) +func (d *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) if err != nil { return 0, 0, err } @@ -384,8 +384,8 @@ func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) +func (d *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { return false, err } @@ -405,8 +405,8 @@ func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) +func (d *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) if err != nil { return false, err } @@ -426,20 +426,20 @@ func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV3) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (d *DACodecV3) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { return BlockContextByteSize, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (d *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (d *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -449,7 +449,7 @@ func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (d *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -478,12 +478,12 @@ func (o *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. 
-func (o *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { +func (d *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -499,8 +499,8 @@ func (o *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - totalL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(c) +func (d *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + totalL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(c) if err != nil { return 0, err } @@ -509,7 +509,7 @@ func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs @@ -533,7 +533,7 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) + chunkL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) if err != nil { return 0, err } @@ -545,7 +545,7 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -557,18 +557,18 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } // SetCompression enables or disables compression. -func (o *DACodecV3) SetCompression(enable bool) {} +func (d *DACodecV3) SetCompression(enable bool) {} // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. -func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (d *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore for _, chunk := range chunks { - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return common.Hash{}, err } @@ -585,8 +585,8 @@ func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe } // JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON.
-func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { - batch, err := o.NewDABatchFromBytes(data) +func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := d.NewDABatchFromBytes(data) if err != nil { return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 2b5f2b5..8442e40 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -29,17 +29,17 @@ type DACodecV4 struct { const codecv4MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV4) Version() CodecVersion { +func (d *DACodecV4) Version() CodecVersion { return CodecV4 } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (o *DACodecV4) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV4) MaxNumChunksPerBatch() uint64 { return codecv4MaxNumChunks } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (d *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -70,7 +70,7 @@ func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (d *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if chunk == nil { return nil, errors.New("chunk is nil") } @@ -87,7 +87,7 @@ func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) var txs [][]*types.TransactionData for _, block := range chunk.Blocks { - b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -106,7 +106,7 @@ func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. // Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (o *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { var chunks []*DAChunkRawTx for _, chunk := range bytes { if len(chunk) < 1 { @@ -140,7 +140,7 @@ func (o *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (o *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { rawBytes := BytesFromBlobCanonical(blob) // if first byte is 1 - data compressed, 0 - not compressed @@ -157,7 +157,7 @@ func (o *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx } // NewDABatch creates a DABatch from the provided Batch. 
-func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { +func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -172,7 +172,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -184,7 +184,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -210,12 +210,12 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - o.SetCompression(true) - daBatch, err := o.NewDABatch(batch) +func (d *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + d.SetCompression(true) + daBatch, err := d.NewDABatch(batch) if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - o.SetCompression(false) - daBatch, err = o.NewDABatch(batch) + d.SetCompression(false) + daBatch, err = d.NewDABatch(batch) if err != nil { return nil, err } @@ -229,7 +229,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + codecv4MaxNumChunks*4 @@ -289,7 +289,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if o.isCompressEnabled() { + if d.isCompressEnabled() { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) @@ -344,7 +344,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } @@ -376,13 +376,13 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
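NewDABatchWithExpectedBlobVersionedHashes above encodes a try-then-fall-back policy: build with compression, and if the resulting versioned hashes differ from what was committed on L1, rebuild without it. The shape of that policy, abstracted over an illustrative build function (the string "hashes" are stand-ins for blob versioned hashes):

package main

import (
	"errors"
	"fmt"
)

// buildWithFallback abstracts the policy above: try the compressed encoding
// first; if building fails or the result does not match what was committed,
// rebuild without compression and check again.
func buildWithFallback(build func(compress bool) (string, error), expected string) (string, error) {
	if got, err := build(true); err == nil && got == expected {
		return got, nil
	}
	got, err := build(false)
	if err != nil {
		return "", err
	}
	if got != expected {
		return "", errors.New("blob versioned hashes do not match")
	}
	return got, nil
}

func main() {
	build := func(compress bool) (string, error) {
		if compress {
			return "hashA", nil
		}
		return "hashB", nil
	}
	out, err := buildWithFallback(build, "hashB")
	fmt.Println(out, err) // hashB <nil>
}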
-func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) +func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if o.isCompressEnabled() { + if d.isCompressEnabled() { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -395,13 +395,13 @@ func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) +func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if o.isCompressEnabled() { + if d.isCompressEnabled() { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -414,8 +414,8 @@ func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, o) +func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { return false, err } @@ -431,8 +431,8 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, o) +func (d *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) if err != nil { return false, err } @@ -448,20 +448,20 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV4) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (d *DACodecV4) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { return BlockContextByteSize, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (d *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
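// Worked example (not part of this patch): each block contributes exactly
// BlockContextByteSize bytes of commit calldata, so a batch with two chunks of
// 3 and 5 blocks estimates to (3+5)*BlockContextByteSize, which is what the
// per-chunk summation below computes.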
-func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (d *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -471,7 +471,7 @@ func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (d *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -500,12 +500,12 @@ func (o *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. -func (o *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { +func (d *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -521,8 +521,8 @@ func (o *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - totalL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(c) +func (d *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + totalL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(c) if err != nil { return 0, err } @@ -531,7 +531,7 @@ func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (d *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs @@ -555,7 +555,7 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := o.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) + chunkL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) if err != nil { return 0, err } @@ -567,7 +567,7 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -579,16 +579,16 @@ func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } // isCompressEnabled checks if compression is enabled. 
-func (o *DACodecV4) isCompressEnabled() bool { - return atomic.LoadUint32(&o.enableCompress) == 1 +func (d *DACodecV4) isCompressEnabled() bool { + return atomic.LoadUint32(&d.enableCompress) == 1 } // SetCompression enables or disables compression. -func (o *DACodecV4) SetCompression(enable bool) { +func (d *DACodecV4) SetCompression(enable bool) { if enable { - atomic.StoreUint32(&o.enableCompress, 1) + atomic.StoreUint32(&d.enableCompress, 1) } else { - atomic.StoreUint32(&o.enableCompress, 0) + atomic.StoreUint32(&d.enableCompress, 0) } } @@ -596,12 +596,12 @@ func (o *DACodecV4) SetCompression(enable bool) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a badge in the contracts, // the latter is used in the public input to the provers. -func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (d *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore for _, chunk := range chunks { - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return common.Hash{}, err } @@ -618,8 +618,8 @@ func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe } // JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. -func (o *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { - batch, err := o.NewDABatchFromBytes(data) +func (d *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := d.NewDABatchFromBytes(data) if err != nil { return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) } From e222406f6a3b0bd7038f610d526bafc2e85e1eaa Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 18:22:52 +0800 Subject: [PATCH 066/126] remove SetCompression --- encoding/codecv0.go | 19 -------------- encoding/codecv1.go | 19 -------------- encoding/codecv2.go | 19 -------------- encoding/codecv3.go | 19 -------------- encoding/codecv4.go | 57 +++++++++++++----------------------------- encoding/da.go | 18 ------------- encoding/interfaces.go | 2 -- 7 files changed, 17 insertions(+), 136 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 54c98cc..02de1b8 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -5,7 +5,6 @@ import ( "errors" "fmt" "math" - "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -199,21 +198,6 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return daBatch, nil } -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. -func (d *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := d.NewDABatch(batch) - if err != nil { - return nil, err - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. 
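// Layout recap (inferred from the length check and field offsets in the decoder
// below): a v0 batch header is at least 89 bytes,
//
//	data[0]     version
//	data[1:9]   batchIndex (big-endian uint64)
//	data[9:17]  l1MessagePopped
//	data[17:25] totalL1MessagePopped
//	data[25:57] dataHash
//	data[57:89] parentBatchHash
//	data[89:]   skippedL1MessageBitmap (optional)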
func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { @@ -407,9 +391,6 @@ func (d *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, return 0, 0, nil } -// SetCompression enables or disables compression. -func (d *DACodecV0) SetCompression(enable bool) {} - // JSONFromBytes for CodecV0 returns empty values. func (c *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) { // DACodecV0 doesn't need this, so just return empty values diff --git a/encoding/codecv1.go b/encoding/codecv1.go index a9b4925..26dd3ae 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -7,7 +7,6 @@ import ( "fmt" "math" "math/big" - "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -182,21 +181,6 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return daBatch, nil } -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. -func (d *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := d.NewDABatch(batch) - if err != nil { - return nil, err - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - // constructBlobPayload constructs the 4844 blob payload. func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) @@ -487,9 +471,6 @@ func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, return blobSize, blobSize, nil } -// SetCompression enables or disables compression. -func (d *DACodecV1) SetCompression(enable bool) {} - // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a badge in the contracts, diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 8b318a4..f575a8f 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -8,7 +8,6 @@ import ( "fmt" "math" "math/big" - "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -192,21 +191,6 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return daBatch, nil } -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. -func (d *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := d.NewDABatch(batch) - if err != nil { - return nil, err - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - // constructBlobPayload constructs the 4844 blob payload. 
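// Worked example (not part of this patch): with this metadata layout, a codec
// that allows 45 chunks per batch (the v4 limit defined earlier) reserves
//
//	metadataLength = 2 + 45*4 = 182 bytes
//
// at the front of the blob payload, before any transaction bytes.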
func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) @@ -533,9 +517,6 @@ func (d *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { return totalL1CommitGas, nil } -// SetCompression enables or disables compression. -func (d *DACodecV2) SetCompression(enable bool) {} - // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a badge in the contracts, diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 8367fc9..23b6d6b 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -9,7 +9,6 @@ import ( "fmt" "math" "math/big" - "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -200,21 +199,6 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { ) } -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. -func (d *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := d.NewDABatch(batch) - if err != nil { - return nil, err - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - // constructBlobPayload constructs the 4844 blob payload. func (d *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) @@ -556,9 +540,6 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { return totalL1CommitGas, nil } -// SetCompression enables or disables compression. -func (d *DACodecV3) SetCompression(enable bool) {} - // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a badge in the contracts, diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 8442e40..8906069 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -9,8 +9,6 @@ import ( "fmt" "math" "math/big" - "reflect" - "sync/atomic" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -22,7 +20,7 @@ import ( ) type DACodecV4 struct { - enableCompress uint32 + enableCompress bool } // codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -192,6 +190,11 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + d.enableCompress, err = d.CheckBatchCompressedDataCompatibility(batch) + if err != nil { + return nil, err + } + return NewDABatchV2( uint8(CodecV4), // version batch.Index, // batchIndex @@ -208,26 +211,6 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { ) } -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. 
-func (d *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - d.SetCompression(true) - daBatch, err := d.NewDABatch(batch) - if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - d.SetCompression(false) - daBatch, err = d.NewDABatch(batch) - if err != nil { - return nil, err - } - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - // constructBlobPayload constructs the 4844 blob payload. func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) @@ -289,7 +272,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if d.isCompressEnabled() { + if d.enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) @@ -382,7 +365,11 @@ func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, return 0, 0, err } var blobBytesLength uint64 - if d.isCompressEnabled() { + enableCompress, err := d.CheckChunkCompressedDataCompatibility(c) + if err != nil { + return 0, 0, err + } + if enableCompress { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -401,7 +388,11 @@ func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, return 0, 0, err } var blobBytesLength uint64 - if d.isCompressEnabled() { + enableCompress, err := d.CheckBatchCompressedDataCompatibility(b) + if err != nil { + return 0, 0, err + } + if enableCompress { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -578,20 +569,6 @@ func (d *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { return totalL1CommitGas, nil } -// isCompressEnabled checks if compression is enabled. -func (d *DACodecV4) isCompressEnabled() bool { - return atomic.LoadUint32(&d.enableCompress) == 1 -} - -// SetCompression enables or disables compression. -func (d *DACodecV4) SetCompression(enable bool) { - if enable { - atomic.StoreUint32(&d.enableCompress, 1) - } else { - atomic.StoreUint32(&d.enableCompress, 0) - } -} - // computeBatchDataHash computes the data hash of the batch. // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a badge in the contracts, diff --git a/encoding/da.go b/encoding/da.go index 538de67..e331d0f 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -590,21 +590,3 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin return CodecV4 } } - -// CheckChunkCompressedDataCompatibility checks compressed data compatibility of a batch built by a single chunk. -func CheckChunkCompressedDataCompatibility(chunk *Chunk, codecVersion CodecVersion) (bool, error) { - codec, err := CodecFromVersion(codecVersion) - if err != nil { - return false, fmt.Errorf("failed to get codec from version: %w", err) - } - return codec.CheckChunkCompressedDataCompatibility(chunk) -} - -// CheckBatchCompressedDataCompatibility checks compressed data compatibility of a batch built by a single chunk. 
-func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersion CodecVersion) (bool, error) {
-	codec, err := CodecFromVersion(codecVersion)
-	if err != nil {
-		return false, fmt.Errorf("failed to get codec from version: %w", err)
-	}
-	return codec.CheckBatchCompressedDataCompatibility(batch)
-}
diff --git a/encoding/interfaces.go b/encoding/interfaces.go
index 1ac75c4..8fc8c00 100644
--- a/encoding/interfaces.go
+++ b/encoding/interfaces.go
@@ -47,7 +47,6 @@ type Codec interface {
 	NewDAChunk(*Chunk, uint64) (DAChunk, error)
 	NewDABatch(*Batch) (DABatch, error)
 	NewDABatchFromBytes([]byte) (DABatch, error)
-	NewDABatchWithExpectedBlobVersionedHashes(*Batch, []common.Hash) (DABatch, error)

 	DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error)
 	DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error
@@ -62,7 +61,6 @@ type Codec interface {
 	EstimateBatchL1CommitGas(*Batch) (uint64, error)
 	EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error)

-	SetCompression(enable bool)
 	JSONFromBytes([]byte) ([]byte, error)
 }

From ef5ea6eec199e162cb0e810a72d241e6b2ac92b5 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 11 Oct 2024 19:00:23 +0800
Subject: [PATCH 067/126] fix

---
 encoding/da.go | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/encoding/da.go b/encoding/da.go
index e331d0f..538de67 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -590,3 +590,21 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin
 	return CodecV4
 	}
 }
+
+// CheckChunkCompressedDataCompatibility checks compressed data compatibility of a batch built by a single chunk.
+func CheckChunkCompressedDataCompatibility(chunk *Chunk, codecVersion CodecVersion) (bool, error) {
+	codec, err := CodecFromVersion(codecVersion)
+	if err != nil {
+		return false, fmt.Errorf("failed to get codec from version: %w", err)
+	}
+	return codec.CheckChunkCompressedDataCompatibility(chunk)
+}
+
+// CheckBatchCompressedDataCompatibility checks the compressed data compatibility of a batch.
+func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersion CodecVersion) (bool, error) {
+	codec, err := CodecFromVersion(codecVersion)
+	if err != nil {
+		return false, fmt.Errorf("failed to get codec from version: %w", err)
+	}
+	return codec.CheckBatchCompressedDataCompatibility(batch)
+}

From 9d9fd89ab3c0302116ab2175b8372c5dfb147b70 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 11 Oct 2024 19:48:35 +0800
Subject: [PATCH 068/126] add GetChunkEnableCompression & GetBatchEnableCompression

---
 encoding/da.go | 36 ++++++++++++++++++++++++++++++++++++
 1 file changed, 36 insertions(+)

diff --git a/encoding/da.go b/encoding/da.go
index 538de67..5a65315 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -608,3 +608,39 @@ func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersi
 	}
 	return codec.CheckBatchCompressedDataCompatibility(batch)
 }
+
+// GetChunkEnableCompression returns whether to enable compression for the given codec version and chunk.
+func GetChunkEnableCompression(codecVersion CodecVersion, chunk Chunk) (bool, error) {
+	switch codecVersion {
+	case CodecV0:
+		return false, nil
+	case CodecV1:
+		return false, nil
+	case CodecV2:
+		return true, nil
+	case CodecV3:
+		return true, nil
+	case CodecV4:
+		return CheckChunkCompressedDataCompatibility(&chunk, codecVersion)
+	default:
+		return false, fmt.Errorf("unsupported codec version: %v", codecVersion)
+	}
+}
+
+// GetBatchEnableCompression returns whether to enable compression for the given codec version and batch.
+func GetBatchEnableCompression(codecVersion CodecVersion, batch Batch) (bool, error) {
+	switch codecVersion {
+	case CodecV0:
+		return false, nil
+	case CodecV1:
+		return false, nil
+	case CodecV2:
+		return true, nil
+	case CodecV3:
+		return true, nil
+	case CodecV4:
+		return CheckBatchCompressedDataCompatibility(&batch, codecVersion)
+	default:
+		return false, fmt.Errorf("unsupported codec version: %v", codecVersion)
+	}
+}

From df76a9d519cfcd6871ebfbe4e4e1288f75c9960b Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 11 Oct 2024 19:55:14 +0800
Subject: [PATCH 069/126] fix

---
 encoding/da.go | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/encoding/da.go b/encoding/da.go
index 5a65315..04c2c21 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -610,7 +610,7 @@ func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersi
 }
 
 // GetChunkEnableCompression returns whether to enable compression for the given codec version and chunk.
-func GetChunkEnableCompression(codecVersion CodecVersion, chunk Chunk) (bool, error) {
+func GetChunkEnableCompression(codecVersion CodecVersion, chunk *Chunk) (bool, error) {
 	switch codecVersion {
 	case CodecV0:
 		return false, nil
@@ -621,14 +621,14 @@ func GetChunkEnableCompression(codecVersion CodecVersion, chunk Chunk) (bool, er
 	case CodecV3:
 		return true, nil
 	case CodecV4:
-		return CheckChunkCompressedDataCompatibility(&chunk, codecVersion)
+		return CheckChunkCompressedDataCompatibility(chunk, codecVersion)
 	default:
 		return false, fmt.Errorf("unsupported codec version: %v", codecVersion)
 	}
 }
 
 // GetBatchEnableCompression returns whether to enable compression for the given codec version and batch.
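// Illustrative sketch (not part of this patch): callers pick the envelope
// format per batch before encoding, e.g. with `batch` being a *Batch:
//
//	enableCompress, err := GetBatchEnableCompression(CodecV4, batch)
//	if err != nil {
//		return err
//	}
//	// enableCompress == true  -> zstd-compress the payload, flag byte 1
//	// enableCompress == false -> raw payload, flag byte 0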
-func GetBatchEnableCompression(codecVersion CodecVersion, batch Batch) (bool, error) { +func GetBatchEnableCompression(codecVersion CodecVersion, batch *Batch) (bool, error) { switch codecVersion { case CodecV0: return false, nil @@ -639,7 +639,7 @@ func GetBatchEnableCompression(codecVersion CodecVersion, batch Batch) (bool, er case CodecV3: return true, nil case CodecV4: - return CheckBatchCompressedDataCompatibility(&batch, codecVersion) + return CheckBatchCompressedDataCompatibility(batch, codecVersion) default: return false, fmt.Errorf("unsupported codec version: %v", codecVersion) } From 6e28644ea0e4a7f765de0a588fb40eaec4a6477c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 11 Oct 2024 22:32:54 +0800 Subject: [PATCH 070/126] update l2geth dependency --- encoding/da.go | 8 ++--- encoding/interfaces.go | 4 +-- go.mod | 9 +++--- go.sum | 67 ++++++++++++++++++++++++++++++++++-------- 4 files changed, 65 insertions(+), 23 deletions(-) diff --git a/encoding/da.go b/encoding/da.go index 04c2c21..eb709f1 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -567,9 +567,9 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin return "homestead" } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { return "bernoulli" - } else if !config.IsDarwin(blockTimestamp) { + } else if !config.IsDarwin(new(big.Int).SetUint64(blockHeight), blockTimestamp) { return "curie" - } else if !config.IsDarwinV2(blockTimestamp) { + } else if !config.IsDarwinV2(new(big.Int).SetUint64(blockHeight), blockTimestamp) { return "darwin" } else { return "darwinV2" @@ -582,9 +582,9 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin return CodecV0 } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { return CodecV1 - } else if !config.IsDarwin(blockTimestamp) { + } else if !config.IsDarwin(new(big.Int).SetUint64(blockHeight), blockTimestamp) { return CodecV2 - } else if !config.IsDarwinV2(blockTimestamp) { + } else if !config.IsDarwinV2(new(big.Int).SetUint64(blockHeight), blockTimestamp) { return CodecV3 } else { return CodecV4 diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 8fc8c00..a9c98f8 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -95,9 +95,9 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
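// Recap (derived from GetCodecVersion above, for quick reference): the
// fork-to-codec mapping resolved here is
//
//	pre-Bernoulli -> DACodecV0
//	Bernoulli     -> DACodecV1
//	Curie         -> DACodecV2
//	Darwin        -> DACodecV3
//	DarwinV2      -> DACodecV4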
func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { - if chainCfg.IsDarwinV2(startBlockTimestamp) { + if chainCfg.IsDarwinV2(startBlockNumber, startBlockTimestamp) { return &DACodecV4{} - } else if chainCfg.IsDarwin(startBlockTimestamp) { + } else if chainCfg.IsDarwin(startBlockNumber, startBlockTimestamp) { return &DACodecV3{} } else if chainCfg.IsCurie(startBlockNumber) { return &DACodecV2{} diff --git a/go.mod b/go.mod index ce443ac..0a6f1fd 100644 --- a/go.mod +++ b/go.mod @@ -3,34 +3,35 @@ module github.com/scroll-tech/da-codec go 1.21 require ( - github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e + github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22 github.com/stretchr/testify v1.9.0 ) require ( github.com/bits-and-blooms/bitset v1.12.0 // indirect github.com/btcsuite/btcd v0.20.1-beta // indirect + github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect github.com/consensys/bavard v0.1.13 // indirect github.com/consensys/gnark-crypto v0.12.1 // indirect github.com/crate-crypto/go-kzg-4844 v1.0.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 // indirect github.com/go-ole/go-ole v1.3.0 // indirect github.com/go-stack/stack v1.8.1 // indirect github.com/holiman/uint256 v1.2.4 // indirect github.com/iden3/go-iden3-crypto v0.0.15 // indirect github.com/klauspost/compress v1.17.9 - github.com/kr/text v0.2.0 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rogpeppe/go-internal v1.10.0 // indirect github.com/scroll-tech/zktrie v0.8.4 // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect - github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 // indirect + github.com/supranational/blst v0.3.11 // indirect github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/yusufpapurcu/wmi v1.2.3 // indirect golang.org/x/crypto v0.17.0 // indirect + golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.17.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index be609ea..7ae90e1 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,16 @@ +github.com/DataDog/zstd v1.4.5 h1:EndNeuB0l9syBZhut0wns3gV1hL8zX8LIu6ZiVHWLIQ= +github.com/DataDog/zstd v1.4.5/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= github.com/VictoriaMetrics/fastcache v1.12.1 h1:i0mICQuojGDL3KblA7wUNlY5lOK6a4bwt3uRKnkZU40= github.com/VictoriaMetrics/fastcache v1.12.1/go.mod h1:tX04vaqcNoQeGLD+ra5pU5sWkuxnzWhEzLwhP9w653o= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA= github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= +github.com/btcsuite/btcd/btcec/v2 v2.2.0 h1:fzn1qaOt32TuLjFlkzYSsBC35Q3KUjT1SwPxiMSCF5k= 
+github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= @@ -14,30 +20,49 @@ github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtE github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cockroachdb/errors v1.11.1 h1:xSEW75zKaKCWzR3OfxXUxgrk/NtT4G1MiOv5lWZazG8= +github.com/cockroachdb/errors v1.11.1/go.mod h1:8MUxA3Gi6b25tYlFEBGLf+D8aISL+M4MIpiWMSNRfxw= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/pebble v1.1.0 h1:pcFh8CdCIt2kmEpK0OIatq67Ln9uGDYY3d5XnE0LJG4= +github.com/cockroachdb/pebble v1.1.0/go.mod h1:sEHm5NOXxyiAoKWhoFxT8xMgd/f3RA6qUqQ1BXKrh2E= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= github.com/consensys/gnark-crypto v0.12.1 h1:lHH39WuuFgVHONRl3J0LRBtuYdQTumFSDtJF7HpyG8M= github.com/consensys/gnark-crypto v0.12.1/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= github.com/crate-crypto/go-kzg-4844 v1.0.0 h1:TsSgHwrkTKecKJ4kadtHi4b3xHW5dCFUDFnUp1TsawI= github.com/crate-crypto/go-kzg-4844 v1.0.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= +github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 h1:B2mpK+MNqgPqk2/KNi1LbqwtZDy5F7iy0mynQiBr8VA= github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4/go.mod h1:y4GA2JbAUama1S4QwYjC2hefgGLU8Ul0GMtL/ADMF1c= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/getsentry/sentry-go v0.18.0 
h1:MtBW5H9QgdcJabtZcuJG80BMOwaBpkRDZkxRkNC1sN0= +github.com/getsentry/sentry-go v0.18.0/go.mod h1:Kgon4Mby+FJ7ZWHFUAZgVaIa8sxHtnRJRLTXZr51aKQ= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw= github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= -github.com/holiman/bloomfilter/v2 v2.0.3 h1:73e0e/V0tCydx14a0SCYS/EWCxgwLZ18CZcZKVu0fao= -github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iURXE7ZOP9L9hSkA= github.com/holiman/uint256 v1.2.4 h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= @@ -52,10 +77,14 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= -github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= -github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= +github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= @@ -68,22 +97,28 @@ 
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/tsdb v0.7.1 h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= -github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/prometheus/client_golang v1.12.0 h1:C+UIj/QWtmqY13Arb8kwMt5j34/0Z2iKamrJ+ryC0Gg= +github.com/prometheus/client_golang v1.12.0/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a h1:CmF68hwI0XsOQ5UwlBopMi2Ow4Pbg32akc4KIVCOm+Y= +github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= +github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e h1:WCJ+UzfrM0jJSirXEYjWCJ89gr5EoRb4KfKb0mo6+Wo= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs= +github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22 h1:s1/8G2HP1z9jd0FBbUVs7viv/lQZA/8QoQppXYTX1CU= +github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 h1:wh1wzwAhZBNiZO37uWS/nDaKiIwHz4mDo4pnA+fqTO0= -github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= +github.com/supranational/blst v0.3.11 h1:LyU6FolezeWAhvQk0k6O/d49jqgO52MSDDfYgbeoEm4= +github.com/supranational/blst v0.3.11/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= 
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= @@ -95,6 +130,8 @@ github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQ golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= @@ -107,6 +144,10 @@ golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= From 233b006ae0389dd1e3438434e9b2b5c100ae97f0 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 12 Oct 2024 02:28:19 +0800 Subject: [PATCH 071/126] make types internal --- encoding/codecv0.go | 10 ++-- encoding/codecv1.go | 10 ++-- encoding/codecv2.go | 10 ++-- encoding/codecv3.go | 10 ++-- encoding/codecv4.go | 10 ++-- encoding/dabatch.go | 110 ++++++++++++++++++++++---------------------- encoding/dablock.go | 26 +++++------ encoding/dachunk.go | 32 ++++++------- 8 files changed, 109 insertions(+), 109 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 02de1b8..913a32a 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -46,7 +46,7 @@ func (d *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := newDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -85,7 +85,7 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV0( + daChunk := newDAChunkV0( blocks, // blocks txs, // transactions ) @@ -110,7 +110,7 @@ func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, e for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &daBlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -185,7 
+185,7 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV0( + daBatch := newDABatchV0( uint8(CodecV0), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -208,7 +208,7 @@ func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0) } - b := NewDABatchV0( + b := newDABatchV0( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 26dd3ae..0f7a126 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -48,7 +48,7 @@ func (d *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := newDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -87,7 +87,7 @@ func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV1( + daChunk := newDAChunkV1( blocks, // blocks txs, // transactions ) @@ -113,7 +113,7 @@ func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &daBlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -165,7 +165,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV1( + daBatch := newDABatchV1( uint8(CodecV1), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -281,7 +281,7 @@ func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) } - b := NewDABatchV1( + b := newDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f575a8f..5a03195 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -52,7 +52,7 @@ func (d *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := newDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -91,7 +91,7 @@ func (d *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV1( + daChunk := newDAChunkV1( blocks, // blocks txs, // transactions ) @@ -117,7 +117,7 @@ func (d *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &daBlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -175,7 +175,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV1( + daBatch 
:= newDABatchV1( uint8(CodecV2), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -311,7 +311,7 @@ func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) } - b := NewDABatchV1( + b := newDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 23b6d6b..a1e43e6 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -53,7 +53,7 @@ func (d *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := newDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -92,7 +92,7 @@ func (d *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV1( + daChunk := newDAChunkV1( blocks, // blocks txs, // transactions ) @@ -118,7 +118,7 @@ func (d *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &daBlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -183,7 +183,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - return NewDABatchV2( + return newDABatchV2( uint8(CodecV3), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -565,7 +565,7 @@ func (d *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe return dataHash, nil } -// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. +// JSONFromBytes converts the bytes to a daBatchV2 and then marshals it to JSON. 
func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) if err != nil { diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 8906069..6c50c0b 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -55,7 +55,7 @@ func (d *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := newDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -94,7 +94,7 @@ func (d *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV1( + daChunk := newDAChunkV1( blocks, // blocks txs, // transactions ) @@ -120,7 +120,7 @@ func (d *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &daBlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err @@ -195,7 +195,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - return NewDABatchV2( + return newDABatchV2( uint8(CodecV4), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -594,7 +594,7 @@ func (d *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe return dataHash, nil } -// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. +// JSONFromBytes converts the bytes to a daBatchV2 and then marshals it to JSON. func (d *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) if err != nil { diff --git a/encoding/dabatch.go b/encoding/dabatch.go index cb0ec8d..8c26f9c 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -12,8 +12,8 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABatchV0 contains metadata about a batch of DAChunks. -type DABatchV0 struct { +// daBatchV0 contains metadata about a batch of DAChunks. +type daBatchV0 struct { version uint8 batchIndex uint64 l1MessagePopped uint64 @@ -23,9 +23,9 @@ type DABatchV0 struct { skippedL1MessageBitmap []byte } -// NewDABatchV0 is a constructor for DABatchV0. -func NewDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *DABatchV0 { - return &DABatchV0{ +// newDABatchV0 is a constructor for daBatchV0. +func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *daBatchV0 { + return &daBatchV0{ version: version, batchIndex: batchIndex, l1MessagePopped: l1MessagePopped, @@ -37,7 +37,7 @@ func NewDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp } // Encode serializes the DABatch into bytes. -func (b *DABatchV0) Encode() []byte { +func (b *daBatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) batchBytes[0] = b.version binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) @@ -50,59 +50,59 @@ func (b *DABatchV0) Encode() []byte { } // Hash computes the hash of the serialized DABatch. 
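// Illustrative note (not part of this patch): for each batch version in this
// file the batch hash is the Keccak-256 digest of the serialized header, so a
// caller can recompute it from a decoded batch:
//
//	h := batch.Hash() // equivalent to crypto.Keccak256Hash(batch.Encode())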
-func (b *DABatchV0) Hash() common.Hash { +func (b *daBatchV0) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // Blob returns the blob of the batch. -func (b *DABatchV0) Blob() *kzg4844.Blob { +func (b *daBatchV0) Blob() *kzg4844.Blob { return nil } // BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *DABatchV0) BlobVersionedHashes() []common.Hash { +func (b *daBatchV0) BlobVersionedHashes() []common.Hash { return nil } // BlobBytes returns the blob bytes of the batch. -func (b *DABatchV0) BlobBytes() []byte { +func (b *daBatchV0) BlobBytes() []byte { return nil } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *daBatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, nil } // Version returns the version of the DABatch. -func (b *DABatchV0) Version() uint8 { +func (b *daBatchV0) Version() uint8 { return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -func (b *DABatchV0) SkippedL1MessageBitmap() []byte { +func (b *daBatchV0) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } // DataHash returns the data hash of the DABatch. -func (b *DABatchV0) DataHash() common.Hash { +func (b *daBatchV0) DataHash() common.Hash { return b.dataHash } -// DABatchV1 contains metadata about a batch of DAChunks. -type DABatchV1 struct { - DABatchV0 +// daBatchV1 contains metadata about a batch of DAChunks. +type daBatchV1 struct { + daBatchV0 blobVersionedHash common.Hash blob *kzg4844.Blob z *kzg4844.Point } -// NewDABatchV1 is a constructor for DABatchV1. -func NewDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *DABatchV1 { - return &DABatchV1{ - DABatchV0: DABatchV0{ +// newDABatchV1 is a constructor for daBatchV1. +func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 { + return &daBatchV1{ + daBatchV0: daBatchV0{ version: version, batchIndex: batchIndex, l1MessagePopped: l1MessagePopped, @@ -118,7 +118,7 @@ func NewDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp } // Encode serializes the DABatch into bytes. -func (b *DABatchV1) Encode() []byte { +func (b *daBatchV1) Encode() []byte { batchBytes := make([]byte, 121+len(b.skippedL1MessageBitmap)) batchBytes[0] = b.version binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) @@ -132,13 +132,13 @@ func (b *DABatchV1) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatchV1) Hash() common.Hash { +func (b *daBatchV1) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // BlobDataProof computes the abi-encoded blob verification data. -func (b *DABatchV1) BlobDataProof() ([]byte, error) { +func (b *daBatchV1) BlobDataProof() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProof with empty blob") } @@ -160,22 +160,22 @@ func (b *DABatchV1) BlobDataProof() ([]byte, error) { } // Blob returns the blob of the batch. 
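// Layout recap (inferred from the 193-byte buffer below and the codecv4
// decoder earlier in this series; offsets past batchIndex are the editor's
// reading of the field order, not spelled out in this hunk):
//
//	[0]       version
//	[1:9]     batchIndex (big-endian uint64)
//	[9:17]    l1MessagePopped
//	[17:25]   totalL1MessagePopped
//	[25:57]   dataHash
//	[57:89]   blobVersionedHash
//	[89:121]  parentBatchHash
//	[121:129] lastBlockTimestamp
//	[129:193] blobDataProof (2 x 32 bytes)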
-func (b *DABatchV1) Blob() *kzg4844.Blob { +func (b *daBatchV1) Blob() *kzg4844.Blob { return b.blob } // BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *DABatchV1) BlobVersionedHashes() []common.Hash { +func (b *daBatchV1) BlobVersionedHashes() []common.Hash { return []common.Hash{b.blobVersionedHash} } // BlobBytes returns the blob bytes of the batch. -func (b *DABatchV1) BlobBytes() []byte { +func (b *daBatchV1) BlobBytes() []byte { return nil } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -197,23 +197,23 @@ func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { } // Version returns the version of the DABatch. -func (b *DABatchV1) Version() uint8 { +func (b *daBatchV1) Version() uint8 { return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -func (b *DABatchV1) SkippedL1MessageBitmap() []byte { +func (b *daBatchV1) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } // DataHash returns the data hash of the DABatch. -func (b *DABatchV1) DataHash() common.Hash { +func (b *daBatchV1) DataHash() common.Hash { return b.dataHash } -// DABatchV2 contains metadata about a batch of DAChunks. -type DABatchV2 struct { - DABatchV0 +// daBatchV2 contains metadata about a batch of DAChunks. +type daBatchV2 struct { + daBatchV0 blobVersionedHash common.Hash lastBlockTimestamp uint64 @@ -223,15 +223,15 @@ type DABatchV2 struct { blobBytes []byte } -// NewDABatchV2 is a constructor for DABatchV2 that calls blobDataProofForPICircuit internally. -func NewDABatchV2(version uint8, +// newDABatchV2 is a constructor for daBatchV2 that calls blobDataProofForPICircuit internally. +func newDABatchV2(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, -) (*DABatchV2, error) { - daBatch := &DABatchV2{ - DABatchV0: DABatchV0{ +) (*daBatchV2, error) { + daBatch := &daBatchV2{ + daBatchV0: daBatchV0{ version: version, batchIndex: batchIndex, l1MessagePopped: l1MessagePopped, @@ -257,16 +257,16 @@ func NewDABatchV2(version uint8, return daBatch, nil } -// NewDABatchV2WithProof is a constructor for DABatchV2 that allows directly passing blobDataProof. +// NewDABatchV2WithProof is a constructor for daBatchV2 that allows directly passing blobDataProof. func NewDABatchV2WithProof(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash, // Accept blobDataProof directly -) *DABatchV2 { - return &DABatchV2{ - DABatchV0: DABatchV0{ +) *daBatchV2 { + return &daBatchV2{ + daBatchV0: daBatchV0{ version: version, batchIndex: batchIndex, l1MessagePopped: l1MessagePopped, @@ -285,7 +285,7 @@ func NewDABatchV2WithProof(version uint8, } // Encode serializes the DABatch into bytes. 
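// Editor's sketch (not part of this patch): NewDABatchV2WithProof is
// the escape hatch for callers that already hold a blob data proof
// (say, restored from a database) and want to skip the
// point-evaluation work done inside newDABatchV2. All concrete values
// below are placeholders.
func rebuildBatchFromStorage(dataHash, parentBatchHash, blobVersionedHash common.Hash,
	blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte,
	blobDataProof [2]common.Hash) *daBatchV2 {
	return NewDABatchV2WithProof(
		uint8(CodecV4), // version
		42,             // batchIndex (placeholder)
		0, 0,           // l1MessagePopped, totalL1MessagePopped
		1700000000,     // lastBlockTimestamp (placeholder)
		dataHash, parentBatchHash, blobVersionedHash,
		nil, // skippedL1MessageBitmap
		blob, z, blobBytes,
		blobDataProof, // precomputed, so no KZG work is repeated
	)
}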
-func (b *DABatchV2) Encode() []byte { +func (b *daBatchV2) Encode() []byte { batchBytes := make([]byte, 193) batchBytes[0] = b.version binary.BigEndian.PutUint64(batchBytes[1:9], b.batchIndex) @@ -301,13 +301,13 @@ func (b *DABatchV2) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatchV2) Hash() common.Hash { +func (b *daBatchV2) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *daBatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -332,7 +332,7 @@ func (b *DABatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *daBatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -354,23 +354,23 @@ func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatchV2) Blob() *kzg4844.Blob { +func (b *daBatchV2) Blob() *kzg4844.Blob { return b.blob } // BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *DABatchV2) BlobVersionedHashes() []common.Hash { +func (b *daBatchV2) BlobVersionedHashes() []common.Hash { return []common.Hash{b.blobVersionedHash} } // BlobBytes returns the blob bytes of the batch. -func (b *DABatchV2) BlobBytes() []byte { +func (b *daBatchV2) BlobBytes() []byte { return b.blobBytes } -// MarshalJSON implements the custom JSON serialization for DABatchV2. +// MarshalJSON implements the custom JSON serialization for daBatchV2. // This method is designed to provide prover with batch info in snake_case format. -func (b *DABatchV2) MarshalJSON() ([]byte, error) { +func (b *daBatchV2) MarshalJSON() ([]byte, error) { type daBatchV2JSON struct { Version uint8 `json:"version"` BatchIndex uint64 `json:"batch_index"` @@ -404,16 +404,16 @@ func (b *DABatchV2) MarshalJSON() ([]byte, error) { } // Version returns the version of the DABatch. -func (b *DABatchV2) Version() uint8 { +func (b *daBatchV2) Version() uint8 { return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -func (b *DABatchV2) SkippedL1MessageBitmap() []byte { +func (b *daBatchV2) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } // DataHash returns the data hash of the DABatch. -func (b *DABatchV2) DataHash() common.Hash { +func (b *daBatchV2) DataHash() common.Hash { return b.dataHash } diff --git a/encoding/dablock.go b/encoding/dablock.go index baa7d44..0918fed 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,8 +6,8 @@ import ( "math/big" ) -// DABlockV0 represents a Data Availability Block. -type DABlockV0 struct { +// daBlockV0 represents a Data Availability Block. +type daBlockV0 struct { number uint64 timestamp uint64 baseFee *big.Int @@ -16,9 +16,9 @@ type DABlockV0 struct { numL1Messages uint16 } -// NewDABlockV0 is a constructor function for DABlockV0 that initializes the internal fields. 
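// Editor's sketch (not part of this patch): the fixed 193-byte
// daBatchV2 header layout. Only version and batchIndex are visible in
// the Encode hunk above; the remaining offsets are inferred from the
// struct's field order and should be treated as assumptions:
//   [0]       version
//   [1:9]     batchIndex            [9:17]    l1MessagePopped
//   [17:25]   totalL1MessagePopped  [25:57]   dataHash
//   [57:89]   blobVersionedHash     [89:121]  parentBatchHash
//   [121:129] lastBlockTimestamp
//   [129:161] blobDataProof[0]      [161:193] blobDataProof[1]
func peekLastBlockTimestamp(header []byte) uint64 {
	return binary.BigEndian.Uint64(header[121:129]) // see layout above
}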
-func NewDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockV0 { - return &DABlockV0{ +// newDABlockV0 is a constructor function for daBlockV0 that initializes the internal fields. +func newDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV0 { + return &daBlockV0{ number: number, timestamp: timestamp, baseFee: baseFee, @@ -29,7 +29,7 @@ func NewDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit ui } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlockV0) Encode() []byte { +func (b *daBlockV0) Encode() []byte { bytes := make([]byte, BlockContextByteSize) binary.BigEndian.PutUint64(bytes[0:], b.number) binary.BigEndian.PutUint64(bytes[8:], b.timestamp) @@ -43,7 +43,7 @@ func (b *DABlockV0) Encode() []byte { } // Decode populates the fields of a DABlock from a byte slice. -func (b *DABlockV0) Decode(bytes []byte) error { +func (b *daBlockV0) Decode(bytes []byte) error { if len(bytes) != BlockContextByteSize { return errors.New("block encoding is not BlockContextByteSize bytes long") } @@ -59,31 +59,31 @@ func (b *DABlockV0) Decode(bytes []byte) error { } // Number returns the block number. -func (b *DABlockV0) Number() uint64 { +func (b *daBlockV0) Number() uint64 { return b.number } // Timestamp returns the block timestamp. -func (b *DABlockV0) Timestamp() uint64 { +func (b *daBlockV0) Timestamp() uint64 { return b.timestamp } // BaseFee returns the block base fee. -func (b *DABlockV0) BaseFee() *big.Int { +func (b *daBlockV0) BaseFee() *big.Int { return b.baseFee } // GasLimit returns the block gas limit. -func (b *DABlockV0) GasLimit() uint64 { +func (b *daBlockV0) GasLimit() uint64 { return b.gasLimit } // NumTransactions returns the number of transactions in the block. -func (b *DABlockV0) NumTransactions() uint16 { +func (b *daBlockV0) NumTransactions() uint16 { return b.numTransactions } // NumL1Messages returns the number of L1 messages in the block. -func (b *DABlockV0) NumL1Messages() uint16 { +func (b *daBlockV0) NumL1Messages() uint16 { return b.numL1Messages } diff --git a/encoding/dachunk.go b/encoding/dachunk.go index d764b55..1e4d98e 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -12,22 +12,22 @@ import ( "github.com/scroll-tech/go-ethereum/crypto" ) -// DAChunkV0 groups consecutive DABlocks with their transactions. -type DAChunkV0 struct { +// daChunkV0 groups consecutive DABlocks with their transactions. +type daChunkV0 struct { blocks []DABlock transactions [][]*types.TransactionData } -// NewDAChunkV0 is a constructor for DAChunkV0, initializing with blocks and transactions. -func NewDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *DAChunkV0 { - return &DAChunkV0{ +// newDAChunkV0 is a constructor for daChunkV0, initializing with blocks and transactions. +func newDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV0 { + return &daChunkV0{ blocks: blocks, transactions: transactions, } } // Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunkV0) Encode() ([]byte, error) { +func (c *daChunkV0) Encode() ([]byte, error) { if len(c.blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -67,7 +67,7 @@ func (c *DAChunkV0) Encode() ([]byte, error) { } // Hash computes the hash of the DAChunk data. 
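// Editor's sketch (not part of this patch): one block context is
// BlockContextByteSize = 60 bytes. Encode above shows the first two
// fields; the remaining offsets follow the daBlockV0 field order
// (8+8+32+8+2+2 = 60) and are inferred, not shown verbatim:
//   [0:8] number, [8:16] timestamp, [16:48] baseFee (32-byte
//   big-endian), [48:56] gasLimit, [56:58] numTransactions,
//   [58:60] numL1Messages.
func peekBlockContext(bc []byte) (number uint64, numTxs, numL1Messages uint16) {
	number = binary.BigEndian.Uint64(bc[0:8])
	numTxs = binary.BigEndian.Uint16(bc[56:58])
	numL1Messages = binary.BigEndian.Uint16(bc[58:60])
	return
}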
-func (c *DAChunkV0) Hash() (common.Hash, error) { +func (c *daChunkV0) Hash() (common.Hash, error) { chunkBytes, err := c.Encode() if err != nil { return common.Hash{}, err @@ -110,7 +110,7 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { } // BlockRange returns the block range of the DAChunk. -func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { +func (c *daChunkV0) BlockRange() (uint64, uint64, error) { if len(c.blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") } @@ -118,19 +118,19 @@ func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil } -// DAChunkV1 groups consecutive DABlocks with their transactions. -type DAChunkV1 DAChunkV0 +// daChunkV1 groups consecutive DABlocks with their transactions. +type daChunkV1 daChunkV0 -// NewDAChunkV1 is a constructor for DAChunkV1, initializing with blocks and transactions. -func NewDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *DAChunkV1 { - return &DAChunkV1{ +// newDAChunkV1 is a constructor for daChunkV1, initializing with blocks and transactions. +func newDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV1 { + return &daChunkV1{ blocks: blocks, transactions: transactions, } } // Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunkV1) Encode() ([]byte, error) { +func (c *daChunkV1) Encode() ([]byte, error) { var chunkBytes []byte chunkBytes = append(chunkBytes, byte(len(c.blocks))) @@ -143,7 +143,7 @@ func (c *DAChunkV1) Encode() ([]byte, error) { } // Hash computes the hash of the DAChunk data. -func (c *DAChunkV1) Hash() (common.Hash, error) { +func (c *daChunkV1) Hash() (common.Hash, error) { var dataBytes []byte // concatenate block contexts @@ -177,7 +177,7 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { } // BlockRange returns the block range of the DAChunk. -func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { +func (c *daChunkV1) BlockRange() (uint64, uint64, error) { if len(c.blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") } From 8dbf796031085af8a34921b8b3ab156c7a3427fb Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 12 Oct 2024 03:20:34 +0800 Subject: [PATCH 072/126] embed codecv0 <- codecv1 <- codecv2 <- codecv3 <- codecv4 --- encoding/codecv0.go | 9 + encoding/codecv1.go | 169 ++--------------- encoding/codecv2.go | 275 ++------------------------- encoding/codecv3.go | 393 +------------------------------------- encoding/codecv4.go | 445 +------------------------------------------- 5 files changed, 48 insertions(+), 1243 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 913a32a..6919b9f 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -158,6 +158,15 @@ func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx // NewDABatch creates a DABatch from the provided Batch. 
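// Editor's sketch (not part of this patch): the embedding chain this
// commit introduces, in miniature. Each codec embeds its predecessor,
// so unchanged methods are promoted and only overrides are redefined.
// The types here are illustrative stand-ins, not the real codecs.
type baseCodec struct{}

func (baseCodec) MaxNumChunksPerBatch() uint64 { return 15 }
func (baseCodec) Version() string              { return "v1" }

type nextCodec struct{ baseCodec } // MaxNumChunksPerBatch is promoted

func (nextCodec) Version() string { return "v2" } // override only what changed

func demoPromotion() (string, uint64) {
	var c nextCodec
	return c.Version(), c.MaxNumChunksPerBatch() // "v2", 15
}

// Note that Go promotion is not virtual dispatch: a promoted method
// that itself called MaxNumChunksPerBatch() would still see the
// embedded type's value. That is plausibly why the hunks below thread
// int(d.MaxNumChunksPerBatch()) into constructBlobPayload as an
// explicit parameter instead of letting the shared body re-derive it.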
func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { + return nil, errors.New("too many chunks in batch") + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("too few chunks in batch") + } + // compute batch data hash var dataBytes []byte totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 0f7a126..2236c51 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -5,7 +5,6 @@ import ( "encoding/binary" "errors" "fmt" - "math" "math/big" "github.com/scroll-tech/go-ethereum/common" @@ -14,54 +13,20 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -type DACodecV1 struct{} - -// codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. -const codecv1MaxNumChunks = 15 +type DACodecV1 struct { + DACodecV0 +} // Version returns the codec version. func (d *DACodecV1) Version() CodecVersion { return CodecV1 } -// MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (d *DACodecV1) MaxNumChunksPerBatch() uint64 { - return codecv1MaxNumChunks -} - -// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (d *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - if !block.Header.Number.IsUint64() { - return nil, errors.New("block number is not uint64") - } - - // note: numL1Messages includes skipped messages - numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) - if numL1Messages > math.MaxUint16 { - return nil, errors.New("number of L1 messages exceeds max uint16") - } - - // note: numTransactions includes skipped messages - numL2Transactions := block.NumL2Transactions() - numTransactions := numL1Messages + numL2Transactions - if numTransactions > math.MaxUint16 { - return nil, errors.New("number of transactions exceeds max uint16") - } - - daBlock := newDABlockV0( - block.Header.Number.Uint64(), // number - block.Header.Time, // timestamp - block.Header.BaseFee, // baseFee - block.Header.GasLimit, // gasLimit - uint16(numTransactions), // numTransactions - uint16(numL1Messages), // numL1Messages - ) - - return daBlock, nil -} - // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + var blocks []DABlock + var txs [][]*types.TransactionData + if chunk == nil { return nil, errors.New("chunk is nil") } @@ -74,9 +39,6 @@ func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []DABlock - var txs [][]*types.TransactionData - for _, block := range chunk.Blocks { b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { @@ -133,13 +95,13 @@ func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { batchBytes := BytesFromBlobCanonical(blob) - return DecodeTxsFromBytes(batchBytes[:], chunks, codecv1MaxNumChunks) + return DecodeTxsFromBytes(batchBytes[:], chunks, int(d.MaxNumChunksPerBatch())) } // NewDABatch creates a DABatch from the provided Batch. 
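// Editor's sketch (not part of this patch): the two-step decode flow
// these methods implement — chunk skeletons come from commit calldata,
// transaction bodies from the blob. chunkBytes and blob are
// placeholders a caller would fetch from L1.
func decodeCommittedBatch(codec *DACodecV1, chunkBytes [][]byte, blob *kzg4844.Blob) ([]*DAChunkRawTx, error) {
	chunks, err := codec.DecodeDAChunksRawTx(chunkBytes) // blocks only, txs still empty
	if err != nil {
		return nil, err
	}
	if err := codec.DecodeTxsFromBlob(blob, chunks); err != nil { // fills Transactions
		return nil, err
	}
	return chunks, nil
}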
func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > codecv1MaxNumChunks { + if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { return nil, errors.New("too many chunks in batch") } @@ -160,7 +122,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) if err != nil { return nil, err } @@ -182,16 +144,16 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // constructBlobPayload constructs the 4844 blob payload. -func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + codecv1MaxNumChunks*4 + metadataLength := 2 + maxNumChunksPerBatch*4 // the raw (un-padded) blob payload blobBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+codecv1MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -229,10 +191,10 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than codecv1MaxNumChunks chunks, the rest + // if we have fewer than MaxNumChunksPerBatch chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < codecv1MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -255,7 +217,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+codecv1MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+maxNumChunksPerBatch)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -354,100 +316,9 @@ func (d *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) return uint64(BlockContextByteSize * len(c.Blocks)), nil } -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
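// Editor's sketch (not part of this patch): the challenge-point
// derivation used at the end of constructBlobPayload above, in
// isolation — keccak the preimage, reduce into the BLS12-381 scalar
// field, and left-pad the result into the 32-byte point. blsModulus
// stands in for the package's BLSModulus constant.
func challengePoint(challengePreimage []byte, blsModulus *big.Int) kzg4844.Point {
	challengeDigest := crypto.Keccak256Hash(challengePreimage)
	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus)
	pointBytes := pointBigInt.Bytes()

	var z kzg4844.Point
	copy(z[32-len(pointBytes):], pointBytes) // big-endian, left-padded
	return z
}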
-func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - var totalNonSkippedL1Messages uint64 - var totalL1CommitGas uint64 - for _, block := range c.Blocks { - totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) - if err != nil { - return 0, err - } - totalL1CommitGas += blockL1CommitGas - } - - numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - return totalL1CommitGas, nil -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { - var totalL1CommitGas uint64 - - // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata - - // adjusting gas: - // add 1 time cold sload (2100 gas) for L1MessageQueue - // add 1 time cold address access (2600 gas) for L1MessageQueue - // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata - - // adjust batch data hash gas cost - totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) - - totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore - - for _, chunk := range b.Chunks { - chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += chunkL1CommitGas - - totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) - totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) - } - - return totalL1CommitGas, nil -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (d *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - var totalL1CommitCalldataSize uint64 - for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - } - return totalL1CommitCalldataSize, nil -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (d *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - return true, nil -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
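// Editor's note (not part of this patch): the recurring gas term
// 32*(n+255)/256 in the estimators above is the byte length of the
// skipped-L1-message bitmap — one 32-byte word per 256 messages, with
// (n+255)/256 acting as an integer ceiling:
func skippedBitmapBytes(numL1MessagesPopped uint64) uint64 {
	words := (numL1MessagesPopped + 255) / 256 // ceil(n/256)
	return 32 * words
}

// e.g. 0 msgs -> 0 bytes, 1 -> 32, 256 -> 32, 257 -> 64.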
-func (d *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - return true, nil -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - metadataSize := uint64(2 + 4*codecv1MaxNumChunks) + metadataSize := 2 + 4*d.MaxNumChunksPerBatch() batchDataSize, err := d.chunkL1CommitBlobDataSize(c) if err != nil { return 0, 0, err @@ -458,7 +329,7 @@ func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - metadataSize := uint64(2 + 4*codecv1MaxNumChunks) + metadataSize := 2 + 4*d.MaxNumChunksPerBatch() var batchDataSize uint64 for _, c := range b.Chunks { chunkDataSize, err := d.chunkL1CommitBlobDataSize(c) @@ -495,9 +366,3 @@ func (d *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe dataHash := crypto.Keccak256Hash(dataBytes) return dataHash, nil } - -// JSONFromBytes for CodecV1 returns empty values. -func (c *DACodecV1) JSONFromBytes(data []byte) ([]byte, error) { - // DACodecV1 doesn't need this, so just return empty values - return nil, nil -} diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 5a03195..239a4f8 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -6,7 +6,6 @@ import ( "encoding/hex" "errors" "fmt" - "math" "math/big" "github.com/scroll-tech/go-ethereum/common" @@ -18,7 +17,9 @@ import ( "github.com/scroll-tech/da-codec/encoding/zstd" ) -type DACodecV2 struct{} +type DACodecV2 struct { + DACodecV1 +} // codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const codecv2MaxNumChunks = 45 @@ -33,107 +34,6 @@ func (d *DACodecV2) MaxNumChunksPerBatch() uint64 { return codecv2MaxNumChunks } -// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (d *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - if !block.Header.Number.IsUint64() { - return nil, errors.New("block number is not uint64") - } - - // note: numL1Messages includes skipped messages - numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) - if numL1Messages > math.MaxUint16 { - return nil, errors.New("number of L1 messages exceeds max uint16") - } - - // note: numTransactions includes skipped messages - numL2Transactions := block.NumL2Transactions() - numTransactions := numL1Messages + numL2Transactions - if numTransactions > math.MaxUint16 { - return nil, errors.New("number of transactions exceeds max uint16") - } - - daBlock := newDABlockV0( - block.Header.Number.Uint64(), // number - block.Header.Time, // timestamp - block.Header.BaseFee, // baseFee - block.Header.GasLimit, // gasLimit - uint16(numTransactions), // numTransactions - uint16(numL1Messages), // numL1Messages - ) - - return daBlock, nil -} - -// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
-func (d *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - if chunk == nil { - return nil, errors.New("chunk is nil") - } - - if len(chunk.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(chunk.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var blocks []DABlock - var txs [][]*types.TransactionData - - for _, block := range chunk.Blocks { - b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - blocks = append(blocks, b) - totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) - txs = append(txs, block.Transactions) - } - - daChunk := newDAChunkV1( - blocks, // blocks - txs, // transactions - ) - - return daChunk, nil -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (d *DACodecV2) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx - for _, chunk := range bytes { - if len(chunk) < 1 { - return nil, fmt.Errorf("invalid chunk, length is less than 1") - } - - numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) - } - - blocks := make([]DABlock, numBlocks) - for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &daBlockV0{} - err := blocks[i].Decode(chunk[startIdx:endIdx]) - if err != nil { - return nil, err - } - } - - var transactions []types.Transactions - - chunks = append(chunks, &DAChunkRawTx{ - Blocks: blocks, - Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. - }) - } - return chunks, nil -} - // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { compressedBytes := BytesFromBlobCanonical(blob) @@ -143,13 +43,13 @@ func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, codecv2MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) } // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > codecv2MaxNumChunks { + if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { return nil, errors.New("too many chunks in batch") } @@ -170,7 +70,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) if err != nil { return nil, err } @@ -192,16 +92,16 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // constructBlobPayload constructs the 4844 blob payload. 
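// Editor's sketch (not part of this patch): the blob metadata section
// constructBlobPayload builds below — 2 bytes of num_chunks followed
// by maxNumChunksPerBatch fixed 4-byte chunk sizes, with unused slots
// left zero. A reader for that layout, with an illustrative name:
func readBlobMetadata(batchBytes []byte, maxNumChunksPerBatch int) (numChunks int, chunkSizes []uint32) {
	numChunks = int(binary.BigEndian.Uint16(batchBytes[0:2]))
	for i := 0; i < numChunks && i < maxNumChunksPerBatch; i++ {
		chunkSizes = append(chunkSizes, binary.BigEndian.Uint32(batchBytes[2+4*i:2+4*(i+1)]))
	}
	return numChunks, chunkSizes
}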
-func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + codecv2MaxNumChunks*4 + metadataLength := 2 + maxNumChunksPerBatch*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+codecv2MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -239,10 +139,10 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than codecv2MaxNumChunks chunks, the rest + // if we have fewer than MaxNumChunksPerBatch chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < codecv2MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -285,7 +185,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+codecv2MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+maxNumChunksPerBatch)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -396,154 +296,3 @@ func (d *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } return true, nil } - -// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (d *DACodecV2) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { - return BlockContextByteSize, nil -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (d *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(BlockContextByteSize * len(c.Blocks)), nil -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (d *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - var totalL1CommitCalldataSize uint64 - for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - } - return totalL1CommitCalldataSize, nil -} - -// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
-func (d *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { - var total uint64 - var numL1Messages uint64 - for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { - numL1Messages++ - continue - } - } - - total += CalldataNonZeroByteGas * BlockContextByteSize - - // sload - total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue - - // staticcall - total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue - total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - - total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - - return total, nil -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (d *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - var totalNonSkippedL1Messages uint64 - var totalL1CommitGas uint64 - for _, block := range c.Blocks { - totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) - if err != nil { - return 0, err - } - totalL1CommitGas += blockL1CommitGas - } - - numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - return totalL1CommitGas, nil -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (d *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { - var totalL1CommitGas uint64 - - // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata - - // adjusting gas: - // add 1 time cold sload (2100 gas) for L1MessageQueue - // add 1 time cold address access (2600 gas) for L1MessageQueue - // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata - - // adjust batch data hash gas cost - totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) - - totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore - - for _, chunk := range b.Chunks { - chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += chunkL1CommitGas - - totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) - totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) - } - - return totalL1CommitGas, nil -} - -// computeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (d *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore - - for _, chunk := range chunks { - daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return common.Hash{}, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - chunkHash, err := daChunk.Hash() - if err != nil { - return common.Hash{}, err - } - dataBytes = append(dataBytes, chunkHash.Bytes()...) - } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} - -// JSONFromBytes for CodecV2 returns empty values. 
-func (c *DACodecV2) JSONFromBytes(data []byte) ([]byte, error) { - // DACodecV2 doesn't need this, so just return empty values - return nil, nil -} diff --git a/encoding/codecv3.go b/encoding/codecv3.go index a1e43e6..5e4b9ba 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -1,156 +1,27 @@ package encoding import ( - "crypto/sha256" "encoding/binary" - "encoding/hex" "encoding/json" "errors" "fmt" - "math" - "math/big" "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/scroll-tech/go-ethereum/log" - - "github.com/scroll-tech/da-codec/encoding/zstd" ) -type DACodecV3 struct{} - -// codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. -const codecv3MaxNumChunks = 45 +type DACodecV3 struct { + DACodecV2 +} // Version returns the codec version. func (d *DACodecV3) Version() CodecVersion { return CodecV3 } -// MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (d *DACodecV3) MaxNumChunksPerBatch() uint64 { - return codecv3MaxNumChunks -} - -// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (d *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - if !block.Header.Number.IsUint64() { - return nil, errors.New("block number is not uint64") - } - - // note: numL1Messages includes skipped messages - numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) - if numL1Messages > math.MaxUint16 { - return nil, errors.New("number of L1 messages exceeds max uint16") - } - - // note: numTransactions includes skipped messages - numL2Transactions := block.NumL2Transactions() - numTransactions := numL1Messages + numL2Transactions - if numTransactions > math.MaxUint16 { - return nil, errors.New("number of transactions exceeds max uint16") - } - - daBlock := newDABlockV0( - block.Header.Number.Uint64(), // number - block.Header.Time, // timestamp - block.Header.BaseFee, // baseFee - block.Header.GasLimit, // gasLimit - uint16(numTransactions), // numTransactions - uint16(numL1Messages), // numL1Messages - ) - - return daBlock, nil -} - -// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (d *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - if chunk == nil { - return nil, errors.New("chunk is nil") - } - - if len(chunk.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(chunk.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var blocks []DABlock - var txs [][]*types.TransactionData - - for _, block := range chunk.Blocks { - b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - blocks = append(blocks, b) - totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) - txs = append(txs, block.Transactions) - } - - daChunk := newDAChunkV1( - blocks, // blocks - txs, // transactions - ) - - return daChunk, nil -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. 
-// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (d *DACodecV3) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx - for _, chunk := range bytes { - if len(chunk) < 1 { - return nil, fmt.Errorf("invalid chunk, length is less than 1") - } - - numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) - } - - blocks := make([]DABlock, numBlocks) - for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &daBlockV0{} - err := blocks[i].Decode(chunk[startIdx:endIdx]) - if err != nil { - return nil, err - } - } - - var transactions []types.Transactions - - chunks = append(chunks, &DAChunkRawTx{ - Blocks: blocks, - Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. - }) - } - return chunks, nil -} - -// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func (d *DACodecV3) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - compressedBytes := BytesFromBlobCanonical(blob) - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - - batchBytes, err := DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) - if err != nil { - return err - } - return DecodeTxsFromBytes(batchBytes, chunks, codecv3MaxNumChunks) -} - // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > codecv3MaxNumChunks { + if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { return nil, errors.New("too many chunks in batch") } @@ -175,7 +46,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) if err != nil { return nil, err } @@ -199,115 +70,6 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { ) } -// constructBlobPayload constructs the 4844 blob payload. 
-func (d *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + codecv3MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // challenge digest preimage - // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+codecv3MaxNumChunks+1)*32) - - // the chunk data hash used for calculating the challenge preimage - var chunkDataHash common.Hash - - // blob metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode blob metadata and L2 transactions, - // and simultaneously also build challenge preimage - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // blob metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - - // challenge: compute chunk data hash - chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // if we have fewer than codecv3MaxNumChunks chunks, the rest - // of the blob metadata is correctly initialized to 0, - // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < codecv3MaxNumChunks; chunkID++ { - // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // challenge: compute metadata hash - hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) - copy(challengePreimage[0:], hash[:]) - - // blobBytes represents the compressed blob payload (batchBytes) - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if !useMockTxData && len(batchBytes) > 131072 { - // Check compressed data compatibility. 
- if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, err - } - } - - if len(blobBytes) > 126976 { - log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") - } - - // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - - // compute blob versioned hash - c, err := kzg4844.BlobToCommitment(blob) - if err != nil { - return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") - } - blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) - - // challenge: append blob versioned hash - copy(challengePreimage[(1+codecv3MaxNumChunks)*32:], blobVersionedHash[:]) - - // compute z = challenge_digest % BLS_MODULUS - challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) - pointBytes := pointBigInt.Bytes() - - // the challenge point z - var z kzg4844.Point - start := 32 - len(pointBytes) - copy(z[start:], pointBytes) - - return blob, blobVersionedHash, &z, blobBytes, nil -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { @@ -341,126 +103,6 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (d *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) - if err != nil { - return 0, 0, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil -} - -// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (d *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) - if err != nil { - return 0, 0, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (d *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. 
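// Editor's note (not part of this patch): the two magic sizes in these
// checks. 131072 is 128 KiB of uncompressed batch data — below it the
// compatibility check is skipped and compatibility is assumed to hold;
// 126976 is the canonical blob capacity, 4096 field elements times 31
// usable bytes per element. Constant names here are illustrative:
const (
	compressedCheckThreshold = 128 * 1024 // 131072 bytes
	maxEffectiveBlobBytes    = 4096 * 31  // 126976 bytes
)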
- if len(batchBytes) <= 131072 { - return true, nil - } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (d *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if len(batchBytes) <= 131072 { - return true, nil - } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (d *DACodecV3) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { - return BlockContextByteSize, nil -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (d *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(BlockContextByteSize * len(c.Blocks)), nil -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (d *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - var totalL1CommitCalldataSize uint64 - for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - } - return totalL1CommitCalldataSize, nil -} - -// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (d *DACodecV3) EstimateBlockL1CommitGas(b *Block) (uint64, error) { - var total uint64 - var numL1Messages uint64 - for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { - numL1Messages++ - continue - } - } - - total += CalldataNonZeroByteGas * BlockContextByteSize - - // sload - total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue - - // staticcall - total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue - total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - - total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - - return total, nil -} - // estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. 
func (d *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 @@ -540,31 +182,6 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { return totalL1CommitGas, nil } -// computeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (d *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore - - for _, chunk := range chunks { - daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return common.Hash{}, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - chunkHash, err := daChunk.Hash() - if err != nil { - return common.Hash{}, err - } - dataBytes = append(dataBytes, chunkHash.Bytes()...) - } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} - // JSONFromBytes converts the bytes to a daBatchV2 and then marshals it to JSON. func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 6c50c0b..b0933f4 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -1,142 +1,26 @@ package encoding import ( - "crypto/sha256" "encoding/binary" - "encoding/hex" - "encoding/json" "errors" "fmt" - "math" - "math/big" "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/scroll-tech/go-ethereum/log" "github.com/scroll-tech/da-codec/encoding/zstd" ) type DACodecV4 struct { + DACodecV3 enableCompress bool } -// codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. -const codecv4MaxNumChunks = 45 - // Version returns the codec version. func (d *DACodecV4) Version() CodecVersion { return CodecV4 } -// MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (d *DACodecV4) MaxNumChunksPerBatch() uint64 { - return codecv4MaxNumChunks -} - -// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (d *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - if !block.Header.Number.IsUint64() { - return nil, errors.New("block number is not uint64") - } - - // note: numL1Messages includes skipped messages - numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) - if numL1Messages > math.MaxUint16 { - return nil, errors.New("number of L1 messages exceeds max uint16") - } - - // note: numTransactions includes skipped messages - numL2Transactions := block.NumL2Transactions() - numTransactions := numL1Messages + numL2Transactions - if numTransactions > math.MaxUint16 { - return nil, errors.New("number of transactions exceeds max uint16") - } - - daBlock := newDABlockV0( - block.Header.Number.Uint64(), // number - block.Header.Time, // timestamp - block.Header.BaseFee, // baseFee - block.Header.GasLimit, // gasLimit - uint16(numTransactions), // numTransactions - uint16(numL1Messages), // numL1Messages - ) - - return daBlock, nil -} - -// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (d *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - if chunk == nil { - return nil, errors.New("chunk is nil") - } - - if len(chunk.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(chunk.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var blocks []DABlock - var txs [][]*types.TransactionData - - for _, block := range chunk.Blocks { - b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - blocks = append(blocks, b) - totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) - txs = append(txs, block.Transactions) - } - - daChunk := newDAChunkV1( - blocks, // blocks - txs, // transactions - ) - - return daChunk, nil -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (d *DACodecV4) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx - for _, chunk := range bytes { - if len(chunk) < 1 { - return nil, fmt.Errorf("invalid chunk, length is less than 1") - } - - numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) - } - - blocks := make([]DABlock, numBlocks) - for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &daBlockV0{} - err := blocks[i].Decode(chunk[startIdx:endIdx]) - if err != nil { - return nil, err - } - } - - var transactions []types.Transactions - - chunks = append(chunks, &DAChunkRawTx{ - Blocks: blocks, - Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method. 
- }) - } - return chunks, nil -} - // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { rawBytes := BytesFromBlobCanonical(blob) @@ -148,16 +32,16 @@ func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, codecv4MaxNumChunks) + return DecodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) } else { - return DecodeTxsFromBytes(rawBytes[1:], chunks, codecv4MaxNumChunks) + return DecodeTxsFromBytes(rawBytes[1:], chunks, int(d.MaxNumChunksPerBatch())) } } // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > codecv4MaxNumChunks { + if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { return nil, errors.New("too many chunks in batch") } @@ -182,7 +66,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) if err != nil { return nil, err } @@ -211,120 +95,6 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { ) } -// constructBlobPayload constructs the 4844 blob payload. -func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + codecv4MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // challenge digest preimage - // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+codecv4MaxNumChunks+1)*32) - - // the chunk data hash used for calculating the challenge preimage - var chunkDataHash common.Hash - - // blob metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode blob metadata and L2 transactions, - // and simultaneously also build challenge preimage - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - batchBytes = append(batchBytes, rlpTxData...) 
- } - } - - // blob metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - - // challenge: compute chunk data hash - chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // if we have fewer than codecv4MaxNumChunks chunks, the rest - // of the blob metadata is correctly initialized to 0, - // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < codecv4MaxNumChunks; chunkID++ { - // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // challenge: compute metadata hash - hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) - copy(challengePreimage[0:], hash[:]) - - var blobBytes []byte - if d.enableCompress { - // blobBytes represents the compressed blob payload (batchBytes) - var err error - blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - if !useMockTxData { - // Check compressed data compatibility. - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, err - } - } - blobBytes = append([]byte{1}, blobBytes...) - } else { - blobBytes = append([]byte{0}, batchBytes...) - } - - if len(blobBytes) > 126976 { - log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") - } - - // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - - // compute blob versioned hash - c, err := kzg4844.BlobToCommitment(blob) - if err != nil { - return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") - } - blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) - - // challenge: append blob versioned hash - copy(challengePreimage[(1+codecv4MaxNumChunks)*32:], blobVersionedHash[:]) - - // compute z = challenge_digest % BLS_MODULUS - challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) - pointBytes := pointBigInt.Bytes() - - // the challenge point z - var z kzg4844.Point - start := 32 - len(pointBytes) - copy(z[start:], pointBytes) - - return blob, blobVersionedHash, &z, blobBytes, nil -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { @@ -403,208 +173,3 @@ func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil } - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
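The tail of the removed constructBlobPayload is the challenge-point derivation shared by the blob codecs: keccak the challenge preimage (metadata hash, chunk hashes, blob versioned hash), reduce modulo the BLS12-381 scalar field, and big-endian left-pad to 32 bytes. A standalone sketch of just that step, with a stand-in preimage and the scalar-field order written out (the helper name is hypothetical):

package main

import (
	"fmt"
	"math/big"

	"golang.org/x/crypto/sha3"
)

// blsModulus is the BLS12-381 scalar field order r.
var blsModulus, _ = new(big.Int).SetString("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16)

// challengePoint reduces keccak256(preimage) mod r and left-pads to 32 bytes,
// mirroring the last few lines of constructBlobPayload above.
func challengePoint(preimage []byte) [32]byte {
	h := sha3.NewLegacyKeccak256()
	h.Write(preimage)
	digest := h.Sum(nil)

	point := new(big.Int).Mod(new(big.Int).SetBytes(digest), blsModulus)

	var z [32]byte
	pointBytes := point.Bytes()
	copy(z[32-len(pointBytes):], pointBytes) // big-endian, left-padded
	return z
}

func main() {
	fmt.Printf("%x\n", challengePoint([]byte("stand-in challenge preimage")))
}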
-func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (d *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (d *DACodecV4) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { - return BlockContextByteSize, nil -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (d *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(BlockContextByteSize * len(c.Blocks)), nil -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (d *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - var totalL1CommitCalldataSize uint64 - for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - } - return totalL1CommitCalldataSize, nil -} - -// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (d *DACodecV4) EstimateBlockL1CommitGas(b *Block) (uint64, error) { - var total uint64 - var numL1Messages uint64 - for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { - numL1Messages++ - continue - } - } - - total += CalldataNonZeroByteGas * BlockContextByteSize - - // sload - total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue - - // staticcall - total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue - total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - - total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - - return total, nil -} - -// estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. 
-func (d *DACodecV4) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { - var totalNonSkippedL1Messages uint64 - var totalL1CommitGas uint64 - for _, block := range c.Blocks { - totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) - if err != nil { - return 0, err - } - totalL1CommitGas += blockL1CommitGas - } - - numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - - return totalL1CommitGas, nil -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (d *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - totalL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(c) - if err != nil { - return 0, err - } - totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. - return totalL1CommitGas, nil -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (d *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { - var totalL1CommitGas uint64 - - // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata - - // adjusting gas: - // add 1 time cold sload (2100 gas) for L1MessageQueue - // add 1 time cold address access (2600 gas) for L1MessageQueue - // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata - - // adjust batch data hash gas cost - totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) - - totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore - - for _, chunk := range b.Chunks { - chunkL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += chunkL1CommitGas - - totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) - totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - - chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) - } - - totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. - return totalL1CommitGas, nil -} - -// computeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. 
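Two recurring terms in the estimators above are easy to check by hand: the skipped-L1-message bitmap costs one 32-byte word per 256 messages (the 32*(popped+255)/256 term), and getKeccak256Gas is assumed here to follow the EVM rule of 30 gas plus 6 per 32-byte word (an assumption; the helper itself is not shown in this patch). A small sketch:

package main

import "fmt"

// bitmapBytes mirrors the 32*(popped+255)/256 term: one 32-byte word per
// 256 skipped-L1-message bits, rounded up.
func bitmapBytes(popped uint64) uint64 { return 32 * ((popped + 255) / 256) }

// keccakGas is an assumed model of getKeccak256Gas: the EVM KECCAK256 cost
// of 30 gas plus 6 gas per 32-byte word of input.
func keccakGas(dataLen uint64) uint64 { return 30 + 6*((dataLen+31)/32) }

func main() {
	fmt.Println(bitmapBytes(1), bitmapBytes(256), bitmapBytes(257)) // 32 32 64
	fmt.Println(keccakGas(89 + bitmapBytes(257)))                   // batch-header hash term: 60
}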
-func (d *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore - - for _, chunk := range chunks { - daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return common.Hash{}, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - chunkHash, err := daChunk.Hash() - if err != nil { - return common.Hash{}, err - } - dataBytes = append(dataBytes, chunkHash.Bytes()...) - } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} - -// JSONFromBytes converts the bytes to a daBatchV2 and then marshals it to JSON. -func (d *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { - batch, err := d.NewDABatchFromBytes(data) - if err != nil { - return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) - } - - jsonBytes, err := json.Marshal(batch) - if err != nil { - return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) - } - - return jsonBytes, nil -} From 7891503ea0e1b45e14a9426a0816873a510e10bb Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 12 Oct 2024 03:39:30 +0800 Subject: [PATCH 073/126] remove dablock.go dachunk.go dabatch.go --- encoding/codecv0_types.go | 282 ++++++++++++++++++++++ encoding/codecv1_types.go | 202 ++++++++++++++++ encoding/{dabatch.go => codecv3_types.go} | 204 ---------------- encoding/dablock.go | 89 ------- encoding/dachunk.go | 192 --------------- encoding/interfaces.go | 1 - 6 files changed, 484 insertions(+), 486 deletions(-) create mode 100644 encoding/codecv0_types.go create mode 100644 encoding/codecv1_types.go rename encoding/{dabatch.go => codecv3_types.go} (52%) delete mode 100644 encoding/dablock.go delete mode 100644 encoding/dachunk.go diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go new file mode 100644 index 0000000..e80c5d6 --- /dev/null +++ b/encoding/codecv0_types.go @@ -0,0 +1,282 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "math/big" + "strings" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// daBlockV0 represents a Data Availability Block. +type daBlockV0 struct { + number uint64 + timestamp uint64 + baseFee *big.Int + gasLimit uint64 + numTransactions uint16 + numL1Messages uint16 +} + +// newDABlockV0 is a constructor function for daBlockV0 that initializes the internal fields. +func newDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV0 { + return &daBlockV0{ + number: number, + timestamp: timestamp, + baseFee: baseFee, + gasLimit: gasLimit, + numTransactions: numTransactions, + numL1Messages: numL1Messages, + } +} + +// Encode serializes the DABlock into a slice of bytes. 
+func (b *daBlockV0) Encode() []byte {
+	bytes := make([]byte, BlockContextByteSize)
+	binary.BigEndian.PutUint64(bytes[0:], b.number)
+	binary.BigEndian.PutUint64(bytes[8:], b.timestamp)
+	if b.baseFee != nil {
+		binary.BigEndian.PutUint64(bytes[40:], b.baseFee.Uint64())
+	}
+	binary.BigEndian.PutUint64(bytes[48:], b.gasLimit)
+	binary.BigEndian.PutUint16(bytes[56:], b.numTransactions)
+	binary.BigEndian.PutUint16(bytes[58:], b.numL1Messages)
+	return bytes
+}
+
+// Decode populates the fields of a DABlock from a byte slice.
+func (b *daBlockV0) Decode(bytes []byte) error {
+	if len(bytes) != BlockContextByteSize {
+		return errors.New("block encoding is not BlockContextByteSize bytes long")
+	}
+
+	b.number = binary.BigEndian.Uint64(bytes[0:8])
+	b.timestamp = binary.BigEndian.Uint64(bytes[8:16])
+	b.baseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48]))
+	b.gasLimit = binary.BigEndian.Uint64(bytes[48:56])
+	b.numTransactions = binary.BigEndian.Uint16(bytes[56:58])
+	b.numL1Messages = binary.BigEndian.Uint16(bytes[58:60])
+
+	return nil
+}
+
+// Number returns the block number.
+func (b *daBlockV0) Number() uint64 {
+	return b.number
+}
+
+// Timestamp returns the block timestamp.
+func (b *daBlockV0) Timestamp() uint64 {
+	return b.timestamp
+}
+
+// BaseFee returns the block base fee.
+func (b *daBlockV0) BaseFee() *big.Int {
+	return b.baseFee
+}
+
+// GasLimit returns the block gas limit.
+func (b *daBlockV0) GasLimit() uint64 {
+	return b.gasLimit
+}
+
+// NumTransactions returns the number of transactions in the block.
+func (b *daBlockV0) NumTransactions() uint16 {
+	return b.numTransactions
+}
+
+// NumL1Messages returns the number of L1 messages in the block.
+func (b *daBlockV0) NumL1Messages() uint16 {
+	return b.numL1Messages
+}
+
+// DAChunkRawTx groups consecutive DABlocks with their L2 transactions; L1 messages are loaded elsewhere.
+type DAChunkRawTx struct {
+	Blocks       []DABlock
+	Transactions []types.Transactions
+}
+
+// daChunkV0 groups consecutive DABlocks with their transactions.
+type daChunkV0 struct {
+	blocks       []DABlock
+	transactions [][]*types.TransactionData
+}
+
+// newDAChunkV0 is a constructor for daChunkV0, initializing with blocks and transactions.
+func newDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV0 {
+	return &daChunkV0{
+		blocks:       blocks,
+		transactions: transactions,
+	}
+}
+
+// Encode serializes the DAChunk into a slice of bytes.
+func (c *daChunkV0) Encode() ([]byte, error) {
+	if len(c.blocks) == 0 {
+		return nil, errors.New("number of blocks is 0")
+	}
+
+	if len(c.blocks) > 255 {
+		return nil, errors.New("number of blocks exceeds 1 byte")
+	}
+
+	var chunkBytes []byte
+	chunkBytes = append(chunkBytes, byte(len(c.blocks)))
+
+	var l2TxDataBytes []byte
+
+	for _, block := range c.blocks {
+		chunkBytes = append(chunkBytes, block.Encode()...)
+	}
+
+	for _, blockTxs := range c.transactions {
+		for _, txData := range blockTxs {
+			if txData.Type == types.L1MessageTxType {
+				continue
+			}
+
+			var txLen [4]byte
+			rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */)
+			if err != nil {
+				return nil, err
+			}
+			binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData)))
+			l2TxDataBytes = append(l2TxDataBytes, txLen[:]...)
+			l2TxDataBytes = append(l2TxDataBytes, rlpTxData...)
+		}
+	}
+
+	chunkBytes = append(chunkBytes, l2TxDataBytes...)
+	return chunkBytes, nil
+}
+
+// Hash computes the hash of the DAChunk data.
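A runnable round-trip sketch of the 60-byte block context that Encode and Decode above agree on. Bytes [16:40] are the high-order bytes of a 256-bit base-fee slot and stay zero here, since Encode writes only the low 8 bytes at [40:48]:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	buf := make([]byte, 60) // one block context

	binary.BigEndian.PutUint64(buf[0:], 1234)           // number @ [0:8]
	binary.BigEndian.PutUint64(buf[8:], 1_700_000_000)  // timestamp @ [8:16]
	binary.BigEndian.PutUint64(buf[40:], 1_000_000_000) // baseFee low bytes @ [40:48]
	binary.BigEndian.PutUint64(buf[48:], 10_000_000)    // gasLimit @ [48:56]
	binary.BigEndian.PutUint16(buf[56:], 7)             // numTransactions @ [56:58]
	binary.BigEndian.PutUint16(buf[58:], 2)             // numL1Messages @ [58:60]

	// Decode reads the same offsets back.
	fmt.Println(binary.BigEndian.Uint64(buf[0:8]))   // 1234
	fmt.Println(binary.BigEndian.Uint16(buf[56:58])) // 7
}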
+func (c *daChunkV0) Hash() (common.Hash, error) { + chunkBytes, err := c.Encode() + if err != nil { + return common.Hash{}, err + } + + if len(chunkBytes) == 0 { + return common.Hash{}, errors.New("chunk data is empty and cannot be processed") + } + numBlocks := chunkBytes[0] + + // concatenate block contexts + var dataBytes []byte + for i := 0; i < int(numBlocks); i++ { + // only the first 58 bytes of each BlockContext are needed for the hashing process + dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) + } + + // concatenate l1 and l2 tx hashes + for _, blockTxs := range c.transactions { + var l1TxHashes []byte + var l2TxHashes []byte + for _, txData := range blockTxs { + txHash := strings.TrimPrefix(txData.TxHash, "0x") + hashBytes, err := hex.DecodeString(txHash) + if err != nil { + return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) + } + if txData.Type == types.L1MessageTxType { + l1TxHashes = append(l1TxHashes, hashBytes...) + } else { + l2TxHashes = append(l2TxHashes, hashBytes...) + } + } + dataBytes = append(dataBytes, l1TxHashes...) + dataBytes = append(dataBytes, l2TxHashes...) + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// BlockRange returns the block range of the DAChunk. +func (c *daChunkV0) BlockRange() (uint64, uint64, error) { + if len(c.blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil +} + +// daBatchV0 contains metadata about a batch of DAChunks. +type daBatchV0 struct { + version uint8 + batchIndex uint64 + l1MessagePopped uint64 + totalL1MessagePopped uint64 + dataHash common.Hash + parentBatchHash common.Hash + skippedL1MessageBitmap []byte +} + +// newDABatchV0 is a constructor for daBatchV0. +func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *daBatchV0 { + return &daBatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + } +} + +// Encode serializes the DABatch into bytes. +func (b *daBatchV0) Encode() []byte { + batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) + copy(batchBytes[25:], b.dataHash[:]) + copy(batchBytes[57:], b.parentBatchHash[:]) + copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *daBatchV0) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// Blob returns the blob of the batch. +func (b *daBatchV0) Blob() *kzg4844.Blob { + return nil +} + +// BlobBytes returns the blob bytes of the batch. +func (b *daBatchV0) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *daBatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + +// Version returns the version of the DABatch. 
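The hard-coded 89 in daBatchV0.Encode above decomposes as 1+8+8+8+32+32, with the variable-length skipped-L1-message bitmap appended after the fixed header. A tiny sketch pinning the offsets down (constant names are illustrative):

package main

import "fmt"

// Offsets of the daBatchV0 header fields serialized above.
const (
	offVersion         = 0  // 1 byte
	offBatchIndex      = 1  // 8 bytes
	offL1Popped        = 9  // 8 bytes
	offTotalL1Popped   = 17 // 8 bytes
	offDataHash        = 25 // 32 bytes
	offParentBatchHash = 57 // 32 bytes
	fixedHeaderLen     = 89 // skippedL1MessageBitmap starts here
)

func main() {
	fmt.Println(offParentBatchHash+32 == fixedHeaderLen) // true
}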
+func (b *daBatchV0) Version() uint8 { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *daBatchV0) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + +// DataHash returns the data hash of the DABatch. +func (b *daBatchV0) DataHash() common.Hash { + return b.dataHash +} diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go new file mode 100644 index 0000000..39347a3 --- /dev/null +++ b/encoding/codecv1_types.go @@ -0,0 +1,202 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "strings" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// daChunkV1 groups consecutive DABlocks with their transactions. +type daChunkV1 daChunkV0 + +// newDAChunkV1 is a constructor for daChunkV1, initializing with blocks and transactions. +func newDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV1 { + return &daChunkV1{ + blocks: blocks, + transactions: transactions, + } +} + +// Encode serializes the DAChunk into a slice of bytes. +func (c *daChunkV1) Encode() ([]byte, error) { + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.blocks))) + + for _, block := range c.blocks { + blockBytes := block.Encode() + chunkBytes = append(chunkBytes, blockBytes...) + } + + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. +func (c *daChunkV1) Hash() (common.Hash, error) { + var dataBytes []byte + + // concatenate block contexts + for _, block := range c.blocks { + encodedBlock := block.Encode() + // only the first 58 bytes are used in the hashing process + dataBytes = append(dataBytes, encodedBlock[:58]...) + } + + // concatenate l1 tx hashes + for _, blockTxs := range c.transactions { + for _, txData := range blockTxs { + if txData.Type != types.L1MessageTxType { + continue + } + + txHash := strings.TrimPrefix(txData.TxHash, "0x") + hashBytes, err := hex.DecodeString(txHash) + if err != nil { + return common.Hash{}, err + } + if len(hashBytes) != 32 { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) + } + dataBytes = append(dataBytes, hashBytes...) + } + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// BlockRange returns the block range of the DAChunk. +func (c *daChunkV1) BlockRange() (uint64, uint64, error) { + if len(c.blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil +} + +// daBatchV1 contains metadata about a batch of DAChunks. +type daBatchV1 struct { + daBatchV0 + + blobVersionedHash common.Hash + blob *kzg4844.Blob + z *kzg4844.Point +} + +// newDABatchV1 is a constructor for daBatchV1. +func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 { + return &daBatchV1{ + daBatchV0: daBatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } +} + +// Encode serializes the DABatch into bytes. 
+func (b *daBatchV1) Encode() []byte { + batchBytes := make([]byte, 121+len(b.skippedL1MessageBitmap)) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) + copy(batchBytes[25:], b.dataHash[:]) + copy(batchBytes[57:], b.blobVersionedHash[:]) + copy(batchBytes[89:], b.parentBatchHash[:]) + copy(batchBytes[121:], b.skippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *daBatchV1) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// BlobDataProof computes the abi-encoded blob verification data. +func (b *daBatchV1) BlobDataProof() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProof with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProof with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + return BlobDataProofFromValues(*b.z, y, commitment, proof), nil +} + +// Blob returns the blob of the batch. +func (b *daBatchV1) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobVersionedHashes returns the blob versioned hashes of the batch. +func (b *daBatchV1) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.blobVersionedHash} +} + +// BlobBytes returns the blob bytes of the batch. +func (b *daBatchV1) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + return BlobDataProofFromValues(*b.z, y, commitment, proof), nil +} + +// Version returns the version of the DABatch. +func (b *daBatchV1) Version() uint8 { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *daBatchV1) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + +// DataHash returns the data hash of the DABatch. +func (b *daBatchV1) DataHash() common.Hash { + return b.dataHash +} diff --git a/encoding/dabatch.go b/encoding/codecv3_types.go similarity index 52% rename from encoding/dabatch.go rename to encoding/codecv3_types.go index 8c26f9c..d9d4e2e 100644 --- a/encoding/dabatch.go +++ b/encoding/codecv3_types.go @@ -12,205 +12,6 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// daBatchV0 contains metadata about a batch of DAChunks. 
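BlobDataProof and BlobDataProofForPointEvaluation above both amount to opening the blob polynomial at the challenge point z. A sketch of the matching check, assuming the kzg4844 package's VerifyProof; the helper and the zero-valued inputs are illustrative only:

package main

import (
	"fmt"

	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
)

// verifyOpening recomputes the commitment, opens the blob at z, and checks
// the (z, y) opening: the consumer-side counterpart of the proofs built above.
func verifyOpening(blob *kzg4844.Blob, z kzg4844.Point) error {
	commitment, err := kzg4844.BlobToCommitment(blob)
	if err != nil {
		return err
	}
	proof, y, err := kzg4844.ComputeProof(blob, z)
	if err != nil {
		return err
	}
	return kzg4844.VerifyProof(commitment, z, y, proof)
}

func main() {
	var blob kzg4844.Blob // all-zero blob, fine for illustration
	var z kzg4844.Point   // the zero point
	fmt.Println(verifyOpening(&blob, z)) // <nil> when the opening checks out
}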
-type daBatchV0 struct { - version uint8 - batchIndex uint64 - l1MessagePopped uint64 - totalL1MessagePopped uint64 - dataHash common.Hash - parentBatchHash common.Hash - skippedL1MessageBitmap []byte -} - -// newDABatchV0 is a constructor for daBatchV0. -func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *daBatchV0 { - return &daBatchV0{ - version: version, - batchIndex: batchIndex, - l1MessagePopped: l1MessagePopped, - totalL1MessagePopped: totalL1MessagePopped, - dataHash: dataHash, - parentBatchHash: parentBatchHash, - skippedL1MessageBitmap: skippedL1MessageBitmap, - } -} - -// Encode serializes the DABatch into bytes. -func (b *daBatchV0) Encode() []byte { - batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) - batchBytes[0] = b.version - binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) - copy(batchBytes[25:], b.dataHash[:]) - copy(batchBytes[57:], b.parentBatchHash[:]) - copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *daBatchV0) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// Blob returns the blob of the batch. -func (b *daBatchV0) Blob() *kzg4844.Blob { - return nil -} - -// BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *daBatchV0) BlobVersionedHashes() []common.Hash { - return nil -} - -// BlobBytes returns the blob bytes of the batch. -func (b *daBatchV0) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *daBatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - -// Version returns the version of the DABatch. -func (b *daBatchV0) Version() uint8 { - return b.version -} - -// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -func (b *daBatchV0) SkippedL1MessageBitmap() []byte { - return b.skippedL1MessageBitmap -} - -// DataHash returns the data hash of the DABatch. -func (b *daBatchV0) DataHash() common.Hash { - return b.dataHash -} - -// daBatchV1 contains metadata about a batch of DAChunks. -type daBatchV1 struct { - daBatchV0 - - blobVersionedHash common.Hash - blob *kzg4844.Blob - z *kzg4844.Point -} - -// newDABatchV1 is a constructor for daBatchV1. -func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 { - return &daBatchV1{ - daBatchV0: daBatchV0{ - version: version, - batchIndex: batchIndex, - l1MessagePopped: l1MessagePopped, - totalL1MessagePopped: totalL1MessagePopped, - dataHash: dataHash, - parentBatchHash: parentBatchHash, - skippedL1MessageBitmap: skippedL1MessageBitmap, - }, - blobVersionedHash: blobVersionedHash, - blob: blob, - z: z, - } -} - -// Encode serializes the DABatch into bytes. 
-func (b *daBatchV1) Encode() []byte { - batchBytes := make([]byte, 121+len(b.skippedL1MessageBitmap)) - batchBytes[0] = b.version - binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) - copy(batchBytes[25:], b.dataHash[:]) - copy(batchBytes[57:], b.blobVersionedHash[:]) - copy(batchBytes[89:], b.parentBatchHash[:]) - copy(batchBytes[121:], b.skippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *daBatchV1) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. -func (b *daBatchV1) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return BlobDataProofFromValues(*b.z, y, commitment, proof), nil -} - -// Blob returns the blob of the batch. -func (b *daBatchV1) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *daBatchV1) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.blobVersionedHash} -} - -// BlobBytes returns the blob bytes of the batch. -func (b *daBatchV1) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return BlobDataProofFromValues(*b.z, y, commitment, proof), nil -} - -// Version returns the version of the DABatch. -func (b *daBatchV1) Version() uint8 { - return b.version -} - -// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -func (b *daBatchV1) SkippedL1MessageBitmap() []byte { - return b.skippedL1MessageBitmap -} - -// DataHash returns the data hash of the DABatch. -func (b *daBatchV1) DataHash() common.Hash { - return b.dataHash -} - // daBatchV2 contains metadata about a batch of DAChunks. type daBatchV2 struct { daBatchV0 @@ -358,11 +159,6 @@ func (b *daBatchV2) Blob() *kzg4844.Blob { return b.blob } -// BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *daBatchV2) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.blobVersionedHash} -} - // BlobBytes returns the blob bytes of the batch. 
func (b *daBatchV2) BlobBytes() []byte { return b.blobBytes diff --git a/encoding/dablock.go b/encoding/dablock.go deleted file mode 100644 index 0918fed..0000000 --- a/encoding/dablock.go +++ /dev/null @@ -1,89 +0,0 @@ -package encoding - -import ( - "encoding/binary" - "errors" - "math/big" -) - -// daBlockV0 represents a Data Availability Block. -type daBlockV0 struct { - number uint64 - timestamp uint64 - baseFee *big.Int - gasLimit uint64 - numTransactions uint16 - numL1Messages uint16 -} - -// newDABlockV0 is a constructor function for daBlockV0 that initializes the internal fields. -func newDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV0 { - return &daBlockV0{ - number: number, - timestamp: timestamp, - baseFee: baseFee, - gasLimit: gasLimit, - numTransactions: numTransactions, - numL1Messages: numL1Messages, - } -} - -// Encode serializes the DABlock into a slice of bytes. -func (b *daBlockV0) Encode() []byte { - bytes := make([]byte, BlockContextByteSize) - binary.BigEndian.PutUint64(bytes[0:], b.number) - binary.BigEndian.PutUint64(bytes[8:], b.timestamp) - if b.baseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.baseFee.Uint64()) - } - binary.BigEndian.PutUint64(bytes[48:], b.gasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.numTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.numL1Messages) - return bytes -} - -// Decode populates the fields of a DABlock from a byte slice. -func (b *daBlockV0) Decode(bytes []byte) error { - if len(bytes) != BlockContextByteSize { - return errors.New("block encoding is not BlockContextByteSize bytes long") - } - - b.number = binary.BigEndian.Uint64(bytes[0:8]) - b.timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.baseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.gasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.numTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.numL1Messages = binary.BigEndian.Uint16(bytes[58:60]) - - return nil -} - -// Number returns the block number. -func (b *daBlockV0) Number() uint64 { - return b.number -} - -// Timestamp returns the block timestamp. -func (b *daBlockV0) Timestamp() uint64 { - return b.timestamp -} - -// BaseFee returns the block base fee. -func (b *daBlockV0) BaseFee() *big.Int { - return b.baseFee -} - -// GasLimit returns the block gas limit. -func (b *daBlockV0) GasLimit() uint64 { - return b.gasLimit -} - -// NumTransactions returns the number of transactions in the block. -func (b *daBlockV0) NumTransactions() uint16 { - return b.numTransactions -} - -// NumL1Messages returns the number of L1 messages in the block. -func (b *daBlockV0) NumL1Messages() uint16 { - return b.numL1Messages -} diff --git a/encoding/dachunk.go b/encoding/dachunk.go deleted file mode 100644 index 1e4d98e..0000000 --- a/encoding/dachunk.go +++ /dev/null @@ -1,192 +0,0 @@ -package encoding - -import ( - "encoding/binary" - "encoding/hex" - "errors" - "fmt" - "strings" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" -) - -// daChunkV0 groups consecutive DABlocks with their transactions. -type daChunkV0 struct { - blocks []DABlock - transactions [][]*types.TransactionData -} - -// newDAChunkV0 is a constructor for daChunkV0, initializing with blocks and transactions. 
-func newDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV0 { - return &daChunkV0{ - blocks: blocks, - transactions: transactions, - } -} - -// Encode serializes the DAChunk into a slice of bytes. -func (c *daChunkV0) Encode() ([]byte, error) { - if len(c.blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(c.blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.blocks))) - - var l2TxDataBytes []byte - - for _, block := range c.blocks { - chunkBytes = append(chunkBytes, block.Encode()...) - } - - for _, blockTxs := range c.transactions { - for _, txData := range blockTxs { - if txData.Type == types.L1MessageTxType { - continue - } - - var txLen [4]byte - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return nil, err - } - binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) - l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) - l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) - } - } - - chunkBytes = append(chunkBytes, l2TxDataBytes...) - return chunkBytes, nil -} - -// Hash computes the hash of the DAChunk data. -func (c *daChunkV0) Hash() (common.Hash, error) { - chunkBytes, err := c.Encode() - if err != nil { - return common.Hash{}, err - } - - if len(chunkBytes) == 0 { - return common.Hash{}, errors.New("chunk data is empty and cannot be processed") - } - numBlocks := chunkBytes[0] - - // concatenate block contexts - var dataBytes []byte - for i := 0; i < int(numBlocks); i++ { - // only the first 58 bytes of each BlockContext are needed for the hashing process - dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) - } - - // concatenate l1 and l2 tx hashes - for _, blockTxs := range c.transactions { - var l1TxHashes []byte - var l2TxHashes []byte - for _, txData := range blockTxs { - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) - } - if txData.Type == types.L1MessageTxType { - l1TxHashes = append(l1TxHashes, hashBytes...) - } else { - l2TxHashes = append(l2TxHashes, hashBytes...) - } - } - dataBytes = append(dataBytes, l1TxHashes...) - dataBytes = append(dataBytes, l2TxHashes...) - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - -// BlockRange returns the block range of the DAChunk. -func (c *daChunkV0) BlockRange() (uint64, uint64, error) { - if len(c.blocks) == 0 { - return 0, 0, errors.New("number of blocks is 0") - } - - return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil -} - -// daChunkV1 groups consecutive DABlocks with their transactions. -type daChunkV1 daChunkV0 - -// newDAChunkV1 is a constructor for daChunkV1, initializing with blocks and transactions. -func newDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV1 { - return &daChunkV1{ - blocks: blocks, - transactions: transactions, - } -} - -// Encode serializes the DAChunk into a slice of bytes. -func (c *daChunkV1) Encode() ([]byte, error) { - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.blocks))) - - for _, block := range c.blocks { - blockBytes := block.Encode() - chunkBytes = append(chunkBytes, blockBytes...) - } - - return chunkBytes, nil -} - -// Hash computes the hash of the DAChunk data. 
-func (c *daChunkV1) Hash() (common.Hash, error) { - var dataBytes []byte - - // concatenate block contexts - for _, block := range c.blocks { - encodedBlock := block.Encode() - // only the first 58 bytes are used in the hashing process - dataBytes = append(dataBytes, encodedBlock[:58]...) - } - - // concatenate l1 tx hashes - for _, blockTxs := range c.transactions { - for _, txData := range blockTxs { - if txData.Type != types.L1MessageTxType { - continue - } - - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, err - } - if len(hashBytes) != 32 { - return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) - } - dataBytes = append(dataBytes, hashBytes...) - } - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - -// BlockRange returns the block range of the DAChunk. -func (c *daChunkV1) BlockRange() (uint64, uint64, error) { - if len(c.blocks) == 0 { - return 0, 0, errors.New("number of blocks is 0") - } - - return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil -} - -// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. -type DAChunkRawTx struct { - Blocks []DABlock - Transactions []types.Transactions -} diff --git a/encoding/interfaces.go b/encoding/interfaces.go index a9c98f8..d2436a5 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -33,7 +33,6 @@ type DABatch interface { BlobDataProofForPointEvaluation() ([]byte, error) Blob() *kzg4844.Blob BlobBytes() []byte - BlobVersionedHashes() []common.Hash Version() uint8 SkippedL1MessageBitmap() []byte } From 2703f226fb0b86527ef79622be3e9bcef240e726 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 14 Oct 2024 23:29:13 +0800 Subject: [PATCH 074/126] trigger ci From 443f1268c21af013007db50d7952d4cda1159ac8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 00:12:03 +0800 Subject: [PATCH 075/126] tweak --- encoding/codecv3.go | 2 +- encoding/codecv3_types.go | 21 +++++++-------------- encoding/codecv4.go | 2 +- 3 files changed, 9 insertions(+), 16 deletions(-) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 5e4b9ba..2a936dc 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -81,7 +81,7 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) } - b := NewDABatchV2WithProof( + b := newDABatchV2WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index d9d4e2e..4b794aa 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -25,12 +25,9 @@ type daBatchV2 struct { } // newDABatchV2 is a constructor for daBatchV2 that calls blobDataProofForPICircuit internally. 
-func newDABatchV2(version uint8, - batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, - dataHash, parentBatchHash, blobVersionedHash common.Hash, - skippedL1MessageBitmap []byte, - blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, -) (*daBatchV2, error) { +func newDABatchV2(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, + z *kzg4844.Point, blobBytes []byte) (*daBatchV2, error) { daBatch := &daBatchV2{ daBatchV0: daBatchV0{ version: version, @@ -58,14 +55,10 @@ func newDABatchV2(version uint8, return daBatch, nil } -// NewDABatchV2WithProof is a constructor for daBatchV2 that allows directly passing blobDataProof. -func NewDABatchV2WithProof(version uint8, - batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, - dataHash, parentBatchHash, blobVersionedHash common.Hash, - skippedL1MessageBitmap []byte, - blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, - blobDataProof [2]common.Hash, // Accept blobDataProof directly -) *daBatchV2 { +// newDABatchV2WithProof is a constructor for daBatchV2 that allows directly passing blobDataProof. +func newDABatchV2WithProof(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash) *daBatchV2 { return &daBatchV2{ daBatchV0: daBatchV0{ version: version, diff --git a/encoding/codecv4.go b/encoding/codecv4.go index b0933f4..86a3fa2 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -106,7 +106,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) } - b := NewDABatchV2WithProof( + b := newDABatchV2WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped From db773174854e27df71f4454c936ad40b34e4e6a8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 13:06:09 +0800 Subject: [PATCH 076/126] tweaks --- encoding/codecv0.go | 42 +++++++++---------- encoding/codecv0_types.go | 8 ++-- encoding/codecv1.go | 48 ++++++++++++++++------ encoding/codecv1_types.go | 9 +---- encoding/codecv2.go | 12 +++--- encoding/codecv3.go | 8 ++-- encoding/codecv3_types.go | 2 +- encoding/codecv4.go | 8 ++-- encoding/da.go | 85 +++++++++++---------------------------- encoding/da_test.go | 8 ++-- 10 files changed, 105 insertions(+), 125 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 6919b9f..2c54314 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -102,14 +102,14 @@ func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, e } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) + if len(chunk) < 1+numBlocks*blockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*blockContextByteSize) } blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to 
skip numBlocks byte
-		endIdx := startIdx + BlockContextByteSize
+		startIdx := 1 + i*blockContextByteSize // add 1 to skip numBlocks byte
+		endIdx := startIdx + blockContextByteSize
 		blocks[i] = &daBlockV0{}
 		err := blocks[i].Decode(chunk[startIdx:endIdx])
 		if err != nil {
@@ -118,27 +118,27 @@ func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, e
 	}
 
 	var transactions []types.Transactions
-	currentIndex := 1 + numBlocks*BlockContextByteSize
+	currentIndex := 1 + numBlocks*blockContextByteSize
 	for _, block := range blocks {
 		var blockTransactions types.Transactions
 		// ignore L1 msg transactions from the block, consider only L2 transactions
 		txNum := int(block.NumTransactions() - block.NumL1Messages())
 		for i := 0; i < txNum; i++ {
-			if len(chunk) < currentIndex+TxLenByteSize {
-				return nil, fmt.Errorf("chunk size doesn't match, next tx size is less then 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+TxLenByteSize, i)
+			if len(chunk) < currentIndex+txLenByteSize {
+				return nil, fmt.Errorf("chunk size doesn't match, next tx size is less than 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+txLenByteSize, i)
 			}
-			txLen := int(binary.BigEndian.Uint32(chunk[currentIndex : currentIndex+TxLenByteSize]))
-			if len(chunk) < currentIndex+TxLenByteSize+txLen {
-				return nil, fmt.Errorf("chunk size doesn't match with next tx length, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+TxLenByteSize+txLen, i)
+			txLen := int(binary.BigEndian.Uint32(chunk[currentIndex : currentIndex+txLenByteSize]))
+			if len(chunk) < currentIndex+txLenByteSize+txLen {
+				return nil, fmt.Errorf("chunk size doesn't match with next tx length, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+txLenByteSize+txLen, i)
 			}
-			txData := chunk[currentIndex+TxLenByteSize : currentIndex+TxLenByteSize+txLen]
+			txData := chunk[currentIndex+txLenByteSize : currentIndex+txLenByteSize+txLen]
 			tx := &types.Transaction{}
 			err := tx.UnmarshalBinary(txData)
 			if err != nil {
 				return nil, fmt.Errorf("failed to unmarshal tx, pos of tx in chunk bytes: %d. 
tx num without l1 msgs: %d, err: %w", currentIndex, i, err) } blockTransactions = append(blockTransactions, tx) - currentIndex += TxLenByteSize + txLen + currentIndex += txLenByteSize + txLen } transactions = append(transactions, blockTransactions) } @@ -244,7 +244,7 @@ func (d *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) } size += txPayloadLength } - size += BlockContextByteSize + size += blockContextByteSize return size, nil } @@ -262,12 +262,12 @@ func (d *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { if err != nil { return 0, err } - total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += calldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += calldataNonZeroByteGas * 4 // 4 bytes payload length total += getKeccak256Gas(txPayloadLength) // l2 tx hash } - total += CalldataNonZeroByteGas * BlockContextByteSize + total += calldataNonZeroByteGas * blockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -313,8 +313,8 @@ func (d *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { numBlocks := uint64(len(c.Blocks)) totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += calldataNonZeroByteGas * numBlocks * blockContextByteSize // numBlocks of BlockContext in chunk totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil @@ -329,7 +329,7 @@ func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch totalL1CommitGas += 20000 // 1 time sstore totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += calldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue @@ -337,7 +337,7 @@ func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += calldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) @@ -354,7 +354,7 @@ func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += calldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := 
d.EstimateChunkL1CommitCalldataSize(chunk) diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index e80c5d6..36c4e51 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -38,7 +38,7 @@ func newDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit ui // Encode serializes the DABlock into a slice of bytes. func (b *daBlockV0) Encode() []byte { - bytes := make([]byte, BlockContextByteSize) + bytes := make([]byte, blockContextByteSize) binary.BigEndian.PutUint64(bytes[0:], b.number) binary.BigEndian.PutUint64(bytes[8:], b.timestamp) if b.baseFee != nil { @@ -52,8 +52,8 @@ func (b *daBlockV0) Encode() []byte { // Decode populates the fields of a DABlock from a byte slice. func (b *daBlockV0) Decode(bytes []byte) error { - if len(bytes) != BlockContextByteSize { - return errors.New("block encoding is not BlockContextByteSize bytes long") + if len(bytes) != blockContextByteSize { + return errors.New("block encoding is not blockContextByteSize bytes long") } b.number = binary.BigEndian.Uint64(bytes[0:8]) @@ -142,7 +142,7 @@ func (c *daChunkV0) Encode() ([]byte, error) { } var txLen [4]byte - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return nil, err } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 2236c51..490fe92 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -67,14 +67,14 @@ func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) + if len(chunk) < 1+numBlocks*blockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*blockContextByteSize) } blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize + startIdx := 1 + i*blockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + blockContextByteSize blocks[i] = &daBlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { @@ -94,8 +94,8 @@ func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - batchBytes := BytesFromBlobCanonical(blob) - return DecodeTxsFromBytes(batchBytes[:], chunks, int(d.MaxNumChunksPerBatch())) + batchBytes := bytesFromBlobCanonical(blob) + return decodeTxsFromBytes(batchBytes[:], chunks, int(d.MaxNumChunksPerBatch())) } // NewDABatch creates a DABatch from the provided Batch. 
@@ -173,7 +173,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, err } @@ -204,7 +204,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := makeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -221,7 +221,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -267,7 +267,7 @@ func (d *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { continue } - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return 0, err } @@ -288,7 +288,7 @@ func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } } - total += CalldataNonZeroByteGas * BlockContextByteSize + total += calldataNonZeroByteGas * blockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -306,14 +306,36 @@ func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { return total, nil } +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + var totalTxNum uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalTxNum += uint64(len(block.Transactions)) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += calldataNonZeroByteGas * numBlocks * blockContextByteSize // numBlocks of BlockContext in chunk + + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + return totalL1CommitGas, nil +} + // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. func (d *DACodecV1) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { - return BlockContextByteSize, nil + return blockContextByteSize, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. func (d *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(BlockContextByteSize * len(c.Blocks)), nil + return uint64(blockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
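The chunk estimate above is plain arithmetic over two helpers these hunks reference but do not define, getKeccak256Gas and getMemoryExpansionCost. Assuming they follow the standard EVM cost model (KECCAK256: 30 gas base plus 6 gas per 32-byte word; memory: 3 gas per word plus words*words/512), a back-of-the-envelope sketch of the same sum for a hypothetical chunk looks like this; the constants mirror the hunk as it stands at this point in the series, including the per-block BlockContext calldata term:

package main

import "fmt"

const (
	calldataNonZeroByteGas = 16 // gas per non-zero calldata byte
	blockContextByteSize   = 60 // encoded BlockContext size
)

// keccakGas assumes the standard EVM KECCAK256 cost model: 30 gas base
// plus 6 gas per 32-byte word of input; the repository's getKeccak256Gas
// helper is not shown in these hunks.
func keccakGas(size uint64) uint64 {
	return 30 + 6*((size+31)/32)
}

// memoryExpansionCost assumes the standard EVM memory cost from fresh
// memory: 3 gas per word plus a quadratic words*words/512 term.
func memoryExpansionCost(byteSize uint64) uint64 {
	words := (byteSize + 31) / 32
	return 3*words + words*words/512
}

func main() {
	// Hypothetical chunk: 5 blocks, 20 transactions in total.
	numBlocks, totalTxNum := uint64(5), uint64(20)

	gas := 100 * numBlocks                                           // numBlocks warm sloads
	gas += calldataNonZeroByteGas                                    // numBlocks field of chunk encoding
	gas += calldataNonZeroByteGas * numBlocks * blockContextByteSize // BlockContexts in calldata
	gas += keccakGas(58*numBlocks + 32*totalTxNum)                   // chunk hash preimage

	fmt.Println("approx. chunk commit gas:", gas)
	fmt.Println("approx. memory expansion:", memoryExpansionCost(numBlocks*blockContextByteSize))
}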
diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index 39347a3..b696380 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -146,7 +146,7 @@ func (b *daBatchV1) BlobDataProof() ([]byte, error) { return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) } - return BlobDataProofFromValues(*b.z, y, commitment, proof), nil + return blobDataProofFromValues(*b.z, y, commitment, proof), nil } // Blob returns the blob of the batch. @@ -154,11 +154,6 @@ func (b *daBatchV1) Blob() *kzg4844.Blob { return b.blob } -// BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *daBatchV1) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.blobVersionedHash} -} - // BlobBytes returns the blob bytes of the batch. func (b *daBatchV1) BlobBytes() []byte { return nil @@ -183,7 +178,7 @@ func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) } - return BlobDataProofFromValues(*b.z, y, commitment, proof), nil + return blobDataProofFromValues(*b.z, y, commitment, proof), nil } // Version returns the version of the DABatch. diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 239a4f8..79ffb6f 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -36,14 +36,14 @@ func (d *DACodecV2) MaxNumChunksPerBatch() uint64 { // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - compressedBytes := BytesFromBlobCanonical(blob) + compressedBytes := bytesFromBlobCanonical(blob) magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) + batchBytes, err := decompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) } // NewDABatch creates a DABatch from the provided Batch. 
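The four magic bytes 0x28, 0xb5, 0x2f, 0xfd in `magics` above are the standard zstd frame magic number: the blob stores the compressed frame without it to save space, and decoding prepends it before decompressing. The trick can be reproduced with a general-purpose zstd implementation; the sketch below uses github.com/klauspost/compress/zstd purely as a stand-in for the scroll-specific encoder and the streaming decompressScrollBlobToBatch used here:

package main

import (
	"fmt"

	"github.com/klauspost/compress/zstd"
)

// zstdMagic is the zstd frame magic number (0xFD2FB528, little-endian
// on the wire), the same four bytes as `magics` above.
var zstdMagic = []byte{0x28, 0xb5, 0x2f, 0xfd}

// decompressHeaderless restores the stripped magic number and hands the
// reconstructed frame to a regular zstd decoder.
func decompressHeaderless(headerless []byte) ([]byte, error) {
	dec, err := zstd.NewReader(nil)
	if err != nil {
		return nil, err
	}
	defer dec.Close()
	return dec.DecodeAll(append(zstdMagic, headerless...), nil)
}

func main() {
	enc, err := zstd.NewWriter(nil)
	if err != nil {
		panic(err)
	}
	compressed := enc.EncodeAll([]byte("batch payload"), nil)
	enc.Close()

	// Strip the 4-byte magic, as the blob format does, then round-trip.
	out, err := decompressHeaderless(compressed[4:])
	fmt.Println(string(out), err) // batch payload <nil>
}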
@@ -121,7 +121,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -172,7 +172,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := makeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -189,7 +189,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) pointBytes := pointBigInt.Bytes() // the challenge point z diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 2a936dc..04ceb3e 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -118,7 +118,7 @@ func (d *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui numBlocks := uint64(len(c.Blocks)) totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil @@ -143,7 +143,7 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch totalL1CommitGas += 20000 // 1 time sstore totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += calldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue @@ -151,7 +151,7 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += calldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) @@ -168,7 +168,7 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += calldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index 4b794aa..5b4d755 100644 --- a/encoding/codecv3_types.go +++ 
b/encoding/codecv3_types.go @@ -144,7 +144,7 @@ func (b *daBatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) } - return BlobDataProofFromValues(*b.z, y, commitment, proof), nil + return blobDataProofFromValues(*b.z, y, commitment, proof), nil } // Blob returns the blob of the batch. diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 86a3fa2..134e5fd 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -23,18 +23,18 @@ func (d *DACodecV4) Version() CodecVersion { // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - rawBytes := BytesFromBlobCanonical(blob) + rawBytes := bytesFromBlobCanonical(blob) // if first byte is 1 - data compressed, 0 - not compressed if rawBytes[0] == 0x1 { magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := DecompressScrollBlobToBatch(append(magics, rawBytes[1:]...)) + batchBytes, err := decompressScrollBlobToBatch(append(magics, rawBytes[1:]...)) if err != nil { return err } - return DecodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) } else { - return DecodeTxsFromBytes(rawBytes[1:], chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(rawBytes[1:], chunks, int(d.MaxNumChunksPerBatch())) } } diff --git a/encoding/da.go b/encoding/da.go index eb709f1..1bbd28d 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -15,17 +15,17 @@ import ( "github.com/scroll-tech/go-ethereum/params" ) -// BLSModulus is the BLS modulus defined in EIP-4844. -var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) +// blsModulus is the BLS modulus defined in EIP-4844. +var blsModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 +// calldataNonZeroByteGas is the gas consumption per non zero byte in calldata. +const calldataNonZeroByteGas = 16 -// BlockContextByteSize is the size of the block context in bytes. -const BlockContextByteSize = 60 +// blockContextByteSize is the size of the block context in bytes. +const blockContextByteSize = 60 -// TxLenByteSize is the size of the transaction length in bytes. -const TxLenByteSize = 4 +// txLenByteSize is the size of the transaction length in bytes. +const txLenByteSize = 4 // Block represents an L2 block. type Block struct { @@ -89,8 +89,8 @@ func (c *Chunk) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { return numL1Messages } -// ConvertTxDataToRLPEncoding transforms []*TransactionData into []*types.Transaction. -func ConvertTxDataToRLPEncoding(txData *types.TransactionData, useMockTxData bool) ([]byte, error) { +// convertTxDataToRLPEncoding transforms []*TransactionData into []*types.Transaction. +func convertTxDataToRLPEncoding(txData *types.TransactionData, useMockTxData bool) ([]byte, error) { data, err := hexutil.Decode(txData.Data) if err != nil { return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) @@ -206,8 +206,8 @@ func (c *Chunk) NumL2Transactions() uint64 { return totalTxNum } -// L2GasUsed calculates the total gas of L2 transactions in a Chunk. 
-func (c *Chunk) L2GasUsed() uint64 { +// TotalGasUsed calculates the total gas of transactions in a Chunk. +func (c *Chunk) TotalGasUsed() uint64 { var totalGasUsed uint64 for _, block := range c.Blocks { totalGasUsed += block.Header.GasUsed @@ -235,43 +235,6 @@ func (b *Batch) WithdrawRoot() common.Hash { return b.Chunks[len(b.Chunks)-1].Blocks[lastChunkBlockNum-1].WithdrawRoot } -// TxsToTxsData converts transactions to a TransactionData array. -func TxsToTxsData(txs types.Transactions) []*types.TransactionData { - txsData := make([]*types.TransactionData, len(txs)) - for i, tx := range txs { - v, r, s := tx.RawSignatureValues() - - nonce := tx.Nonce() - - // We need QueueIndex in `NewBatchHeader`. However, `TransactionData` - // does not have this field. Since `L1MessageTx` do not have a nonce, - // we reuse this field for storing the queue index. - if msg := tx.AsL1MessageTx(); msg != nil { - nonce = msg.QueueIndex - } - - txsData[i] = &types.TransactionData{ - Type: tx.Type(), - TxHash: tx.Hash().String(), - Nonce: nonce, - ChainId: (*hexutil.Big)(tx.ChainId()), - Gas: tx.Gas(), - GasPrice: (*hexutil.Big)(tx.GasPrice()), - GasTipCap: (*hexutil.Big)(tx.GasTipCap()), - GasFeeCap: (*hexutil.Big)(tx.GasFeeCap()), - To: tx.To(), - Value: (*hexutil.Big)(tx.Value()), - Data: hexutil.Encode(tx.Data()), - IsCreate: tx.To() == nil, - AccessList: tx.AccessList(), - V: (*hexutil.Big)(v), - R: (*hexutil.Big)(r), - S: (*hexutil.Big)(s), - } - } - return txsData -} - // Fast testing if the compressed data is compatible with our circuit // (require specified frame header and each block is compressed) func CheckCompressedDataCompatibility(data []byte) error { @@ -322,8 +285,8 @@ func CheckCompressedDataCompatibility(data []byte) error { return nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { +// makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. 
+func makeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { // blob contains 131072 bytes but we can only utilize 31/32 of these if len(blobBytes) > 126976 { return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) @@ -347,8 +310,8 @@ func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { return &blob, nil } -// BytesFromBlobCanonical converts the canonical blob representation into the raw blob data -func BytesFromBlobCanonical(blob *kzg4844.Blob) [126976]byte { +// bytesFromBlobCanonical converts the canonical blob representation into the raw blob data +func bytesFromBlobCanonical(blob *kzg4844.Blob) [126976]byte { var blobBytes [126976]byte for from := 0; from < len(blob); from += 32 { copy(blobBytes[from/32*31:], blob[from+1:from+32]) @@ -356,8 +319,8 @@ func BytesFromBlobCanonical(blob *kzg4844.Blob) [126976]byte { return blobBytes } -// DecompressScrollBlobToBatch decompresses the given blob bytes into scroll batch bytes -func DecompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) { +// decompressScrollBlobToBatch decompresses the given blob bytes into scroll batch bytes +func decompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) { // decompress data in stream and in batches of bytes, because we don't know actual length of compressed data var res []byte readBatchSize := 131072 @@ -420,7 +383,7 @@ func constructBatchPayloadInBlob(chunks []*Chunk, codec Codec) ([]byte, error) { } // encode L2 txs into batch payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return nil, err } @@ -450,20 +413,20 @@ func getMemoryExpansionCost(memoryByteSize uint64) uint64 { // getTxPayloadLength calculates the length of the transaction payload. func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return 0, err } return uint64(len(rlpTxData)), nil } -// BlobDataProofFromValues creates the blob data proof from the given values. +// blobDataProofFromValues creates the blob data proof from the given values. 
// Memory layout of ``_blobDataProof``: // | z | y | kzg_commitment | kzg_proof | // |---------|---------|----------------|-----------| // | bytes32 | bytes32 | bytes48 | bytes48 | -func BlobDataProofFromValues(z kzg4844.Point, y kzg4844.Claim, commitment kzg4844.Commitment, proof kzg4844.Proof) []byte { +func blobDataProofFromValues(z kzg4844.Point, y kzg4844.Claim, commitment kzg4844.Commitment, proof kzg4844.Proof) []byte { result := make([]byte, 32+32+48+48) copy(result[0:32], z[:]) @@ -529,8 +492,8 @@ func getNextTx(bytes []byte, index int) (*types.Transaction, int, error) { return tx, nextIndex, nil } -// DecodeTxsFromBytes decodes txs from blob bytes and writes to chunks -func DecodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks int) error { +// decodeTxsFromBytes decodes txs from blob bytes and writes to chunks +func decodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks int) error { numChunks := int(binary.BigEndian.Uint16(blobBytes[0:2])) if numChunks != len(chunks) { return fmt.Errorf("blob chunk number is not same as calldata, blob num chunks: %d, calldata num chunks: %d", numChunks, len(chunks)) diff --git a/encoding/da_test.go b/encoding/da_test.go index 0481597..2953e71 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -55,7 +55,7 @@ func TestUtilFunctions(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(11), crc1Max) assert.Equal(t, uint64(3), chunk1.NumTransactions()) - assert.Equal(t, uint64(1194994), chunk1.L2GasUsed()) + assert.Equal(t, uint64(1194994), chunk1.TotalGasUsed()) assert.Equal(t, uint64(42), chunk2.NumL1Messages(0)) assert.Equal(t, uint64(1), chunk2.NumL2Transactions()) @@ -63,7 +63,7 @@ func TestUtilFunctions(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(0), crc2Max) assert.Equal(t, uint64(7), chunk2.NumTransactions()) - assert.Equal(t, uint64(144000), chunk2.L2GasUsed()) + assert.Equal(t, uint64(144000), chunk2.TotalGasUsed()) assert.Equal(t, uint64(257), chunk3.NumL1Messages(0)) assert.Equal(t, uint64(0), chunk3.NumL2Transactions()) @@ -73,7 +73,7 @@ func TestUtilFunctions(t *testing.T) { assert.EqualError(t, err, "block (17, 0x003fee335455c0c293dda17ea9365fe0caa94071ed7216baf61f7aeb808e8a28) has nil RowConsumption") assert.Equal(t, uint64(0), crc3Max) assert.Equal(t, uint64(5), chunk3.NumTransactions()) - assert.Equal(t, uint64(240000), chunk3.L2GasUsed()) + assert.Equal(t, uint64(240000), chunk3.TotalGasUsed()) // Test Batch methods assert.Equal(t, block6.Header.Root, batch.StateRoot()) @@ -96,7 +96,7 @@ func TestConvertTxDataToRLPEncoding(t *testing.T) { continue } - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */) assert.NoError(t, err) var tx types.Transaction err = tx.UnmarshalBinary(rlpTxData) From 05e42d9e74520ed9cca61aeb0b51fe1d76e5a899 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 13:23:04 +0800 Subject: [PATCH 077/126] add back TxsToTxsData --- encoding/da.go | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/encoding/da.go b/encoding/da.go index 1bbd28d..b12d8fd 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -235,6 +235,43 @@ func (b *Batch) WithdrawRoot() common.Hash { return b.Chunks[len(b.Chunks)-1].Blocks[lastChunkBlockNum-1].WithdrawRoot } +// TxsToTxsData converts transactions to a TransactionData array. 
+func TxsToTxsData(txs types.Transactions) []*types.TransactionData { + txsData := make([]*types.TransactionData, len(txs)) + for i, tx := range txs { + v, r, s := tx.RawSignatureValues() + + nonce := tx.Nonce() + + // We need QueueIndex in `NewBatchHeader`. However, `TransactionData` + // does not have this field. Since `L1MessageTx` does not have a nonce, + // we reuse this field for storing the queue index. + if msg := tx.AsL1MessageTx(); msg != nil { + nonce = msg.QueueIndex + } + + txsData[i] = &types.TransactionData{ + Type: tx.Type(), + TxHash: tx.Hash().String(), + Nonce: nonce, + ChainId: (*hexutil.Big)(tx.ChainId()), + Gas: tx.Gas(), + GasPrice: (*hexutil.Big)(tx.GasPrice()), + GasTipCap: (*hexutil.Big)(tx.GasTipCap()), + GasFeeCap: (*hexutil.Big)(tx.GasFeeCap()), + To: tx.To(), + Value: (*hexutil.Big)(tx.Value()), + Data: hexutil.Encode(tx.Data()), + IsCreate: tx.To() == nil, + AccessList: tx.AccessList(), + V: (*hexutil.Big)(v), + R: (*hexutil.Big)(r), + S: (*hexutil.Big)(s), + } + } + return txsData +} + // Fast testing if the compressed data is compatible with our circuit // (require specified frame header and each block is compressed) func CheckCompressedDataCompatibility(data []byte) error { From 35a7d9b201964afeba8d965ecd6d918991fd1802 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 13:35:10 +0800 Subject: [PATCH 078/126] fix --- encoding/codecv1.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 490fe92..b704710 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -320,9 +320,8 @@ func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += calldataNonZeroByteGas * numBlocks * blockContextByteSize // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil From 1ab5de1b8fbcb219c36970d956ad4fa0bdc5150b Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 13:35:56 +0800 Subject: [PATCH 079/126] fix --- encoding/codecv0.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 2c54314..ea311e4 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -312,9 +312,8 @@ func (d *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += calldataNonZeroByteGas * numBlocks * blockContextByteSize // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil From ca48609875267495b99436dbe7939426d83c4ba1 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 13:50:10 +0800 Subject: [PATCH 080/126] fix --- encoding/codecv1.go | 9 +++++---- 1 file changed, 5
insertions(+), 4 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index b704710..e3aa294 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -11,6 +11,7 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/rollup/types/encoding" ) type DACodecV1 struct { @@ -307,11 +308,11 @@ func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - var totalTxNum uint64 +func (d *DACodecV1) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { - totalTxNum += uint64(len(block.Transactions)) + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err @@ -323,7 +324,7 @@ func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } From 7a81bc24fcbf30f4f92a3dc496849e0e04eafbc5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 14:01:54 +0800 Subject: [PATCH 081/126] fix --- encoding/codecv1.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index e3aa294..dd03b1e 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -11,7 +11,6 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/scroll-tech/go-ethereum/rollup/types/encoding" ) type DACodecV1 struct { @@ -308,7 +307,7 @@ func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (d *DACodecV1) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { From ca3a6d46dd6055760ccdb19d5d9702538cb0f2f7 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 14:14:06 +0800 Subject: [PATCH 082/126] fix --- encoding/codecv1.go | 47 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index dd03b1e..198923c 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -327,6 +327,53 @@ func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { return totalL1CommitGas, nil } +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
+func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + var totalL1CommitGas uint64 + + // Add extra gas costs + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += calldataNonZeroByteGas // version in calldata + + // adjusting gas: + // add 1 time cold sload (2100 gas) for L1MessageQueue + // add 1 time cold address access (2600 gas) for L1MessageQueue + // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) + totalL1CommitGas += (2100 + 2600 - 100 - 100) + totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += calldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + + // adjust batch data hash gas cost + totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) + + totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore + + for _, chunk := range b.Chunks { + chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += chunkL1CommitGas + + totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) + totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk + + totalL1CommitGas += calldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) + } + + return totalL1CommitGas, nil +} + // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. func (d *DACodecV1) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { return blockContextByteSize, nil From e36ee98736a0a580a88cfe9d35ab3d9a30c249fa Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 14:37:57 +0800 Subject: [PATCH 083/126] fix --- encoding/codecv1.go | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 198923c..35a1617 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -384,6 +384,19 @@ func (d *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) return uint64(blockContextByteSize * len(c.Blocks)), nil } +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func (d *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { metadataSize := 2 + 4*d.MaxNumChunksPerBatch() From 931bb50bb1f44121a47e679a51bef75aafa4a47b Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 15 Oct 2024 16:21:33 +0800 Subject: [PATCH 084/126] tweak --- encoding/bitmap.go | 4 ++-- encoding/codecv0.go | 2 +- encoding/codecv1.go | 2 +- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/codecv4.go | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index 5631983..fedec12 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -7,8 +7,8 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" ) -// ConstructSkippedBitmap constructs skipped L1 message bitmap of the batch. -func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { +// constructSkippedBitmap constructs skipped L1 message bitmap of the batch. +func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int diff --git a/encoding/codecv0.go b/encoding/codecv0.go index ea311e4..c058a81 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -189,7 +189,7 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 35a1617..ccfb81e 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -116,7 +116,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 79ffb6f..f5821ca 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -64,7 +64,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 04ceb3e..acb3e34 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -40,7 +40,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 134e5fd..312df3c 
100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -60,7 +60,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } From 5c3c05577aba31501e6146f4633144005951c34d Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 00:04:17 +0800 Subject: [PATCH 085/126] add back test block encode and test chunk encode unit tests --- encoding/codecv0_test.go | 122 ++++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 145 +++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 145 +++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 144 ++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 145 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 701 insertions(+) create mode 100644 encoding/codecv0_test.go create mode 100644 encoding/codecv1_test.go create mode 100644 encoding/codecv2_test.go create mode 100644 encoding/codecv3_test.go create mode 100644 encoding/codecv4_test.go diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go new file mode 100644 index 0000000..603cc3a --- /dev/null +++ b/encoding/codecv0_test.go @@ -0,0 +1,122 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" +) + +func TestCodecV0BlockEncode(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + block := &daBlockV0{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv0.NewDABlock(block2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv0.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv0.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv0.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv0.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, 
"000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv0.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV0ChunkEncode(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV0 := &daChunkV0{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV0.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, 
"0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e5000100000000163102f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d260806040523480156200001157600080fd5b50604051620014b2380380620014b2833981810160405260a08110156200003757600080fd5b815160208301516040808501805191519395929483019291846401000000008211156200006357600080fd5b9083019060208201858111156200007957600080fd5b82516401000000008111828201881017156200009457600080fd5b82525081516020918201929091019080838360005b83811015620000c3578181015183820152602001620000a9565b50505050905090810190601f168015620000f15780820380516001836020036101000a031916815260200191505b50604052602001805160405193929190846401000000008211156200011557600080fd5b9083019060208201858111156200012b57600080fd5b82516401000000008111828201881017156200014657600080fd5b82525081516020918201929091019080838360005b83811015620001755781810151838201526020016200015b565b50505050905090810190601f168015620001a35780820380516001836020036101000a031916815260200191505b5060405260209081015185519093508592508491620001c8916003918501906200026b565b508051620001de9060049060208401906200026b565b50506005805461ff001960ff1990911660121716905550600680546001600160a01b038088166001600160a01b0319928316179092556007805492871692909116919091179055620002308162000255565b50506005805462010000600160b01b0319163362010000021790555062000307915050565b6005805460ff191660ff92909216919091179055565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f10620002ae57805160ff1916838001178555620002de565b82800160010185558215620002de579182015b82811115620002de578251825591602001919060010190620002c1565b50620002ec929150620002f0565b5090565b5b80821115620002ec5760008155600101620002f1565b61119b80620003176000396000f3fe608060405234801561001057600080fd5b506004361061010b5760003560e01c80635c975abb116100a257806395d89b411161007157806395d89b41146103015780639dc29fac14610309578063a457c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a08231146102a55780638456cb59146102cb5780638e50817a146102d35761010b565b8063313ce567116100de578063313ce5671461021d578063395093511461023b5780633f4ba83a1461026757806340c10f19146102715761010b565b806306fdde0314610110578063095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e7575b600080fd5b6101186103bb565b6040805160208082528351818301528351919283929083019185019080838360005b8381101561015257818101518382015260200161013a565b50505050905090810190601f16801561017f5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101b9600480360360408110156101a357600080fd5b506001600160a01b038135169060200135610451565b604080519115158252519081900360200190f35b6101d561046e565b60408051918252519081900360200190f35b6101b9600480360360608110156101fd57600080fd5b506001600160a01b03813581169160208101359091169060400135610474565b6102256104fb565b6040805160ff9092168252519081900360200190f35b6101b96004803603604081101561025157600080fd5b506001600160a01b038135169060200135610504565b61026f610552565b005b61026f6004803603604081101561028757600080fd5b506001600160a01b0381351690602001356105a9565b6101b9610654565b6101d5600480360360208110156102bb57600080fd5b50356001600160a01b0316610662565b61026f61067d565b61026f600480360360408110156102e957600080fd5b506001600160a01b03813581169160200135166106d2565b610118610757565b61026f6004803603604081101561031f57600080fd5b506001600160a01b0381351690602001356107b8565b6101b96004803603604081101561034b57600080fd5b506001600160a01b03813516906020013561085f565b6101b96004803603604081101561037757600080fd5b506001600160
a01b0381351690602001356108c7565b6101d5600480360360408110156103a357600080fd5b506001600160a01b03813581169160200135166108db565b60038054604080516020601f60026000196101006001881615020190951694909404938401819004810282018101909252828152606093909290918301828280156104475780601f1061041c57610100808354040283529160200191610447565b820191906000526020600020905b81548152906001019060200180831161042a57829003601f168201915b5050505050905090565b600061046561045e610906565b848461090a565b50600192915050565b60025490565b60006104818484846109f6565b6104f18461048d610906565b6104ec85604051806060016040528060288152602001611085602891396001600160a01b038a166000908152600160205260408120906104cb610906565b6001600160a01b031681526020810191909152604001600020549190610b51565b61090a565b5060019392505050565b60055460ff1690565b6000610465610511610906565b846104ec8560016000610522610906565b6001600160a01b03908116825260208083019390935260409182016000908120918c168152925290205490610be8565b6007546001600160a01b0316331461059f576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c49565b565b600554610100900460ff16156105f9576040805162461bcd60e51b815260206004820152601060248201526f14185d5cd8589b194e881c185d5cd95960821b604482015290519081900360640190fd5b6006546001600160a01b03163314610646576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6106508282610ced565b5050565b600554610100900460ff1690565b6001600160a01b031660009081526020819052604090205490565b6007546001600160a01b031633146106ca576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610ddd565b6005546201000090046001600160a01b03163314610726576040805162461bcd60e51b815260206004820152600c60248201526b6f6e6c7920466163746f727960a01b604482015290519081900360640190fd5b600780546001600160a01b039283166001600160a01b03199182161790915560068054939092169216919091179055565b60048054604080516020601f60026000196101006001881615020190951694909404938401819004810282018101909252828152606093909290918301828280156104475780601f1061041c57610100808354040283529160200191610447565b600554610100900460ff1615610808576040805162461bcd60e51b815260206004820152601060248201526f14185d5cd8589b194e881c185d5cd95960821b604482015290519081900360640190fd5b6006546001600160a01b03163314610855576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6106508282610e65565b600061046561086c610906565b846104ec856040518060600160405280602581526020016111176025913960016000610896610906565b6001600160a01b03908116825260208083019390935260409182016000908120918d16815292529020549190610b51565b60006104656108d4610906565b84846109f6565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205490565b3390565b6001600160a01b03831661094f5760405162461bcd60e51b81526004018080602001828103825260248152602001806110f36024913960400191505060405180910390fd5b6001600160a01b0382166109945760405162461bcd60e51b815260040180806020018281038252602281526020018061103d6022913960400191505060405180910390fd5b6001600160a01b03808416600081815260016020908152604080832094871680845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a3505050565b6001600160a01b038316610a3b5760405162461bcd60e51b81526004018080602001828103825260258152602001806110ce6025913960400191505060405180910390fd5b6001600160a01b038216610a805760405162461bcd60e51b8152600401808060200182810382526023815260200180610ff8602
3913960400191505060405180910390fd5b610a8b838383610f61565b610ac88160405180606001604052806026815260200161105f602691396001600160a01b0386166000908152602081905260409020549190610b51565b6001600160a01b038085166000908152602081905260408082209390935590841681522054610af79082610be8565b6001600160a01b038084166000818152602081815260409182902094909455805185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef92918290030190a3505050565b60008184841115610be05760405162461bcd60e51b81526004018080602001828103825283818151815260200191508051906020019080838360005b83811015610ba5578181015183820152602001610b8d565b50505050905090810190601f168015610bd25780820380516001836020036101000a031916815260200191505b509250505060405180910390fd5b505050900390565b600082820183811015610c42576040805162461bcd60e51b815260206004820152601b60248201527f536166654d6174683a206164646974696f6e206f766572666c6f770000000000604482015290519081900360640190fd5b9392505050565b600554610100900460ff16610c9c576040805162461bcd60e51b815260206004820152601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b6005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b039092168252519081900360200190a1565b6001600160a01b038216610d48576040805162461bcd60e51b815260206004820152601f60248201527f45524332303a206d696e7420746f20746865207a65726f206164647265737300604482015290519081900360640190fd5b610d5460008383610f61565b600254610d619082610be8565b6002556001600160a01b038216600090815260208190526040902054610d879082610be8565b6001600160a01b0383166000818152602081815260408083209490945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b600554610100900460ff1615610e2d576040805162461bcd60e51b815260206004820152601060248201526f14185d5cd8589b194e881c185d5cd95960821b604482015290519081900360640190fd5b6005805461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b038216610eaa5760405162461bcd60e51b81526004018080602001828103825260218152602001806110ad6021913960400191505060405180910390fd5b610eb682600083610f61565b610ef38160405180606001604052806022815260200161101b602291396001600160a01b0385166000908152602081905260409020549190610b51565b6001600160a01b038316600090815260208190526040902055600254610f199082610fb5565b6002556040805182815290516000916001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c838383610fb0565b610f74610654565b15610fb05760405162461bcd60e51b815260040180806020018281038252602a81526020018061113c602a913960400191505060405180910390fd5b505050565b6000610c4283836040518060400160405280601e81526020017f536166654d6174683a207375627472616374696f6e206f766572666c6f770000815250610b5156fe45524332303a207472616e7366657220746f20746865207a65726f206164647265737345524332303a206275726e20616d6f756e7420657863656564732062616c616e636545524332303a20617070726f766520746f20746865207a65726f206164647265737345524332303a207472616e7366657220616d6f756e7420657863656564732062616c616e636545524332303a207472616e7366657220616d6f756e74206578636565647320616c6c6f77616e636545524332303a206275726e2066726f6d20746865207a65726f206164647265737345524332303a207472616e736665722066726f6d20746865207a65726f206164647265737345524332303a20617070726f76652066726f6d20746865207a65726f206164647265737345524332303a2064656372656173656420616c6c6f77616e63652062656c6f77207a65726f45524332305061757361626c653a20746f6b656e207472616e73666572207768696c652070
6175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b6300000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000045745544800000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda52095d44b8a9af7", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go new file mode 100644 index 0000000..d498868 --- /dev/null +++ b/encoding/codecv1_test.go @@ -0,0 +1,145 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" +) + +func TestCodecV1BlockEncode(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + daBlockV0 := &daBlockV0{} + encoded := hex.EncodeToString(daBlockV0.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv1.NewDABlock(block2, 0) + assert.NoError(t, err) + 
encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv1.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv1.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv1.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv1.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv1.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + codecV0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // sanity check: v0 and v1 block encodings are identical + for _, block := range []*Block{block2, block3, block4, block5, block6, block7} { + blockv0, err := codecV0.NewDABlock(block, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv1, err := codecv1.NewDABlock(block, 0) + assert.NoError(t, err) + encodedv1 := hex.EncodeToString(blockv1.Encode()) + + assert.Equal(t, encodedv0, encodedv1) + } +} + +func TestCodecV1ChunkEncode(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} +
daChunk, err := codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go new file mode 100644 index 0000000..e3870e2 --- /dev/null +++ b/encoding/codecv2_test.go @@ -0,0 +1,145 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" +) + +func TestCodecV2BlockEncode(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block := &daBlockV0{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv2.NewDABlock(block2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, 
"00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv2.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv2.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv2.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv2.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv2.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + codecV0, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + // sanity check: v0 and v2 block encodings are identical + for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { + blockv0, err := codecV0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv2, err := codecv2.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv2 := hex.EncodeToString(blockv2.Encode()) + + assert.Equal(t, encodedv0, encodedv2) + } +} + +func TestCodecV2ChunkEncode(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv2.NewDAChunk(originalChunk, 0) + 
assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go new file mode 100644 index 0000000..65cd914 --- /dev/null +++ b/encoding/codecv3_test.go @@ -0,0 +1,144 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" +) + +func TestCodecV3BlockEncode(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block := &daBlockV0{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv3.NewDABlock(block2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, 
"00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv3.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv3.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv3.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv3.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv3.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + codecV0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // sanity check: v0 and v3 block encodings are identical + for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { + blockv0, err := codecV0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv3, err := codecv3.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv3 := hex.EncodeToString(blockv3.Encode()) + + assert.Equal(t, encodedv0, encodedv3) + } +} +func TestCodecV3ChunkEncode(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv3.NewDAChunk(originalChunk, 0) + 
assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go new file mode 100644 index 0000000..6256921 --- /dev/null +++ b/encoding/codecv4_test.go @@ -0,0 +1,145 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" +) + +func TestCodecV4BlockEncode(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block := &daBlockV0{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv4.NewDABlock(block2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, 
"00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv4.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv4.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv4.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv4.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv4.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + codecV0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // sanity check: v0 and v4 block encodings are identical + for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { + blockv0, err := codecV0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv4, err := codecv4.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv4 := hex.EncodeToString(blockv4.Encode()) + + assert.Equal(t, encodedv0, encodedv4) + } +} + +func TestCodecV4ChunkEncode(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv4.NewDAChunk(originalChunk, 0) + 
assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} From 3e092dd566d7297bb9ef15ea760e8a0e64f3c3bf Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 11:35:16 +0800 Subject: [PATCH 086/126] replace some constants with meaningful vars --- encoding/codecv0.go | 2 +- encoding/codecv0_types.go | 3 ++- encoding/codecv1.go | 3 ++- encoding/codecv1_types.go | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index c058a81..5a192dc 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -71,7 +71,7 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks is 0") } - if len(chunk.Blocks) > 255 { + if len(chunk.Blocks) > math.MaxUint8 { return nil, errors.New("number of blocks exceeds 1 byte") } diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index 36c4e51..5cd512b 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -5,6 +5,7 @@ import ( "encoding/hex" 
"errors" "fmt" + "math" "math/big" "strings" @@ -122,7 +123,7 @@ func (c *daChunkV0) Encode() ([]byte, error) { return nil, errors.New("number of blocks is 0") } - if len(c.blocks) > 255 { + if len(c.blocks) > math.MaxUint8 { return nil, errors.New("number of blocks exceeds 1 byte") } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index ccfb81e..7a55602 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -5,6 +5,7 @@ import ( "encoding/binary" "errors" "fmt" + "math" "math/big" "github.com/scroll-tech/go-ethereum/common" @@ -35,7 +36,7 @@ func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks is 0") } - if len(chunk.Blocks) > 255 { + if len(chunk.Blocks) > math.MaxUint8 { return nil, errors.New("number of blocks exceeds 1 byte") } diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index b696380..d2860dc 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -60,7 +60,7 @@ func (c *daChunkV1) Hash() (common.Hash, error) { if err != nil { return common.Hash{}, err } - if len(hashBytes) != 32 { + if len(hashBytes) != common.HashLength { return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) } dataBytes = append(dataBytes, hashBytes...) From 558d0290d7f85a91a385b28c80344b4b411ef6e0 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 11:38:27 +0800 Subject: [PATCH 087/126] rename daBatchV2 to daBatchV3 --- encoding/codecv3.go | 6 ++--- encoding/codecv3_types.go | 46 +++++++++++++++++++-------------------- encoding/codecv4.go | 4 ++-- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index acb3e34..3efe523 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -54,7 +54,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - return newDABatchV2( + return newDABatchV3( uint8(CodecV3), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -81,7 +81,7 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) } - b := newDABatchV2WithProof( + b := newDABatchV3WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped @@ -182,7 +182,7 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { return totalL1CommitGas, nil } -// JSONFromBytes converts the bytes to a daBatchV2 and then marshals it to JSON. +// JSONFromBytes converts the bytes to a daBatchV3 and then marshals it to JSON. func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) if err != nil { diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index 5b4d755..5249fdc 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -12,8 +12,8 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// daBatchV2 contains metadata about a batch of DAChunks. -type daBatchV2 struct { +// daBatchV3 contains metadata about a batch of DAChunks. +type daBatchV3 struct { daBatchV0 blobVersionedHash common.Hash @@ -24,11 +24,11 @@ type daBatchV2 struct { blobBytes []byte } -// newDABatchV2 is a constructor for daBatchV2 that calls blobDataProofForPICircuit internally. 
-func newDABatchV2(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, +// newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally. +func newDABatchV3(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, - z *kzg4844.Point, blobBytes []byte) (*daBatchV2, error) { - daBatch := &daBatchV2{ + z *kzg4844.Point, blobBytes []byte) (*daBatchV3, error) { + daBatch := &daBatchV3{ daBatchV0: daBatchV0{ version: version, batchIndex: batchIndex, @@ -55,11 +55,11 @@ func newDABatchV2(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp return daBatch, nil } -// newDABatchV2WithProof is a constructor for daBatchV2 that allows directly passing blobDataProof. -func newDABatchV2WithProof(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, +// newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof. +func newDABatchV3WithProof(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, - blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash) *daBatchV2 { - return &daBatchV2{ + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash) *daBatchV3 { + return &daBatchV3{ daBatchV0: daBatchV0{ version: version, batchIndex: batchIndex, @@ -79,7 +79,7 @@ func newDABatchV2WithProof(version uint8, batchIndex, l1MessagePopped, totalL1Me } // Encode serializes the DABatch into bytes. -func (b *daBatchV2) Encode() []byte { +func (b *daBatchV3) Encode() []byte { batchBytes := make([]byte, 193) batchBytes[0] = b.version binary.BigEndian.PutUint64(batchBytes[1:9], b.batchIndex) @@ -95,13 +95,13 @@ func (b *daBatchV2) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *daBatchV2) Hash() common.Hash { +func (b *daBatchV3) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *daBatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *daBatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -126,7 +126,7 @@ func (b *daBatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *daBatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *daBatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -148,19 +148,19 @@ func (b *daBatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *daBatchV2) Blob() *kzg4844.Blob { +func (b *daBatchV3) Blob() *kzg4844.Blob { return b.blob } // BlobBytes returns the blob bytes of the batch. -func (b *daBatchV2) BlobBytes() []byte { +func (b *daBatchV3) BlobBytes() []byte { return b.blobBytes } -// MarshalJSON implements the custom JSON serialization for daBatchV2. +// MarshalJSON implements the custom JSON serialization for daBatchV3. 
// This method is designed to provide prover with batch info in snake_case format. -func (b *daBatchV2) MarshalJSON() ([]byte, error) { - type daBatchV2JSON struct { +func (b *daBatchV3) MarshalJSON() ([]byte, error) { + type daBatchV3JSON struct { Version uint8 `json:"version"` BatchIndex uint64 `json:"batch_index"` L1MessagePopped uint64 `json:"l1_message_popped"` @@ -174,7 +174,7 @@ func (b *daBatchV2) MarshalJSON() ([]byte, error) { BlobDataProof [2]string `json:"blob_data_proof"` } - return json.Marshal(&daBatchV2JSON{ + return json.Marshal(&daBatchV3JSON{ Version: b.version, BatchIndex: b.batchIndex, L1MessagePopped: b.l1MessagePopped, @@ -193,16 +193,16 @@ func (b *daBatchV2) MarshalJSON() ([]byte, error) { } // Version returns the version of the DABatch. -func (b *daBatchV2) Version() uint8 { +func (b *daBatchV3) Version() uint8 { return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -func (b *daBatchV2) SkippedL1MessageBitmap() []byte { +func (b *daBatchV3) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } // DataHash returns the data hash of the DABatch. -func (b *daBatchV2) DataHash() common.Hash { +func (b *daBatchV3) DataHash() common.Hash { return b.dataHash } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 312df3c..82fe7c0 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -79,7 +79,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - return newDABatchV2( + return newDABatchV3( uint8(CodecV4), // version batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped @@ -106,7 +106,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) } - b := newDABatchV2WithProof( + b := newDABatchV3WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped From 24da9052af49729f3092791ecc702bc64f460ee8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 12:37:05 +0800 Subject: [PATCH 088/126] add chunk hash unit tests --- encoding/codecv0_test.go | 65 ++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 86 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 86 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 86 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 86 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 409 insertions(+) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 603cc3a..0ea6f06 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -120,3 +120,68 @@ func TestCodecV0ChunkEncode(t *testing.T) { encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } + +func TestCodecV0ChunkHash(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV0{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: 
types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xde642c68122634b33fa1e6e4243b17be3bfd0dc6f996f204ef6d7522516bd840", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xde29f4371cc396b2e7c536cdc7a7c20ac5c728cbb8af3247074c746ff452632b", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x9e643c8a9203df542e39d9bfdcb07c99575b3c3d557791329fef9d83cc4147d0", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index d498868..4ea2065 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -143,3 +143,89 @@ func TestCodecV1ChunkEncode(t *testing.T) { encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } + +func TestCodecV1ChunkHash(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: 
types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index e3870e2..1e6357d 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -143,3 +143,89 @@ func TestCodecV2ChunkEncode(t *testing.T) { encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } + +func TestCodecV2ChunkHash(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + 
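+
+	// Aside (sketch of the assumed daChunkV1 hashing rule, matching the
+	// cases exercised below): the chunk hash is keccak256 over the first
+	// 58 bytes of each block context (dropping the trailing 2-byte
+	// numL1Messages field) followed by the tx hashes of L1 message
+	// transactions only. For this empty chunk the preimage is 58 zero
+	// bytes; recomputing it needs the go-ethereum crypto import:
+	assert.Equal(t, crypto.Keccak256Hash(make([]byte, 58)), hash)
+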
assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 
65cd914..46ac01c 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -142,3 +142,89 @@ func TestCodecV3ChunkEncode(t *testing.T) { encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } + +func TestCodecV3ChunkHash(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err 
= codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 6256921..648ef99 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -143,3 +143,89 @@ func TestCodecV4ChunkEncode(t *testing.T) { encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } + +func TestCodecV4ChunkHash(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + 
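+
+	// Aside: the "invalid hash" cases in these tests rely on
+	// common.FromHex("0xg") failing to decode, so the resulting byte slice
+	// can never have the common.HashLength (32) bytes a tx hash needs and
+	// Hash() returns the "unexpected hash" error instead of panicking
+	// (assumes the go-ethereum common import):
+	assert.NotEqual(t, common.HashLength, len(common.FromHex("0xg")))
+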
assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} From aa46689891a3c246f812ae874869cd8dfc4acdfb Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 13:51:49 +0800 Subject: [PATCH 089/126] add batch encode --- encoding/codecv0_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 380 insertions(+) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 0ea6f06..95dca2f 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -185,3 +185,79 @@ func TestCodecV0ChunkHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } + +func TestCodecV0BatchEncode(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV0), + }, + } + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000008fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3e0000000000000000000000000000000000000000000000000000000000000000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, 
"0000000000000000000000000000000000000000000000000019d1fad630fcc61bd49949fa01e58d198f67a58f1c4aea43f32714ceaa9e0e760000000000000000000000000000000000000000000000000000000000000000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "00000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d52080000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000002a000000000000002a908c20b6255fd8cd8fb3a7995e9980007ebedcfe359cee2d8e899aefe319836e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) +} diff --git 
a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 4ea2065..f764313 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -229,3 +229,79 @@ func TestCodecV1ChunkHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } + +func TestCodecV1BatchEncode(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV1), + }, + } + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "010000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc75530000000000000000000000000000000000000000000000000000000000000000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f94110000000000000000000000000000000000000000000000000000000000000000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "010000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "010000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b401a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, 
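+ // Relative to CodecV0, the CodecV1 fixtures carry one extra field: a 32-byte
+ // blobVersionedHash (an EIP-4844 versioned hash, hence the leading 0x01 byte)
+ // inserted between dataHash and parentBatchHash, giving a 121-byte fixed header
+ // ahead of the skipped-L1-message bitmap. Read off the expected hex, offsets
+ // [57:89] hold the blob versioned hash and [89:121] the parent batch hash.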
"010000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "01000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d520801a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a60000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "010000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8014ae5927a983081a8bcdbcce19e926c9e4c56e2dc89c91c32c034b875b8a1ca00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "010000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e13476701b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 1e6357d..b012ea4 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -229,3 +229,79 @@ func TestCodecV2ChunkHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } + +func TestCodecV2BatchEncode(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV2), + }, + } + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, "02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, 
"020000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd70000000000000000000000000000000000000000000000000000000000000000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "02000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad40000000000000000000000000000000000000000000000000000000000000000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "020000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "020000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc53394137000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "020000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "02000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc5339413700000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = 
hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "020000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa002900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "020000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb3363200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 46ac01c..57f4d3d 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -228,3 +228,79 @@ func TestCodecV3ChunkHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } + +func TestCodecV3BatchEncode(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV3{ + daBatchV0: daBatchV0{ + version: uint8(CodecV3), + }, + } + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, "03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + 
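+ // CodecV3 extends the V1/V2 header with an 8-byte lastBlockTimestamp and a
+ // 64-byte blobDataProof (two 32-byte words, presumably the KZG challenge point
+ // and claimed evaluation), and drops the trailing skipped-message bitmap, so
+ // every expected encoding in this test is a fixed 193 bytes (386 hex characters).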
originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(originalBatch) + 
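+ // Across the per-version encode tests, the dataHash segment is identical for
+ // CodecV1 through CodecV4 given the same chunks: it commits to the chunk hashes
+ // and is independent of the blob encoding. Only the version byte, the blob
+ // versioned hash, and the V3+ timestamp/proof tail differ between fixtures.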
assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 648ef99..1643983 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -229,3 +229,79 @@ func TestCodecV4ChunkHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } + +func TestCodecV4BatchEncode(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV3{ + daBatchV0: daBatchV0{ + version: uint8(CodecV4), + }, + } + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, 
"040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, 
"040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) +} From 22c3772f7eb413e75afe569a9184ea335a2c9fb5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 15:06:07 +0800 Subject: [PATCH 090/126] add batch hash unit tests --- encoding/codecv0_test.go | 68 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 68 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 68 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 68 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 68 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 340 insertions(+) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 95dca2f..4c40f94 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "testing" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" ) @@ -261,3 +262,70 @@ func TestCodecV0BatchEncode(t *testing.T) { encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "000000000000000000000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) } + +func TestCodecV0BatchHash(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV0), + }, + } + assert.Equal(t, common.HexToHash("0x7f74e58579672e582998264e7e8191c51b6b8981afd0f9bf1a2ffc3abb39e678"), batch.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x4605465b7470c8565b123330d7186805caf9a7f2656d8e9e744b62e14ca22c3d"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x922e004553d563bde6560a827c6449200bfd84f92917dfa14d740f26e52c59bc"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xfbb081f25d6d06aefd76f062eee50885faf5bb050c8f31d533fc8560e655b690"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x99f9648e4d090f1222280bec95a3f1e39c6cbcd4bff21eb2ae94b1536bb23acc"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := 
&Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe0950d500d47df4e9c443978682bcccfc8d50983f99ec9232067333a7d32a9d2"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x745a74773cdc7cd0b86b50305f6373c7efeaf051b38a71ea561333708e8a90d9"), daBatch.Hash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x85b5c152c5c0b25731bfab6f4d309e94a42ddf0f4c9235189e5cd19c5c008522"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc5e787fa6a83374135c3b95bd8325bcc0440cd5eb2d71bb31ddca67dd2d44f64"), daBatch.Hash()) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index f764313..1b04a35 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "testing" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" ) @@ -305,3 +306,70 @@ func TestCodecV1BatchEncode(t *testing.T) { encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e13476701b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) } + +func TestCodecV1BatchHash(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV1), + }, + } + assert.Equal(t, common.HexToHash("0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564"), batch.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd557b02638c0385d5124f7fc188a025b33f8819b7f78c000751404997148ab8b"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xf13c7e249d00941c59fe4cd970241bbd6753eede8e043c438165674031792b3b"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xb64208f07fab641f7ebf831686d05ad667da0c7bfabcbd9c878cc22cbc8032fd"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, 
"testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x4f7426d164e885574a661838406083f5292b0a1bc6dc20c51129eed0723b8a27"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xfce89ec2aed85cebeb20eea722e3ae4ec622bff49218dbe249a2d358e2e85451"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x8fc063179b709bab338674278bb7b70dce2879a4e11ea857b3a202fb3313559f"), daBatch.Hash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xf1c94cdf45967bc60bfccd599edd8cb07fd0201f41ab068637834f86140f62bf"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0"), daBatch.Hash()) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index b012ea4..2b480e0 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "testing" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" ) @@ -305,3 +306,70 @@ func TestCodecV2BatchEncode(t *testing.T) { encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb3363200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) } + +func TestCodecV2BatchHash(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV2), + }, + } + assert.Equal(t, common.HexToHash("0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592"), batch.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x57553c35f981626b4d1a73c816aa8d8fad83c460fc049c5792581763f7e21b13"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x0f8e5b5205c5d809bf09047f37b558f4eb388c9c4eb23291cd97810d06654409"), daBatch.Hash()) + + block4 := 
readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc59155dc0ae7d7d3fc29f0a9c6042f14dc58e3a1f9c0417f52bac2c4a8b33014"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x417509641fb0c0d1c07d80e64aab13934f828cb4f09608722bf8126a68c04617"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe9c82b48e2a54c9206f57897cb870536bd22066d2af3d03aafe8a6a39add7635"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x5e3d20c5b3f56cc5a28e7431241b3ce3d484b12cfb0b3228f378b196beeb3a53"), daBatch.Hash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x19b99491401625d92e16f7df6705219cc55e48e4b08db7bc4020e6934076f5f7"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc5daf2ea5a3107c13b2994fb547336a7dca25cd352c051b6d9b9759d77e95fd2"), daBatch.Hash()) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 57f4d3d..680b276 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "testing" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" ) @@ -304,3 +305,70 @@ func TestCodecV3BatchEncode(t *testing.T) { encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) } + +func TestCodecV3BatchHash(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + // empty batch + batch := &daBatchV3{ + daBatchV0: daBatchV0{ + version: uint8(CodecV3), + }, + } + assert.Equal(t, common.HexToHash("0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0"), batch.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, 
common.HexToHash("0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc"), daBatch.Hash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb"), daBatch.Hash()) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 1643983..ea96c0e 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "testing" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" ) @@ -305,3 +306,70 @@ func TestCodecV4BatchEncode(t *testing.T) { encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) } + +func TestCodecV4BatchHash(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + // empty batch + 
batch := &daBatchV3{ + daBatchV0: daBatchV0{ + version: uint8(CodecV4), + }, + } + assert.Equal(t, common.HexToHash("0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c"), batch.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x0684ec7f79e1950823f3aff20c6a3cde03357eb72027ee663347ac77d46f7565"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x094d6fb43f3bc4af387c8494f5b4d7ba82c9895a0122fbbe34f6a02cb512c564"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xda211c0402d1546fe4964c0e3d61621f6f020c851fd255a2c55419a4e091eae9"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x6bc8b8a6125c4f82afcbf8f190cefd002a61606fb751aca04b99f34a7459f678"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x985335548d6816cf955f39baa9e62ffec7068ac8591f7c7ea5a39d5b4929c29f"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x4ae2e62edaf78d4c9278c170b3ea2b0fb81c95c8875f523dbe889fe5035791ab"), daBatch.Hash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc4120eefe42981b827f5c9a67b8f9b14007bf624694e5702f0f8a665b68832ad"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xa152b2e1594032ce70be219cfa9781f0840120ac2bf3fcad24893d77c4b74077"), daBatch.Hash()) +} From 86a2092ba481372a40358bed57d8e77e586ba94a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 17:45:25 +0800 Subject: [PATCH 091/126] add batch data hash & json marshal unit tests --- encoding/codecv0_test.go | 59 +++++++++++ encoding/codecv1_test.go | 59 +++++++++++ encoding/codecv2_test.go | 59 +++++++++++ encoding/codecv3_test.go | 213 ++++++++++++++++++++++++++++++++++++-- encoding/codecv3_types.go | 38 +++---- encoding/codecv4_test.go | 213 ++++++++++++++++++++++++++++++++++++-- 6 files changed, 608 insertions(+), 33 
deletions(-) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 4c40f94..a0bfb93 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -329,3 +329,62 @@ func TestCodecV0BatchHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xc5e787fa6a83374135c3b95bd8325bcc0440cd5eb2d71bb31ddca67dd2d44f64"), daBatch.Hash()) } + +func TestCodecV0BatchDataHash(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x8fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3e"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x19d1fad630fcc61bd49949fa01e58d198f67a58f1c4aea43f32714ceaa9e0e76"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x908c20b6255fd8cd8fb3a7995e9980007ebedcfe359cee2d8e899aefe319836e"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc"), daBatch.DataHash()) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 1b04a35..1e48f1a 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -373,3 +373,62 @@ func TestCodecV1BatchHash(t *testing.T) { 
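+ // The data-hash tests added below exercise DataHash() in isolation. Unlike the
+ // full batch hash, the data hash covers neither the blob versioned hash nor the
+ // V3+ timestamp/proof fields, so from CodecV1 onward the same chunk combination
+ // yields the same expected value in every per-version test file (the CodecV0
+ // values differ, its chunk hashes being computed differently).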
assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0"), daBatch.Hash()) } + +func TestCodecV1BatchDataHash(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 2b480e0..aa0f83a 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -373,3 +373,62 @@ func TestCodecV2BatchHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xc5daf2ea5a3107c13b2994fb547336a7dca25cd352c051b6d9b9759d77e95fd2"), daBatch.Hash()) } + +func TestCodecV2BatchDataHash(t *testing.T) { + codecv2, err := 
CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 680b276..d03095d 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -2,11 +2,13 @@ package encoding import ( "encoding/hex" + "encoding/json" "testing" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV3BlockEncode(t *testing.T) { @@ -234,13 +236,13 @@ func TestCodecV3BatchEncode(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) assert.NoError(t, err) - // empty batch - batch := &daBatchV3{ + // empty daBatch + daBatchV3 := 
&daBatchV3{ daBatchV0: daBatchV0{ version: uint8(CodecV3), }, } - encoded := hex.EncodeToString(batch.Encode()) + encoded := hex.EncodeToString(daBatchV3.Encode()) assert.Equal(t, "03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") @@ -310,13 +312,13 @@ func TestCodecV3BatchHash(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) assert.NoError(t, err) - // empty batch - batch := &daBatchV3{ + // empty daBatch + daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ version: uint8(CodecV3), }, } - assert.Equal(t, common.HexToHash("0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0"), batch.Hash()) + assert.Equal(t, common.HexToHash("0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0"), daBatchV3.Hash()) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -372,3 +374,202 @@ func TestCodecV3BatchHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb"), daBatch.Hash()) } + +func TestCodecV3BatchDataHash(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: 
[]*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} + +func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) { + t.Run("Case 1", func(t *testing.T) { + expectedJsonStr := `{ + "version": 3, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 3, + batchIndex: 293212, + l1MessagePopped: 7, + totalL1MessagePopped: 904750, + dataHash: common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + lastBlockTimestamp: 1721130505, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), + common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 2", func(t *testing.T) { + expectedJsonStr := `{ + "version": 4, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 123, 
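+				// NB (editorial, hedged): the values in this fixture (version 4,
+				// batch index 123, the 0xabacadaeaf... parent hash) are synthetic
+				// placeholders for the marshal round-trip check, not data from a
+				// real batch.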
+ l1MessagePopped: 0, + totalL1MessagePopped: 0, + dataHash: common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), + parentBatchHash: common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), + }, + blobVersionedHash: common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), + lastBlockTimestamp: 1720174236, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), + common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 3", func(t *testing.T) { + expectedJsonStr := `{ + "version": 3, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 3, + batchIndex: 293205, + l1MessagePopped: 0, + totalL1MessagePopped: 904737, + dataHash: common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), + parentBatchHash: common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), + }, + blobVersionedHash: common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), + lastBlockTimestamp: 1721129563, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), + common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) +} diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index 5249fdc..5650185 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -161,30 +161,26 @@ func (b *daBatchV3) BlobBytes() []byte { // This method is designed to provide prover with batch info in snake_case format. 
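// A hedged sketch of the resulting shape (values are illustrative placeholders,
// not taken from any real batch; the snake_case keys mirror the struct tags of
// daBatchV3JSON below):
//
//	{
//	  "version": 3,
//	  "batch_index": 123,
//	  "l1_message_popped": 0,
//	  "total_l1_message_popped": 0,
//	  "data_hash": "0x...",
//	  "parent_batch_hash": "0x...",
//	  "blob_versioned_hash": "0x...",
//	  "last_block_timestamp": 0,
//	  "blob_data_proof": ["0x...", "0x..."]
//	}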
func (b *daBatchV3) MarshalJSON() ([]byte, error) { type daBatchV3JSON struct { - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash string `json:"data_hash"` - ParentBatchHash string `json:"parent_batch_hash"` - SkippedL1MessageBitmap string `json:"skipped_l1_message_bitmap"` - BlobVersionedHash string `json:"blob_versioned_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobBytes string `json:"blob_bytes"` - BlobDataProof [2]string `json:"blob_data_proof"` + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash string `json:"data_hash"` + ParentBatchHash string `json:"parent_batch_hash"` + BlobVersionedHash string `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]string `json:"blob_data_proof"` } return json.Marshal(&daBatchV3JSON{ - Version: b.version, - BatchIndex: b.batchIndex, - L1MessagePopped: b.l1MessagePopped, - TotalL1MessagePopped: b.totalL1MessagePopped, - DataHash: b.dataHash.Hex(), - ParentBatchHash: b.parentBatchHash.Hex(), - SkippedL1MessageBitmap: common.Bytes2Hex(b.skippedL1MessageBitmap), - BlobVersionedHash: b.blobVersionedHash.Hex(), - LastBlockTimestamp: b.lastBlockTimestamp, - BlobBytes: common.Bytes2Hex(b.blobBytes), + Version: b.version, + BatchIndex: b.batchIndex, + L1MessagePopped: b.l1MessagePopped, + TotalL1MessagePopped: b.totalL1MessagePopped, + DataHash: b.dataHash.Hex(), + ParentBatchHash: b.parentBatchHash.Hex(), + BlobVersionedHash: b.blobVersionedHash.Hex(), + LastBlockTimestamp: b.lastBlockTimestamp, BlobDataProof: [2]string{ b.blobDataProof[0].Hex(), b.blobDataProof[1].Hex(), diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index ea96c0e..20f257e 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -2,11 +2,13 @@ package encoding import ( "encoding/hex" + "encoding/json" "testing" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV4BlockEncode(t *testing.T) { @@ -235,13 +237,13 @@ func TestCodecV4BatchEncode(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) assert.NoError(t, err) - // empty batch - batch := &daBatchV3{ + // empty daBatch + daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ version: uint8(CodecV4), }, } - encoded := hex.EncodeToString(batch.Encode()) + encoded := hex.EncodeToString(daBatchV3.Encode()) assert.Equal(t, "04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") @@ -311,13 +313,13 @@ func TestCodecV4BatchHash(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) assert.NoError(t, err) - // empty batch - batch := &daBatchV3{ + // empty daBatch + daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ version: uint8(CodecV4), }, } - assert.Equal(t, 
common.HexToHash("0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c"), batch.Hash()) + assert.Equal(t, common.HexToHash("0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c"), daBatchV3.Hash()) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -373,3 +375,202 @@ func TestCodecV4BatchHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xa152b2e1594032ce70be219cfa9781f0840120ac2bf3fcad24893d77c4b74077"), daBatch.Hash()) } + +func TestCodecV4BatchDataHash(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} + +func TestCodecV4DABatchJSONMarshalUnmarshal(t 
*testing.T) { + t.Run("Case 1", func(t *testing.T) { + expectedJsonStr := `{ + "version": 4, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 293212, + l1MessagePopped: 7, + totalL1MessagePopped: 904750, + dataHash: common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + lastBlockTimestamp: 1721130505, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), + common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 2", func(t *testing.T) { + jsonStr := `{ + "version": 5, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 5, + batchIndex: 123, + l1MessagePopped: 0, + totalL1MessagePopped: 0, + dataHash: common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), + parentBatchHash: common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), + }, + blobVersionedHash: common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), + lastBlockTimestamp: 1720174236, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), + common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON 
string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 3", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 293205, + l1MessagePopped: 0, + totalL1MessagePopped: 904737, + dataHash: common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), + parentBatchHash: common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), + }, + blobVersionedHash: common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), + lastBlockTimestamp: 1721129563, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), + common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) +} From 99231acf2a9c5e9fc10a0543bbacbd573ae46da8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 18:18:19 +0800 Subject: [PATCH 092/126] add calldata size unit tests --- encoding/codecv0_test.go | 48 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 48 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 48 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 48 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 48 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 240 insertions(+) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index a0bfb93..10c6a6d 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -388,3 +388,51 @@ func TestCodecV0BatchDataHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc"), daBatch.DataHash()) } + +func TestCodecV0CalldataSizeEstimation(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(298), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := 
codecv0.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(298), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5745), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5745), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(96), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(96), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6043), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(96), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6139), batch5CalldataSize) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 1e48f1a..a8b5672 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -432,3 +432,51 @@ func TestCodecV1BatchDataHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) } + +func TestCodecV1CalldataSizeEstimation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), 
batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index aa0f83a..8d57757 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -432,3 +432,51 @@ func TestCodecV2BatchDataHash(t *testing.T) { assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) } + +func TestCodecV2CalldataSizeEstimation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index d03095d..11783be 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -573,3 +573,51 @@ func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) { assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") }) } + +func TestCodecV3CalldataSizeEstimation(t *testing.T) { + codecv3, 
err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 20f257e..c4538e9 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -574,3 +574,51 @@ func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") }) } + +func TestCodecV4CalldataSizeEstimation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: 
[]*Block{block4}} + chunk4CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} From c62b2d0b27f752df24548ade0d5d9ae2896fe035 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 18:54:12 +0800 Subject: [PATCH 093/126] add commit gas estimation --- encoding/codecv0_test.go | 49 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 48 +++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 48 +++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 48 +++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 48 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 241 insertions(+) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 10c6a6d..f11074f 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -436,3 +436,52 @@ func TestCodecV0CalldataSizeEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(6139), batch5CalldataSize) } + +func TestCodecV0CommitGasEstimation(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + assert.NoError(t, err) + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv0.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(5082), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv0.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(161631), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv0.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(93786), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv0.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(250908), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv0.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(4369), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv0.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(160929), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv0.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(98822), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv0.EstimateChunkL1CommitGas(chunk6) 
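+	// Editorial note (hedged): CodecV0 commits chunk data via calldata rather
+	// than a 4844 blob, which presumably explains why its per-chunk commit gas
+	// above tracks payload size (5082 / 93786 / 4369) so much more steeply than
+	// the near-flat per-chunk figures of the blob-based codecs.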
+ assert.NoError(t, err) + assert.Equal(t, uint64(4369), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv0.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(260958), batch5Gas) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index a8b5672..0e4a39d 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -480,3 +480,51 @@ func TestCodecV1CalldataSizeEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(180), batch5CalldataSize) } + +func TestCodecV1CommitGasEstimation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv1.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(1124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv1.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv1.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(1124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv1.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv1.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv1.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(160302), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv1.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(2202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv1.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv1.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(163087), batch5Gas) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 8d57757..38efda3 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -480,3 +480,51 @@ func TestCodecV2CalldataSizeEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(180), batch5CalldataSize) } + +func TestCodecV2CommitGasEstimation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv2.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(1124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv2.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv2.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(1124), 
chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv2.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv2.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv2.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(160302), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv2.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(2202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv2.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv2.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(163087), batch5Gas) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 11783be..2f448f8 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -621,3 +621,51 @@ func TestCodecV3CalldataSizeEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(180), batch5CalldataSize) } + +func TestCodecV3CommitGasEstimation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv3.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv3.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv3.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv3.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv3.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv3.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv3.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv3.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv3.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(213087), batch5Gas) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index c4538e9..2313830 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -622,3 +622,51 @@ 
func TestCodecV4CalldataSizeEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(180), batch5CalldataSize) } + +func TestCodecV4CommitGasEstimation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv4.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv4.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv4.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv4.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv4.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv4.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv4.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv4.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv4.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(213087), batch5Gas) +} From 7df1b271ba27acdb8e8ef2b07c751961e8f5845f Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 19:12:37 +0800 Subject: [PATCH 094/126] add BatchSizeAndBlobSizeEstimation unit tests --- encoding/codecv1_test.go | 57 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 57 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 57 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 57 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 228 insertions(+) diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 0e4a39d..c26581f 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -528,3 +528,60 @@ func TestCodecV1CommitGasEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(163087), batch5Gas) } + +func TestCodecV1BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(302), chunk2BatchBytesSize) + assert.Equal(t, uint64(302), chunk2BlobSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(302), 
batch2BatchBytesSize) + assert.Equal(t, uint64(302), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5929), chunk3BatchBytesSize) + assert.Equal(t, uint64(5929), chunk3BlobSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5929), batch3BatchBytesSize) + assert.Equal(t, uint64(5929), batch3BlobSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(98), chunk4BatchBytesSize) + assert.Equal(t, uint64(98), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(98), blob4BatchBytesSize) + assert.Equal(t, uint64(98), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, chunk5BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6166), chunk5BatchBytesSize) + assert.Equal(t, uint64(6166), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(98), chunk6BatchBytesSize) + assert.Equal(t, uint64(98), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6199), batch5BatchBytesSize) + assert.Equal(t, uint64(6199), batch5BlobSize) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 38efda3..228c4b8 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -528,3 +528,60 @@ func TestCodecV2CommitGasEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(163087), batch5Gas) } + +func TestCodecV2BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(237), chunk2BlobSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(237), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2933), chunk3BlobSize) + batch3 := 
&Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2933), batch3BlobSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(54), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(54), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, chunk5BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3149), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(54), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3186), batch5BlobSize) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 2f448f8..916ac4a 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -669,3 +669,60 @@ func TestCodecV3CommitGasEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(213087), batch5Gas) } + +func TestCodecV3BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(237), chunk2BlobSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(237), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2933), chunk3BlobSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2933), batch3BlobSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := 
codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(54), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(54), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, chunk5BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3149), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(54), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3186), batch5BlobSize) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 2313830..67e697d 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -670,3 +670,60 @@ func TestCodecV4CommitGasEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(213087), batch5Gas) } + +func TestCodecV4BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(238), chunk2BlobSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(238), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2934), chunk3BlobSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2934), batch3BlobSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(55), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + 
assert.Equal(t, uint64(55), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, chunk5BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3150), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(55), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3187), batch5BlobSize) +} From f3cb3a25bc05a80eb3eba42e46ef492d06eed5c1 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 19:33:14 +0800 Subject: [PATCH 095/126] add L1MessagePopped unit tests --- encoding/codecv0_test.go | 79 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv1_test.go | 79 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 79 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 79 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 79 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 395 insertions(+) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index f11074f..0b4a7fd 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -485,3 +485,82 @@ func TestCodecV0CommitGasEstimation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(260958), batch5Gas) } + +func TestCodecV0BatchL1MessagePopped(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped) + + trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{trace3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped) + + trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{trace4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, uint64(11), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(11), daBatch.(*daBatchV0).totalL1MessagePopped) + + trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{trace5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped) + + originalBatch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv0.NewDABatch(originalBatch) + assert.NoError(t, err) 
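+	// With TotalL1MessagePoppedBefore = 37, only queue indices 37..41 are newly
+	// popped by this batch (5 messages), while the cumulative total stays at 42.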
diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go
index f11074f..0b4a7fd 100644
--- a/encoding/codecv0_test.go
+++ b/encoding/codecv0_test.go
@@ -485,3 +485,82 @@ func TestCodecV0CommitGasEstimation(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(260958), batch5Gas)
 }
+
+func TestCodecV0BatchL1MessagePopped(t *testing.T) {
+	codecv0, err := CodecFromVersion(CodecV0)
+	assert.NoError(t, err)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	chunk2 := &Chunk{Blocks: []*Block{block2}}
+	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV0).l1MessagePopped)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(5), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV0).l1MessagePopped) // skip 7, include 3
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).l1MessagePopped) // skip 255, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(256), daBatch.(*daBatchV0).l1MessagePopped) // skip 254, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv0.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(32), daBatch.(*daBatchV0).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
+}
diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go
index c26581f..e404156 100644
--- a/encoding/codecv1_test.go
+++ b/encoding/codecv1_test.go
@@ -585,3 +585,82 @@ func TestCodecV1BatchSizeAndBlobSizeEstimation(t *testing.T) {
 	assert.Equal(t, uint64(6199), batch5BatchBytesSize)
 	assert.Equal(t, uint64(6199), batch5BlobSize)
 }
+
+func TestCodecV1BatchL1MessagePopped(t *testing.T) {
+	codecv1, err := CodecFromVersion(CodecV1)
+	assert.NoError(t, err)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	chunk2 := &Chunk{Blocks: []*Block{block2}}
+	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).l1MessagePopped) // skip 255, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+}
diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go
index 228c4b8..d1582bf 100644
--- a/encoding/codecv2_test.go
+++ b/encoding/codecv2_test.go
@@ -585,3 +585,82 @@ func TestCodecV2BatchSizeAndBlobSizeEstimation(t *testing.T) {
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3186), batch5BlobSize)
 }
+
+func TestCodecV2BatchL1MessagePopped(t *testing.T) {
+	codecv2, err := CodecFromVersion(CodecV2)
+	assert.NoError(t, err)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	chunk2 := &Chunk{Blocks: []*Block{block2}}
+	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).l1MessagePopped) // skip 255, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv2.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
+}
diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go
index 916ac4a..19d027c 100644
--- a/encoding/codecv3_test.go
+++ b/encoding/codecv3_test.go
@@ -726,3 +726,82 @@ func TestCodecV3BatchSizeAndBlobSizeEstimation(t *testing.T) {
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3186), batch5BlobSize)
 }
+
+func TestCodecV3BatchL1MessagePopped(t *testing.T) {
+	codecv3, err := CodecFromVersion(CodecV3)
+	assert.NoError(t, err)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	chunk2 := &Chunk{Blocks: []*Block{block2}}
+	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).l1MessagePopped) // skip 255, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv3.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+}
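The five copies of this test differ only in the codec version and in the concrete type behind the DABatch interface: CodecV0 produces *daBatchV0, CodecV1 and CodecV2 share *daBatchV1, and CodecV3 and CodecV4 (below) share *daBatchV3, which is why the type assertions change from file to file. A hypothetical version-agnostic accessor, sketched here only to make that mapping explicit (the tests intentionally assert on the concrete types instead):

	// poppedCounters unwraps the version-specific batch structs used in
	// these tests; the case list encodes the version-to-struct mapping.
	func poppedCounters(b DABatch) (popped, total uint64) {
		switch v := b.(type) {
		case *daBatchV0: // CodecV0
			return v.l1MessagePopped, v.totalL1MessagePopped
		case *daBatchV1: // CodecV1, CodecV2
			return v.l1MessagePopped, v.totalL1MessagePopped
		case *daBatchV3: // CodecV3, CodecV4
			return v.l1MessagePopped, v.totalL1MessagePopped
		}
		return 0, 0
	}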
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index 67e697d..ccc9e2b 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -727,3 +727,82 @@ func TestCodecV4BatchSizeAndBlobSizeEstimation(t *testing.T) {
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3187), batch5BlobSize)
 }
+
+func TestCodecV4BatchL1MessagePopped(t *testing.T) {
+	codecv4, err := CodecFromVersion(CodecV4)
+	assert.NoError(t, err)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	chunk2 := &Chunk{Blocks: []*Block{block2}}
+	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3
+	assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).l1MessagePopped) // skip 255, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2
+	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+
+	originalBatch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv4.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped)
+	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
+}

From 4709903ae337ce2cbc9fe41f241bd660d2a226c4 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 16 Oct 2024 19:58:59 +0800
Subject: [PATCH 096/126] add BlobEncodingAndHashing unit tests

---
 encoding/codecv0_test.go |  24 ++++-----
 encoding/codecv1_test.go | 112 ++++++++++++++++++++++++++++++++++-----
 encoding/codecv2_test.go | 106 +++++++++++++++++++++++++++++++-----
 encoding/codecv3_test.go | 106 +++++++++++++++++++++++++++++++-----
 encoding/codecv4_test.go | 106 +++++++++++++++++++++++++++++++-----
 5 files changed, 386 insertions(+), 68 deletions(-)
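Besides renaming the trace* locals to block* for consistency with the other fixtures, this patch pins down the exact blob bytes and the EIP-4844 versioned hash per codec version. For reading the new assertions: a versioned hash commits to the KZG commitment of the blob, and under the standard EIP-4844 rule it is the SHA-256 of the commitment with the first byte overwritten by the version 0x01. A minimal sketch of that rule (helper name ours; the codec itself derives the hash through its kzg4844 dependency):

	// versionedHash applies the EIP-4844 rule: version byte 0x01 followed
	// by the last 31 bytes of sha256(commitment).
	// Assumes a 48-byte KZG commitment; import "crypto/sha256".
	func versionedHash(commitment [48]byte) [32]byte {
		h := sha256.Sum256(commitment[:])
		h[0] = 0x01
		return h
	}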
diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go
index 0b4a7fd..6a45107 100644
--- a/encoding/codecv0_test.go
+++ b/encoding/codecv0_test.go
@@ -498,24 +498,24 @@ func TestCodecV0BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
-	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{block3}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
 	daBatch, err = codecv0.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
-	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{block4}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
 	daBatch, err = codecv0.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
-	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{block5}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
@@ -528,16 +528,16 @@ func TestCodecV0BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(5), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
-	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{block6}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
 	daBatch, err = codecv0.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV0).l1MessagePopped) // skip 7, include 3
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
-	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{block7}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
 	daBatch, err = codecv0.NewDABatch(originalBatch)
 	assert.NoError(t, err)
@@ -550,8 +550,8 @@ func TestCodecV0BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(256), daBatch.(*daBatchV0).l1MessagePopped) // skip 254, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
-	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41
 	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
 	daBatch, err = codecv0.NewDABatch(originalBatch)
 	assert.NoError(t, err)
diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go
index e404156..2116a13 100644
--- a/encoding/codecv1_test.go
+++ b/encoding/codecv1_test.go
@@ -2,6 +2,7 @@ package encoding
 
 import (
 	"encoding/hex"
+	"strings"
 	"testing"
 
 	"github.com/scroll-tech/go-ethereum/common"
@@ -53,12 +54,12 @@ func TestCodecV1BlockEncode(t *testing.T) {
 	encoded = hex.EncodeToString(daBlock.Encode())
 	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
 
-	codecV0, err := CodecFromVersion(CodecV1)
+	codecv0, err := CodecFromVersion(CodecV0)
 	assert.NoError(t, err)
 
 	// sanity check: v0 and v1 block encodings are identical
 	for _, block := range []*Block{block2, block3, block4, block5, block6, block7} {
-		blockv0, err := codecV0.NewDABlock(block, 0)
+		blockv0, err := codecv0.NewDABlock(block, 0)
 		assert.NoError(t, err)
 		encodedv0 := hex.EncodeToString(blockv0.Encode())
 
@@ -598,24 +599,24 @@ func TestCodecV1BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
-	chunk3 := &Chunk{Blocks: []*Block{trace3}}
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{block3}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
 	daBatch, err = codecv1.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
-	chunk4 := &Chunk{Blocks: []*Block{trace4}}
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	chunk4 := &Chunk{Blocks: []*Block{block4}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
 	daBatch, err = codecv1.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
-	chunk5 := &Chunk{Blocks: []*Block{trace5}}
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	chunk5 := &Chunk{Blocks: []*Block{block5}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
 	daBatch, err = codecv1.NewDABatch(originalBatch)
 	assert.NoError(t, err)
@@ -628,16 +629,16 @@ func TestCodecV1BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
-	chunk6 := &Chunk{Blocks: []*Block{trace6}}
+	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	chunk6 := &Chunk{Blocks: []*Block{block6}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
 	daBatch, err = codecv1.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
-	chunk7 := &Chunk{Blocks: []*Block{trace7}}
+	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	chunk7 := &Chunk{Blocks: []*Block{block7}}
 	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
 	daBatch, err = codecv1.NewDABatch(originalBatch)
 	assert.NoError(t, err)
@@ -650,8 +651,8 @@ func TestCodecV1BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10
-	chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41
+	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
+	chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41
 	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
 	daBatch, err = codecv1.NewDABatch(originalBatch)
 	assert.NoError(t, err)
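A reading aid for the long expected strings in the next hunk: codecv1 packs the batch payload into the blob 31 bytes per 32-byte field element, leaving the first byte of each element at 0x00 so the element stays below the BLS12-381 scalar field modulus. That is why a "00" pair recurs every 64 hex characters in the expected values, and why the tests can strip trailing "0" runs with strings.TrimRight before comparing. A minimal sketch of that packing, assuming only this canonical-form rule (helper name ours):

	// packIntoBlob copies payload 31 bytes at a time, skipping byte 0 of
	// every 32-byte field element of the 128 KiB blob.
	// Assumes len(payload) <= 4096*31 = 126976 bytes.
	func packIntoBlob(payload []byte) (blob [131072]byte) {
		for i := 0; i*31 < len(payload); i++ {
			end := (i + 1) * 31
			if end > len(payload) {
				end = len(payload)
			}
			copy(blob[i*32+1:i*32+32], payload[i*31:end])
		}
		return blob
	}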
@@ -664,3 +665,86 @@ func TestCodecV1BatchL1MessagePopped(t *testing.T) {
 	assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 }
+
+func TestCodecV1BlobEncodingAndHashing(t *testing.T) {
+	codecv1, err := CodecFromVersion(CodecV1)
+	assert.NoError(t, err)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	chunk2 := &Chunk{Blocks: []*Block{block2}}
+	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
+	batch, err := codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0")
+	assert.Equal(t, // metadata
+		"00"+"0001"+"000000e6"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00"+"00"+"000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+
+		// tx payload
+		"00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1", encoded)
+	assert.Equal(t, common.HexToHash("0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553"), batch.(*daBatchV1).blobVersionedHash)
+
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	chunk3 := &Chunk{Blocks: []*Block{block3}}
+	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
+	batch, err = codecv1.NewDABatch(originalBatch)
+	assert.NoError(t, err)
+	encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0")
+	assert.Equal(t,
"000001000016310000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d26080604000523480156200001157600080fd5b50604051620014b2380380620014b283390081810160405260a08110156200003757600080fd5b8151602083015160408000850180519151939592948301929184640100000000821115620000635760000080fd5b9083019060208201858111156200007957600080fd5b8251640100000000008111828201881017156200009457600080fd5b8252508151602091820100929091019080838360005b83811015620000c357818101518382015260200100620000a9565b50505050905090810190601f168015620000f1578082038051006001836020036101000a031916815260200191505b5060405260200180516000405193929190846401000000008211156200011557600080fd5b908301906000208201858111156200012b57600080fd5b8251640100000000811182820188001017156200014657600080fd5b8252508151602091820192909101908083830060005b83811015620001755781810151838201526020016200015b565b5050005050905090810190601f168015620001a3578082038051600183602003610100000a031916815260200191505b506040526020908101518551909350859250008491620001c8916003918501906200026b565b508051620001de906004906000208401906200026b565b50506005805461ff001960ff199091166012171690005550600680546001600160a01b038088166001600160a01b031992831617900092556007805492871692909116919091179055620002308162000255565b5000506005805462010000600160b01b031916336201000002179055506200030700915050565b6005805460ff191660ff92909216919091179055565b82805460000181600116156101000203166002900490600052602060002090601f01602000900481019282601f10620002ae57805160ff1916838001178555620002de56005b82800160010185558215620002de579182015b82811115620002de57825100825591602001919060010190620002c1565b50620002ec929150620002f056005b5090565b5b80821115620002ec5760008155600101620002f1565b61119b0080620003176000396000f3fe608060405234801561001057600080fd5b50600004361061010b5760003560e01c80635c975abb116100a257806395d89b41110061007157806395d89b41146103015780639dc29fac14610309578063a457c200d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b00565b80635c975abb1461029d57806370a08231146102a55780638456cb5914006102cb5780638e50817a146102d35761010b565b8063313ce567116100de57008063313ce5671461021d578063395093511461023b5780633f4ba83a146102006757806340c10f19146102715761010b565b806306fdde031461011057806300095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e757005b600080fd5b6101186103bb565b604080516020808252835181830152835100919283929083019185019080838360005b838110156101525781810151838200015260200161013a565b50505050905090810190601f16801561017f578082000380516001836020036101000a031916815260200191505b50925050506040005180910390f35b6101b9600480360360408110156101a357600080fd5b50600001600160a01b038135169060200135610451565b60408051911515825251900081900360200190f35b6101d561046e565b6040805191825251908190036020000190f35b6101b9600480360360608110156101fd57600080fd5b50600160010060a01b03813581169160208101359091169060400135610474565b610225610004fb565b6040805160ff9092168252519081900360200190f35b6101b9600400803603604081101561025157600080fd5b506001600160a01b03813516906000200135610504565b61026f610552565b005b61026f600480360360408110150061028757600080fd5b506001600160a01b0381351690602001356105a9565b006101b9610654565b6101d5600480360360208110156102bb57600080fd5b5000356001600160a01b0316610662565b61026f61067d565b61026f60048036030060408110156102e957600080fd5b506001600160a01b0381358116916020010035166106d2565b610118610757565b61026f6004803603604081101561031f0057600080fd5b506001600160a01b0381351690602001356107b8565b6101b9006004803603604081101561034b576000
80fd5b506001600160a01b0381351600906020013561085f565b6101b96004803603604081101561037757600080fd005b506001600160a01b0381351690602001356108c7565b6101d560048036030060408110156103a357600080fd5b506001600160a01b0381358116916020010035166108db565b60038054604080516020601f600260001961010060018816001502019095169490940493840181900481028201810190925282815260609300909290918301828280156104475780601f1061041c5761010080835404028300529160200191610447565b820191906000526020600020905b8154815290600001019060200180831161042a57829003601f168201915b505050505090509000565b600061046561045e610906565b848461090a565b50600192915050565b0060025490565b60006104818484846109f6565b6104f18461048d610906565b006104ec8560405180606001604052806028815260200161108560289139600100600160a01b038a166000908152600160205260408120906104cb610906565b006001600160a01b031681526020810191909152604001600020549190610b5100565b61090a565b5060019392505050565b60055460ff1690565b600061046500610511610906565b846104ec8560016000610522610906565b6001600160a0001b03908116825260208083019390935260409182016000908120918c16815200925290205490610be8565b6007546001600160a01b0316331461059f57604000805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c0049565b565b600554610100900460ff16156105f9576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610646576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610ced565b5050565b600554610100900460ff001690565b6001600160a01b031660009081526020819052604090205490565b006007546001600160a01b031633146106ca576040805162461bcd60e51b81520060206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b60440082015290519081900360640190fd5b6105a7610ddd565b600554620100009000046001600160a01b03163314610726576040805162461bcd60e51b81526020006004820152600c60248201526b6f6e6c7920466163746f727960a01b60448200015290519081900360640190fd5b600780546001600160a01b03928316600100600160a01b0319918216179091556006805493909216921691909117905556005b60048054604080516020601f600260001961010060018816150201909516009490940493840181900481028201810190925282815260609390929091830100828280156104475780601f1061041c5761010080835404028352916020019100610447565b600554610100900460ff1615610808576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610855576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610e65565b600061046561086c610906565b84006104ec85604051806060016040528060258152602001611117602591396001006000610896610906565b6001600160a01b0390811682526020808301939093005260409182016000908120918d16815292529020549190610b51565b6000610004656108d4610906565b84846109f6565b6001600160a01b0391821660009000815260016020908152604080832093909416825291909152205490565b339000565b6001600160a01b03831661094f5760405162461bcd60e51b8152600401008080602001828103825260248152602001806110f3602491396040019150500060405180910390fd5b6001600160a01b0382166109945760405162461bcd6000e51b815260040180806020018281038252602281526020018061103d602291003960400191505060405180910390fd5b6001600160a01b038084166000818100526001602090815260408083209487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b9259281900390910190a3505050565b6001600160a01b03831600610
a3b5760405162461bcd60e51b8152600401808060200182810382526025008152602001806110ce6025913960400191505060405180910390fd5b600160000160a01b038216610a805760405162461bcd60e51b815260040180806020010082810382526023815260200180610ff8602391396040019150506040518091000390fd5b610a8b838383610f61565b610ac8816040518060600160405280600026815260200161105f602691396001600160a01b038616600090815260208100905260409020549190610b51565b6001600160a01b03808516600090815260002081905260408082209390935590841681522054610af79082610be8565b600001600160a01b03808416600081815260208181526040918290209490945580005185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952b00a7f163c4a11628f55a4df523b3ef92918290030190a3505050565b6000818400841115610be05760405162461bcd60e51b8152600401808060200182810382005283818151815260200191508051906020019080838360005b83811015610b00a5578181015183820152602001610b8d565b50505050905090810190601f16008015610bd25780820380516001836020036101000a03191681526020019150005b509250505060405180910390fd5b505050900390565b60008282018381100015610c42576040805162461bcd60e51b815260206004820152601b6024820100527f536166654d6174683a206164646974696f6e206f766572666c6f77000000000000604482015290519081900360640190fd5b9392505050565b60055461000100900460ff16610c9c576040805162461bcd60e51b81526020600482015200601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b00604482015290519081900360640190fd5b6005805461ff00191690557f5db900ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa61000cd0610906565b604080516001600160a01b03909216825251908190036020000190a1565b6001600160a01b038216610d48576040805162461bcd60e51b81005260206004820152601f60248201527f45524332303a206d696e7420746f2000746865207a65726f20616464726573730060448201529051908190036064010090fd5b610d5460008383610f61565b600254610d619082610be8565b600255006001600160a01b038216600090815260208190526040902054610d87908261000be8565b6001600160a01b038316600081815260208181526040808320949000945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa95002ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b60055400610100900460ff1615610e2d576040805162461bcd60e51b81526020600482000152601060248201526f14185d5cd8589b194e881c185d5cd95960821b60440082015290519081900360640190fd5b6005805461ff0019166101001790557f0062e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a20058610cd0610906565b6001600160a01b038216610eaa5760405162461bcd6000e51b81526004018080602001828103825260218152602001806110ad602191003960400191505060405180910390fd5b610eb682600083610f61565b610ef3008160405180606001604052806022815260200161101b60229139600160016000a01b0385166000908152602081905260409020549190610b51565b600160010060a01b038316600090815260208190526040902055600254610f199082610f00b5565b6002556040805182815290516000916001600160a01b038516917fdd00f252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef009181900360200190a35050565b610f6c838383610fb0565b610f7461065456005b15610fb05760405162461bcd60e51b81526004018080602001828103825200602a81526020018061113c602a913960400191505060405180910390fd5b50005050565b6000610c4283836040518060400160405280601e81526020017f53006166654d6174683a207375627472616374696f6e206f766572666c6f77000000815250610b5156fe45524332303a207472616e7366657220746f2074686520007a65726f206164647265737345524332303a206275726e20616d6f756e742000657863656564732062616c616e636545524332303a20617070726f76652074006f20746865207a65726f206164647265737345524332303a207472616e736600657220616d6f756e7420657863656564732062616c616e636545524332303a00207472616e7366657220616d6f756e74206578636565647320616c6c6f7761006e636545524332303a206275726e2066726f6d
20746865207a65726f20616400647265737345524332303a207472616e736665722066726f6d20746865207a0065726f206164647265737345524332303a20617070726f76652066726f6d2000746865207a65726f206164647265737345524332303a206465637265617365006420616c6c6f77616e63652062656c6f77207a65726f4552433230506175730061626c653a20746f6b656e207472616e73666572207768696c652070617573006564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed0057770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000009570045544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7", encoded) + assert.Equal(t, common.HexToHash("0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411"), batch.(*daBatchV1).blobVersionedHash) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + batch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0000010000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080008", encoded) + assert.Equal(t, common.HexToHash("0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd"), batch.(*daBatchV1).blobVersionedHash) + + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + batch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "000001", encoded) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), batch.(*daBatchV1).blobVersionedHash) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + batch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "000001", encoded) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), batch.(*daBatchV1).blobVersionedHash) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + batch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "000001", encoded) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), 
batch.(*daBatchV1).blobVersionedHash) + + // 15 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + batch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, // metadata + "00"+"000f"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"00"+"00"+"0000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+ + // tx payload + "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea003f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ece00a0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86d00f514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288b00baf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf000d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f0010c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f002b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b009aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d0002c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b00219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d199600b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940100bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000800083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393e00b095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87938000aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b600e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae9900c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cb00d19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8007101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce941100ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b002cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec005bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e830700a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de10200513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c57008fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e900a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea000f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7730016a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6e00ed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2ade00ceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7b00a5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd7300e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9a00ec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d0200c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808
300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc060015b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03998586600d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e0081065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a100209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260004393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f00879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6a00cb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab0007ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df51400a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf4002a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d6900ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c100be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b460004bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec002e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c700e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b001de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b500243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae600bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000808301009ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb09500b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac100c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb009e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67a00a78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19f00eacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710100843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a00152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cac00e28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd400aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a1200094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d0056548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd700f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03f00b2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e008307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e1004af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b100bf0d69ce9411ed8a152d02c7e14af600
00008083019ecea0f039985866d825006f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e8106005f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8718084003b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a15002d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc3002b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d190096b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940001bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260439003eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87930080aac1c09c6eed32f1", encoded) + assert.Equal(t, common.HexToHash("0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284"), batch.(*daBatchV1).blobVersionedHash) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + batch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b411161007157806395d89b4114610301578063
9d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b6003805460408051602060001f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b0316331461072657604000805162461bcd60e51b8152602060048
20152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b60078054006001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b038316600081810052602081815260408083209490945583518581529351929391927fddf252ad001b
e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b600554610100900460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded) + assert.Equal(t, common.HexToHash("0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf"), batch.(*daBatchV1).blobVersionedHash) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index d1582bf..f959834 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -2,6 +2,7 @@ package encoding import ( "encoding/hex" + "strings" "testing" "github.com/scroll-tech/go-ethereum/common" @@ -53,12 +54,12 @@ func TestCodecV2BlockEncode(t *testing.T) { encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", 
encoded) - codecV0, err := CodecFromVersion(CodecV0) + codecv0, err := CodecFromVersion(CodecV0) assert.NoError(t, err) // sanity check: v0 and v2 block encodings are identical for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { - blockv0, err := codecV0.NewDABlock(trace, 0) + blockv0, err := codecv0.NewDABlock(trace, 0) assert.NoError(t, err) encodedv0 := hex.EncodeToString(blockv0.Encode()) @@ -598,24 +599,24 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped) - trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") - chunk3 := &Chunk{Blocks: []*Block{trace3}} + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} originalBatch = &Batch{Chunks: []*Chunk{chunk3}} daBatch, err = codecv2.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped) - trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") - chunk4 := &Chunk{Blocks: []*Block{trace4}} + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} originalBatch = &Batch{Chunks: []*Chunk{chunk4}} daBatch, err = codecv2.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped) - trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") - chunk5 := &Chunk{Blocks: []*Block{trace5}} + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} originalBatch = &Batch{Chunks: []*Chunk{chunk5}} daBatch, err = codecv2.NewDABatch(originalBatch) assert.NoError(t, err) @@ -628,16 +629,16 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5 assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) - trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") - chunk6 := &Chunk{Blocks: []*Block{trace6}} + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} originalBatch = &Batch{Chunks: []*Chunk{chunk6}} daBatch, err = codecv2.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3 assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped) - trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") - chunk7 := &Chunk{Blocks: []*Block{trace7}} + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} originalBatch = &Batch{Chunks: []*Chunk{chunk7}} daBatch, err = codecv2.NewDABatch(originalBatch) assert.NoError(t, err) @@ -650,8 +651,8 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2 assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped) - chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10 - chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41 + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 originalBatch = &Batch{Chunks:
[]*Chunk{chunk8, chunk9}} daBatch, err = codecv2.NewDABatch(originalBatch) assert.NoError(t, err) @@ -664,3 +665,80 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) } + +func TestCodecV2BlobEncodingAndHashing(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + batch, err := codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) + assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), batch.(*daBatchV1).blobVersionedHash) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a9566106
54d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025b
cf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), batch.(*daBatchV1).blobVersionedHash) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), batch.(*daBatchV1).blobVersionedHash) + + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV1).blobVersionedHash) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV1).blobVersionedHash) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV1).blobVersionedHash) + + // 45 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, 
"006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), batch.(*daBatchV1).blobVersionedHash) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + batch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b
848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b
91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV1).blobVersionedHash) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 19d027c..0b609c6 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -3,6 +3,7 @@ package encoding import ( "encoding/hex" "encoding/json" + "strings" "testing" "github.com/scroll-tech/go-ethereum/common" @@ -55,12 +56,12 @@ func TestCodecV3BlockEncode(t *testing.T) { encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - codecV0, err := CodecFromVersion(CodecV0) + codecv0, err := CodecFromVersion(CodecV0) assert.NoError(t, err) // sanity check: v0 and v3 block encodings are identical for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { - blockv0, err := codecV0.NewDABlock(trace, 0) + blockv0, err := codecv0.NewDABlock(trace, 0) assert.NoError(t, err) encodedv0 := hex.EncodeToString(blockv0.Encode()) @@ -739,24 +740,24 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) - trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") - chunk3 := &Chunk{Blocks: []*Block{trace3}} + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} originalBatch = &Batch{Chunks: []*Chunk{chunk3}} daBatch, err = codecv3.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) - trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") - chunk4 := &Chunk{Blocks: []*Block{trace4}} + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} originalBatch = &Batch{Chunks: []*Chunk{chunk4}} daBatch, err = codecv3.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped) - trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") - chunk5 := &Chunk{Blocks: []*Block{trace5}} + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} originalBatch = &Batch{Chunks: []*Chunk{chunk5}} daBatch, err = codecv3.NewDABatch(originalBatch) assert.NoError(t, err) @@ -769,16 +770,16 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) - trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") - chunk6 := &Chunk{Blocks: []*Block{trace6}} + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} originalBatch = &Batch{Chunks: []*Chunk{chunk6}} daBatch, err = codecv3.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3 assert.Equal(t, uint64(10), 
daBatch.(*daBatchV3).totalL1MessagePopped) - trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") - chunk7 := &Chunk{Blocks: []*Block{trace7}} + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} originalBatch = &Batch{Chunks: []*Chunk{chunk7}} daBatch, err = codecv3.NewDABatch(originalBatch) assert.NoError(t, err) @@ -791,8 +792,8 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2 assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) - chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10 - chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41 + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} daBatch, err = codecv3.NewDABatch(originalBatch) assert.NoError(t, err) @@ -805,3 +806,80 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) } + +func TestCodecV3BlobEncodingAndHashing(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + batch, err := codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) + assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), batch.(*daBatchV3).blobVersionedHash) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), batch.(*daBatchV3).blobVersionedHash) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), batch.(*daBatchV3).blobVersionedHash) + + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, 
common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + + // 45 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), batch.(*daBatchV3).blobVersionedHash) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + batch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV3).blobVersionedHash) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index ccc9e2b..eb10db0 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -3,6 +3,7 @@ package encoding import ( "encoding/hex" "encoding/json" + "strings" "testing" "github.com/scroll-tech/go-ethereum/common" @@ -55,12 +56,12 @@ func TestCodecV4BlockEncode(t *testing.T) { encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - codecV0, err := 
CodecFromVersion(CodecV0) + codecv0, err := CodecFromVersion(CodecV0) assert.NoError(t, err) // sanity check: v0 and v4 block encodings are identical for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { - blockv0, err := codecV0.NewDABlock(trace, 0) + blockv0, err := codecv0.NewDABlock(trace, 0) assert.NoError(t, err) encodedv0 := hex.EncodeToString(blockv0.Encode()) @@ -740,24 +741,24 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) - trace3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") - chunk3 := &Chunk{Blocks: []*Block{trace3}} + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} originalBatch = &Batch{Chunks: []*Chunk{chunk3}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) - trace4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") - chunk4 := &Chunk{Blocks: []*Block{trace4}} + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} originalBatch = &Batch{Chunks: []*Chunk{chunk4}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped) - trace5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") - chunk5 := &Chunk{Blocks: []*Block{trace5}} + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} originalBatch = &Batch{Chunks: []*Chunk{chunk5}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) @@ -770,16 +771,16 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) - trace6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") - chunk6 := &Chunk{Blocks: []*Block{trace6}} + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} originalBatch = &Batch{Chunks: []*Chunk{chunk6}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3 assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped) - trace7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") - chunk7 := &Chunk{Blocks: []*Block{trace7}} + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} originalBatch = &Batch{Chunks: []*Chunk{chunk7}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) @@ -792,8 +793,8 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2 assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) - chunk8 := &Chunk{Blocks: []*Block{block2, trace3, trace4}} // queue index 10 - chunk9 := &Chunk{Blocks: []*Block{trace5}} // queue index 37-41 + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} 
daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) @@ -806,3 +807,80 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) } + +func TestCodecV4BlobEncodingAndHashing(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + batch, err := codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) + assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), batch.(*daBatchV3).blobVersionedHash) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e9
0135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7
ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), batch.(*daBatchV3).blobVersionedHash) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), batch.(*daBatchV3).blobVersionedHash) + + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + + // 45 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, 
"006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), batch.(*daBatchV3).blobVersionedHash) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + batch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b
848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b
91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV3).blobVersionedHash) +} From ec826820a4a438a303cb11090c0df07877753e76 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 20:23:23 +0800 Subject: [PATCH 097/126] add blob data proof unit tests --- encoding/codecv1_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv2_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ encoding/codecv4_test.go | 76 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 304 insertions(+) diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 2116a13..4b0ffd7 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -748,3 +748,79 @@ func TestCodecV1BlobEncodingAndHashing(t *testing.T) { assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b41116100715780
6395d89b41146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b6003805460408051602060001f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b03163314610726576040008051624
61bcd60e51b815260206004820152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b60078054006001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b0383166000818100526020818152604080832094909455835185815293
51929391927fddf252ad001be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b600554610100900460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded) assert.Equal(t, common.HexToHash("0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf"), batch.(*daBatchV1).blobVersionedHash) } + +func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef423dc493f1dd7c9fbecdffa021ca4649b13e8d72231487034ec6b27e155ecfd7b44a38af1f9a6c70cd3ccfbf71968f447aa566bbafb0bbc566fc9eeb42973484802635a1bbd8305d34a46693331bf607b38542ec811c92d86ff6f3319de06ee60c42655278ccf874f3615f450de730895276828b73db03c553b0bc7e5474a5e0", hex.EncodeToString(verifyData)) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e5821975f318babe50be728f9b52754d5ce2caa2ba82ba35b5888af1c5f28d23206b8aab265dc352e352807a298f7bb99d432c7cd543e63158cbdb8fbf99f3182a71af35ccbed2693c5e0bc5be38d565e868e0c6fe7bd39baa5ee6339cd334a18af7c680d24e825262499e83b31633b13a9ee89813fae8441630c82bc9dce3f1e07", hex.EncodeToString(verifyData)) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8088a01e54e3565d2e91ce6afbadf479330847d9106737875303ce17f17c48722afd4e1c55a17dbdf8390b5736158afe238d82f8b696669ba47015fcdfd4d1becd0ff7a47f8f379a4ac8d1741e2d67624aee03a0f7cdb7807bc7e0b9fb20bc299af2a35e38cda816708b40f2f18db491e14a0f5d9cfe2f4c12e4ca1a219484f17", hex.EncodeToString(verifyData)) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) + + // 15 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b54d28f1479467d8b97fb99f5257d3e5d63a81cb2d60e3564fe6ec6066a311c119743324c70e20042de6480f115b215fbba3472a8b994303a99576c1244aa4aec22fdfe6c74ec728aa28a9eb3812bc932a0b603cc94be2007d4b3b17af06b4fb30caf0e574d5abcfc5654079e65154679afad75844396082a7200a4e82462aeed", hex.EncodeToString(verifyData)) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab35b73ddb4a78fc4a8540f1d8259512c46e606a701e7ef7742e38cc4562ef53b983bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fda28e5610ca6b185d6ac30b53bd83d6366fccb1956daafa90ff6b504a966b119ebb45cb3f7085b7c1d622ee1ad27fcff9", hex.EncodeToString(verifyData)) +} diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index f959834..f7d9b47 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -742,3 +742,79 @@ func TestCodecV2BlobEncodingAndHashing(t *testing.T) { assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV1).blobVersionedHash) } + +func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + // 15 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData)) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 0b609c6..c14e2e3 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -883,3 +883,79 @@ func TestCodecV3BlobEncodingAndHashing(t *testing.T) { assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV3).blobVersionedHash) } + +func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + // 45 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "580ec95306dc32ce5d3bda93c43d9a3e24d2fd801aefa5097757777888ff7de5068f96617616075651e4a90e7687cab788cc0bc470d9e4f38f2a4e1bd0949a75b99fd46a5eb5e896e295d823d80622f284deff3c25b56164d227dd2c382b2dbd918ed9c1eef973e9bbcd773b964128abb79bade5d595a2207ddd4062830145abc49c9ca0ca45a3a479934fbf86ffd42a12b17c59932237dee5d6fcd8466baea4", hex.EncodeToString(verifyData)) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index eb10db0..b76f08c 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -884,3 +884,79 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV3).blobVersionedHash) } + +func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + originalBatch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + originalBatch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + originalBatch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + originalBatch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + originalBatch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + // 45 chunks + originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "580ec95306dc32ce5d3bda93c43d9a3e24d2fd801aefa5097757777888ff7de5068f96617616075651e4a90e7687cab788cc0bc470d9e4f38f2a4e1bd0949a75b99fd46a5eb5e896e295d823d80622f284deff3c25b56164d227dd2c382b2dbd918ed9c1eef973e9bbcd773b964128abb79bade5d595a2207ddd4062830145abc49c9ca0ca45a3a479934fbf86ffd42a12b17c59932237dee5d6fcd8466baea4", hex.EncodeToString(verifyData)) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(originalBatch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) +} From d69f8cc97bbd67e46468cd0a49e80edb299337c8 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 20:35:28 +0800 Subject: [PATCH 098/126] add TestDecodeBitmap --- encoding/bitmap.go | 8 ++++---- encoding/bitmap_test.go | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 4 deletions(-) create mode 100644 encoding/bitmap_test.go diff --git a/encoding/bitmap.go b/encoding/bitmap.go index fedec12..a4f6e02 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -64,8 +64,8 @@ func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePo return bitmapBytes, nextIndex, nil } -// DecodeBitmap decodes skipped L1 message bitmap of the batch from bytes to big.Int's -func DecodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*big.Int, error) { +// decodeBitmap decodes skipped L1 message bitmap of the batch from bytes to big.Int's. 
+func decodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*big.Int, error) { length := len(skippedL1MessageBitmap) if length%32 != 0 { return nil, fmt.Errorf("skippedL1MessageBitmap length doesn't match, skippedL1MessageBitmap length should be equal 0 modulo 32, length of skippedL1MessageBitmap: %v", length) @@ -81,8 +81,8 @@ func DecodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*b return skippedBitmap, nil } -// IsL1MessageSkipped checks if index is skipped in bitmap -func IsL1MessageSkipped(skippedBitmap []*big.Int, index uint64) bool { +// isL1MessageSkipped checks if index is skipped in bitmap. +func isL1MessageSkipped(skippedBitmap []*big.Int, index uint64) bool { if index > uint64(len(skippedBitmap))*256 { return false } diff --git a/encoding/bitmap_test.go b/encoding/bitmap_test.go new file mode 100644 index 0000000..554cbb3 --- /dev/null +++ b/encoding/bitmap_test.go @@ -0,0 +1,34 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDecodeBitmap(t *testing.T) { + bitmapHex := "0000000000000000000000000000000000000000000000000000001ffffffbff" + skippedL1MessageBitmap, err := hex.DecodeString(bitmapHex) + assert.NoError(t, err) + + decodedBitmap, err := decodeBitmap(skippedL1MessageBitmap, 42) + assert.NoError(t, err) + + assert.True(t, isL1MessageSkipped(decodedBitmap, 0)) + assert.True(t, isL1MessageSkipped(decodedBitmap, 9)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 10)) + assert.True(t, isL1MessageSkipped(decodedBitmap, 11)) + assert.True(t, isL1MessageSkipped(decodedBitmap, 36)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 37)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 38)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 39)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 40)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 41)) + + _, err = decodeBitmap([]byte{0x00}, 8) + assert.Error(t, err) + + _, err = decodeBitmap([]byte{0x00, 0x00, 0x00, 0x00}, 33) + assert.Error(t, err) +} From 79ee6f7f133cf0729881fdcf75a0708cb1b4f258 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 21:56:12 +0800 Subject: [PATCH 099/126] add BlobCompressDecompress unit tests --- encoding/codecv2_test.go | 22 ++++++++++++++++++++++ encoding/codecv3_test.go | 22 ++++++++++++++++++++++ encoding/codecv4_test.go | 22 ++++++++++++++++++++++ 3 files changed, 66 insertions(+) diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index f7d9b47..698b2b5 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -5,6 +5,7 @@ import ( "strings" "testing" + "github.com/scroll-tech/da-codec/encoding/zstd" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" @@ -818,3 +819,24 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) } + +func TestCodecV2BlobCompressDecompress(t *testing.T) { + blobString := 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" + blobBytes, err := hex.DecodeString(blobString) + assert.NoError(t, err) + + compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + assert.NoError(t, err) + + blob, err := makeBlobCanonical(compressed) + assert.NoError(t, err) + + res := bytesFromBlobCanonical(blob) + compressedBytes := res[:] + magics := []byte{0x28, 0xb5, 0x2f, 0xfd} + compressedBytes = append(magics, compressedBytes...) 
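+ // Note: 0x28 0xb5 0x2f 0xfd is the standard zstd frame magic number; the compressed
+ // stream produced by CompressScrollBatchBytes omits it, so the test re-prepends it
+ // before handing the bytes to decompressScrollBlobToBatch.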
+ + decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes) + assert.NoError(t, err) + assert.Equal(t, blobBytes, decompressedBlobBytes) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index c14e2e3..d1ff50b 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/scroll-tech/da-codec/encoding/zstd" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" @@ -959,3 +960,24 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) } + +func TestCodecV3BlobCompressDecompress(t *testing.T) { + blobString := "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f57624
61bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" + blobBytes, err := hex.DecodeString(blobString) + assert.NoError(t, err) + + compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + 
assert.NoError(t, err) + + blob, err := makeBlobCanonical(compressed) + assert.NoError(t, err) + + res := bytesFromBlobCanonical(blob) + compressedBytes := res[:] + magics := []byte{0x28, 0xb5, 0x2f, 0xfd} + compressedBytes = append(magics, compressedBytes...) + + decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes) + assert.NoError(t, err) + assert.Equal(t, blobBytes, decompressedBlobBytes) +} diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index b76f08c..726ae81 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/scroll-tech/da-codec/encoding/zstd" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" @@ -960,3 +961,24 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) } + +func TestCodecV4BlobCompressDecompress(t *testing.T) { + blobString := "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b
6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13d
cf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" + blobBytes, err := hex.DecodeString(blobString) + assert.NoError(t, err) + + compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + assert.NoError(t, err) + + blob, err := makeBlobCanonical(compressed) + assert.NoError(t, err) + + res := bytesFromBlobCanonical(blob) + compressedBytes := res[:] + magics := []byte{0x28, 0xb5, 0x2f, 0xfd} + compressedBytes = append(magics, compressedBytes...) + + decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes) + assert.NoError(t, err) + assert.Equal(t, blobBytes, decompressedBlobBytes) +} From d9c81197d7cfadad167bebf8f1face21cf79b73b Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 23:00:01 +0800 Subject: [PATCH 100/126] tweaks and bug fixes --- encoding/bitmap.go | 10 --- encoding/bitmap_test.go | 10 +++ encoding/codecv0_test.go | 53 ++++++++++++ encoding/codecv1_test.go | 61 ++++++++++++++ encoding/codecv2_test.go | 63 ++++++++++++--- encoding/codecv3_test.go | 63 ++++++++++++--- encoding/codecv4.go | 169 +++++++++++++++++++++++++++++++++++++-- encoding/codecv4_test.go | 143 +++++++++++++++++++++------------ encoding/da_test.go | 23 ++++++ 9 files changed, 502 insertions(+), 93 deletions(-) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index a4f6e02..cc0614e 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -80,13 +80,3 @@ func decodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*b } return skippedBitmap, nil } - -// isL1MessageSkipped checks if index is skipped in bitmap. -func isL1MessageSkipped(skippedBitmap []*big.Int, index uint64) bool { - if index > uint64(len(skippedBitmap))*256 { - return false - } - quo := index / 256 - rem := index % 256 - return skippedBitmap[quo].Bit(int(rem)) != 0 -} diff --git a/encoding/bitmap_test.go b/encoding/bitmap_test.go index 554cbb3..d5abe30 100644 --- a/encoding/bitmap_test.go +++ b/encoding/bitmap_test.go @@ -2,6 +2,7 @@ package encoding import ( "encoding/hex" + "math/big" "testing" "github.com/stretchr/testify/assert" @@ -15,6 +16,15 @@ func TestDecodeBitmap(t *testing.T) { decodedBitmap, err := decodeBitmap(skippedL1MessageBitmap, 42) assert.NoError(t, err) + isL1MessageSkipped := func(skippedBitmap []*big.Int, index uint64) bool { + if index >= uint64(len(skippedBitmap))*256 { + return false + } + quo := index / 256 + rem := index % 256 + return skippedBitmap[quo].Bit(int(rem)) == 1 + } + assert.True(t, isL1MessageSkipped(decodedBitmap, 0)) assert.True(t, isL1MessageSkipped(decodedBitmap, 9)) assert.False(t, isL1MessageSkipped(decodedBitmap, 10)) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 6a45107..f199d29 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -564,3 +564,56 @@ func TestCodecV0BatchL1MessagePopped(t *testing.T) { assert.Equal(t, uint64(32), daBatch.(*daBatchV0).l1MessagePopped) assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped) } + +func TestCodecV0DecodeDAChunksRawTx(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + assert.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv0.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, 
"testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv0.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() + assert.NoError(t, err) + + daChunksRawTx, err := codecv0.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // assert block in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV0).blocks[0], daChunksRawTx[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV0).blocks[1], daChunksRawTx[0].Blocks[1]) + + // assert block in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) + daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV0).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0]) + daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV0).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) + + // assert transactions in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions)) + // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + + assert.EqualValues(t, daChunk0.(*daChunkV0).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV0).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + + // assert transactions in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) + // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs + assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) +} diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 4b0ffd7..c29e57e 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -824,3 +824,64 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab35b73ddb4a78fc4a8540f1d8259512c46e606a701e7ef7742e38cc4562ef53b983bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fda28e5610ca6b185d6ac30b53bd83d6366fccb1956daafa90ff6b504a966b119ebb45cb3f7085b7c1d622ee1ad27fcff9", hex.EncodeToString(verifyData)) } + +func TestCodecV1DecodeDAChunksRawTx(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv1.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv1.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() + assert.NoError(t, err) + + originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + batch, err := 
codecv1.NewDABatch(originalBatch)
+ assert.NoError(t, err)
+
+ daChunksRawTx, err := codecv1.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1})
+ assert.NoError(t, err)
+ // assert number of chunks
+ assert.Equal(t, 2, len(daChunksRawTx))
+
+ // assert block in first chunk
+ assert.Equal(t, 2, len(daChunksRawTx[0].Blocks))
+ assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0])
+ assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1])
+
+ // assert block in second chunk
+ assert.Equal(t, 2, len(daChunksRawTx[1].Blocks))
+ daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil
+ assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0])
+ daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil
+ assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1])
+
+ blob := batch.Blob()
+ err = codecv1.DecodeTxsFromBlob(blob, daChunksRawTx)
+ assert.NoError(t, err)
+
+ // assert transactions in first chunk
+ assert.Equal(t, 2, len(daChunksRawTx[0].Transactions))
+ // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages
+ assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0]))
+ assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1]))
+
+ assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String())
+ assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String())
+
+ // assert transactions in second chunk
+ assert.Equal(t, 2, len(daChunksRawTx[1].Transactions))
+ // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages
+ assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0]))
+ assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1]))
+}
diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go
index 698b2b5..c386d0d 100644
--- a/encoding/codecv2_test.go
+++ b/encoding/codecv2_test.go
@@ -5,7 +5,6 @@ import (
 "strings"
 "testing"
 
- "github.com/scroll-tech/da-codec/encoding/zstd"
 "github.com/scroll-tech/go-ethereum/common"
 "github.com/scroll-tech/go-ethereum/core/types"
 "github.com/stretchr/testify/assert"
@@ -820,23 +819,63 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData))
 }
-func TestCodecV2BlobCompressDecompress(t *testing.T) {
- blobString :=
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" - blobBytes, err := hex.DecodeString(blobString) +func TestCodecV2DecodeDAChunksRawTx(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv2.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv2.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() assert.NoError(t, err) - compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + batch, err := codecv2.NewDABatch(originalBatch) assert.NoError(t, err) - blob, err := makeBlobCanonical(compressed) + daChunksRawTx, err := codecv2.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // 
assert block in first chunk
+ assert.Equal(t, 2, len(daChunksRawTx[0].Blocks))
+ assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0])
+ assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1])
- res := bytesFromBlobCanonical(blob)
- compressedBytes := res[:]
- magics := []byte{0x28, 0xb5, 0x2f, 0xfd}
- compressedBytes = append(magics, compressedBytes...)
+ // assert block in second chunk
+ assert.Equal(t, 2, len(daChunksRawTx[1].Blocks))
+ daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil
+ assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0])
+ daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil
+ assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1])
- decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes)
+ blob := batch.Blob()
+ err = codecv2.DecodeTxsFromBlob(blob, daChunksRawTx)
 assert.NoError(t, err)
- assert.Equal(t, blobBytes, decompressedBlobBytes)
+
+ // assert transactions in first chunk
+ assert.Equal(t, 2, len(daChunksRawTx[0].Transactions))
+ // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages
+ assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0]))
+ assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1]))
+
+ assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String())
+ assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String())
+
+ // assert transactions in second chunk
+ assert.Equal(t, 2, len(daChunksRawTx[1].Transactions))
+ // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages
+ assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0]))
+ assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1]))
 }
diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go
index d1ff50b..4703f2e 100644
--- a/encoding/codecv3_test.go
+++ b/encoding/codecv3_test.go
@@ -6,7 +6,6 @@ import (
 "strings"
 "testing"
 
- "github.com/scroll-tech/da-codec/encoding/zstd"
 "github.com/scroll-tech/go-ethereum/common"
 "github.com/scroll-tech/go-ethereum/core/types"
 "github.com/stretchr/testify/assert"
@@ -961,23 +960,63 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) {
 assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData))
 }
-func TestCodecV3BlobCompressDecompress(t *testing.T) {
- blobString :=
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" - blobBytes, err := hex.DecodeString(blobString) +func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv3.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv3.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() assert.NoError(t, err) - compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + batch, err := codecv3.NewDABatch(originalBatch) assert.NoError(t, err) - blob, err := makeBlobCanonical(compressed) + daChunksRawTx, err := codecv3.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // 
assert blocks in first chunk
+	assert.Equal(t, 2, len(daChunksRawTx[0].Blocks))
+	assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0])
+	assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1])
-	res := bytesFromBlobCanonical(blob)
-	compressedBytes := res[:]
-	magics := []byte{0x28, 0xb5, 0x2f, 0xfd}
-	compressedBytes = append(magics, compressedBytes...)
+	// assert blocks in second chunk
+	assert.Equal(t, 2, len(daChunksRawTx[1].Blocks))
+	daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil
+	assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0])
+	daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil
+	assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1])
-	decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes)
+	blob := batch.Blob()
+	err = codecv3.DecodeTxsFromBlob(blob, daChunksRawTx)
 	assert.NoError(t, err)
-	assert.Equal(t, blobBytes, decompressedBlobBytes)
+
+	// assert transactions in first chunk
+	assert.Equal(t, 2, len(daChunksRawTx[0].Transactions))
+	// the number of transactions in the encoded and decoded chunks may differ, because decoded chunks do not contain L1 messages
+	assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0]))
+	assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1]))
+
+	assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String())
+	assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String())
+
+	// assert transactions in second chunk
+	assert.Equal(t, 2, len(daChunksRawTx[1].Transactions))
+	// the number of transactions in the encoded and decoded chunks may differ, because decoded chunks do not contain L1 messages
+	assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0]))
+	assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1]))
 }
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index 82fe7c0..3011336 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -1,19 +1,24 @@
 package encoding
 
 import (
+	"crypto/sha256"
 	"encoding/binary"
+	"encoding/hex"
 	"errors"
 	"fmt"
+	"math/big"
 
 	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/core/types"
+	"github.com/scroll-tech/go-ethereum/crypto"
 	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
+	"github.com/scroll-tech/go-ethereum/log"
 
 	"github.com/scroll-tech/da-codec/encoding/zstd"
 )
 
 type DACodecV4 struct {
 	DACodecV3
-	enableCompress bool
 }
 
 // Version returns the codec version.
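
Note on the envelope introduced in this file: constructBlobPayload (below) records the compression decision in the first byte of the blob payload -- 0x01 means the zstd-compressed batch bytes follow, 0x00 means the raw batch bytes follow -- and rejects payloads above 126976 bytes, the usable capacity of a 4844 blob (4096 field elements x 31 bytes). A minimal decoding sketch under those assumptions; splitEnvelope is illustrative only and not part of this patch:

package main

import (
	"errors"
	"fmt"
)

// splitEnvelope separates a CodecV4 blob payload into its envelope flag
// and body, mirroring the flag byte and size cap used by constructBlobPayload.
func splitEnvelope(blobBytes []byte) (compressed bool, body []byte, err error) {
	if len(blobBytes) == 0 || len(blobBytes) > 126976 {
		return false, nil, errors.New("invalid blob payload size")
	}
	switch blobBytes[0] {
	case 1: // body is the zstd-compressed batch bytes
		return true, blobBytes[1:], nil
	case 0: // body is the raw batch bytes
		return false, blobBytes[1:], nil
	default:
		return false, nil, fmt.Errorf("unknown envelope flag %#x", blobBytes[0])
	}
}

func main() {
	compressed, body, err := splitEnvelope([]byte{0, 0xca, 0xfe})
	fmt.Println(compressed, body, err) // false [202 254] <nil>
}

A consumer would zstd-decompress the body only when the flag is set, then parse num_chunks and the per-chunk sizes from the metadata at the front of the recovered batch bytes.
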
@@ -65,20 +70,20 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) + enableCompress, err := d.CheckBatchCompressedDataCompatibility(batch) if err != nil { return nil, err } - lastChunk := batch.Chunks[len(batch.Chunks)-1] - lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - - d.enableCompress, err = d.CheckBatchCompressedDataCompatibility(batch) + // blob payload + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), enableCompress, false /* no mock */) if err != nil { return nil, err } + lastChunk := batch.Chunks[len(batch.Chunks)-1] + lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + return newDABatchV3( uint8(CodecV4), // version batch.Index, // batchIndex @@ -128,6 +133,120 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } +// constructBlobPayload constructs the 4844 blob payload. +func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + maxNumChunksPerBatch*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // challenge digest preimage + // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*32) + + // the chunk data hash used for calculating the challenge preimage + var chunkDataHash common.Hash + + // blob metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode blob metadata and L2 transactions, + // and simultaneously also build challenge preimage + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into blob payload + rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + batchBytes = append(batchBytes, rlpTxData...) 
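+				// batchBytes now ends with this transaction's RLP bytes; the
+				// growth of this per-chunk segment is what gets recorded as
+				// chunki_size in the blob metadata below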
+ } + } + + // blob metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + + // challenge: compute chunk data hash + chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) + copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + } + + // if we have fewer than maxNumChunksPerBatch chunks, the rest + // of the blob metadata is correctly initialized to 0, + // but we need to add padding to the challenge preimage + for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { + // use the last chunk's data hash as padding + copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + } + + // challenge: compute metadata hash + hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) + copy(challengePreimage[0:], hash[:]) + + var blobBytes []byte + if enableCompress { + // blobBytes represents the compressed blob payload (batchBytes) + var err error + blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + if !useMockTxData { + // Check compressed data compatibility. + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, err + } + } + blobBytes = append([]byte{1}, blobBytes...) + } else { + blobBytes = append([]byte{0}, batchBytes...) + } + + if len(blobBytes) > 126976 { + log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := makeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+maxNumChunksPerBatch)*32:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + start := 32 - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, blobBytes, nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) @@ -173,3 +292,39 @@ func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil } + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
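+// The boolean result reports whether compressed (zstd) encoding should be used; a failed compatibility check returns (false, nil) rather than an error.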
+// It constructs a batch payload, compresses the data, and checks the compressed data compatibility.
+func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) {
+	batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d)
+	if err != nil {
+		return false, err
+	}
+	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
+	if err != nil {
+		return false, err
+	}
+	if err = CheckCompressedDataCompatibility(blobBytes); err != nil {
+		log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
+		return false, nil
+	}
+	return true, nil
+}
+
+// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch.
+// It constructs a batch payload, compresses the data, and checks the compressed data compatibility.
+func (d *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) {
+	batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d)
+	if err != nil {
+		return false, err
+	}
+	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
+	if err != nil {
+		return false, err
+	}
+	if err = CheckCompressedDataCompatibility(blobBytes); err != nil {
+		log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
+		return false, nil
+	}
+	return true, nil
+}
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index 726ae81..4576997 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -6,7 +6,6 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/scroll-tech/da-codec/encoding/zstd"
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/core/types"
 	"github.com/stretchr/testify/assert"
@@ -254,7 +253,7 @@ func TestCodecV4BatchEncode(t *testing.T) {
 	daBatch, err := codecv4.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
-	assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded)
+	assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
@@ -262,7 +261,7 @@ func TestCodecV4BatchEncode(t *testing.T) {
 	daBatch, err = codecv4.NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
-	assert.Equal(t, 
"04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) + assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} @@ -270,7 +269,7 @@ func TestCodecV4BatchEncode(t *testing.T) { daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) - assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) + assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} @@ -278,7 +277,7 @@ func TestCodecV4BatchEncode(t *testing.T) { daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} @@ -286,7 +285,7 @@ func TestCodecV4BatchEncode(t *testing.T) { daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) - assert.Equal(t, 
"040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} @@ -294,13 +293,13 @@ func TestCodecV4BatchEncode(t *testing.T) { daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) - assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) + assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} @@ -308,7 +307,7 @@ func TestCodecV4BatchEncode(t *testing.T) { daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) - assert.Equal(t, 
"040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) } func TestCodecV4BatchHash(t *testing.T) { @@ -328,54 +327,54 @@ func TestCodecV4BatchHash(t *testing.T) { originalBatch := &Batch{Chunks: []*Chunk{chunk2}} daBatch, err := codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x0684ec7f79e1950823f3aff20c6a3cde03357eb72027ee663347ac77d46f7565"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c"), daBatch.Hash()) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} originalBatch = &Batch{Chunks: []*Chunk{chunk3}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x094d6fb43f3bc4af387c8494f5b4d7ba82c9895a0122fbbe34f6a02cb512c564"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857"), daBatch.Hash()) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} originalBatch = &Batch{Chunks: []*Chunk{chunk4}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xda211c0402d1546fe4964c0e3d61621f6f020c851fd255a2c55419a4e091eae9"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d"), daBatch.Hash()) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} originalBatch = &Batch{Chunks: []*Chunk{chunk5}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x6bc8b8a6125c4f82afcbf8f190cefd002a61606fb751aca04b99f34a7459f678"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e"), daBatch.Hash()) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} originalBatch = &Batch{Chunks: []*Chunk{chunk6}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x985335548d6816cf955f39baa9e62ffec7068ac8591f7c7ea5a39d5b4929c29f"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342"), daBatch.Hash()) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} originalBatch = &Batch{Chunks: []*Chunk{chunk7}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x4ae2e62edaf78d4c9278c170b3ea2b0fb81c95c8875f523dbe889fe5035791ab"), daBatch.Hash()) + assert.Equal(t, 
common.HexToHash("0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117"), daBatch.Hash()) originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xc4120eefe42981b827f5c9a67b8f9b14007bf624694e5702f0f8a665b68832ad"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a"), daBatch.Hash()) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} daBatch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xa152b2e1594032ce70be219cfa9781f0840120ac2bf3fcad24893d77c4b74077"), daBatch.Hash()) + assert.Equal(t, common.HexToHash("0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44"), daBatch.Hash()) } func TestCodecV4BatchDataHash(t *testing.T) { @@ -819,8 +818,8 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err := codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) - assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, "0001609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf00039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7731600a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed0032f1030060b26d07d8b028b005", encoded) + assert.Equal(t, common.HexToHash("0x01e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f29"), batch.(*daBatchV3).blobVersionedHash) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} @@ -828,8 +827,8 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, 
"000160e7159d580094830001000016310002f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a0811037815160208301516040808501800051915193959294830192918464018211639083019060208201858179825181001182820188101794825250918201929091019080838360005b83c357818101005183820152602001620000a9565b50505050905090810190601f16f1578082000380516001836020036101000a031916819150805160405193929190011501002b01460175015b01a39081015185519093508592508491620001c891600391008501906200026b565b508051620001de90600490602084506005805461ff00001960ff1990911660121716905550600680546001600160a01b0380881619920083161790925560078054928716929091169190911790556200023081620002005562010000600160b01b03191633021790555062000307915050565b60ff19001660ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de010060010185558215620002de579182015b8202de5782518255916020019190600001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301570080639dc29fac14610309578063a457c2d714610335578063a9059cbb1461030061578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610002a55780638456cb59146102cb5780638e50817a146102d3313ce56711610000de571461021d578063395093511461023b5780633f4ba83a146102675780630040c10f191461027106fdde0314610110578063095ea7b31461018d5780631800160ddd146101cd57806323b872e7575b6101186103bb565b6040805160208000825283518183015283519192839290830161015261013a61017f9250508091000390f35b6101b9600480360360408110156101a381351690602001356104510091151582525190819003602001d561046e60fd81169160208101359091169000604074565b6102256104fb60ff90921640025105046f610552565b005b6102006f028705a956610654d520bb3516610662067d56e90135166106d21861075700031f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282008152606093909290918301828280156104475780601f1061041c57610100800083540402835291610447565b825b8154815260200180831161042a5782900300601f16820191565b600061046561045e610906565b848461090a565b506001009202548184f6565b6104f18461048d6104ec8560405180606080602861108500602891398a166000908152600160205260408120906104cb81019190915260004001600020549190610b51565b935460ff160511016000610522908116825200602080830193909352604091820120918c168152925290205490610be8565b00600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b001b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090000460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606004606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616300746f727960a0079283918216179091559390921660041561080808550e6508006c2511176025006108968dd491824080832093909416825233831661094f5700040180806020018281038252602401806110f36024913960400191fd821661000994223d60228084166000819487168084529482529182902085905581518500815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b20000ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a008b838383610f61565b610ac881265f60268685808220939093559084168152002054610af7908220409490945580905191937fddf252ad1be2c89b69c2b06800fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111500610be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53006166654d6174683a206164646974696f6e206f766572666c6f7700610c9c140073621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537b00d38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e742074006f20746865207a65726f72657373610d
546000600254610d61025590205461000d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e42020070b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad602161000eb68260000ef3221b85839020550f199082610fb540805182600091851691009120565b610f6cb07415610fb02a113c602a00610c428383401e7375627472006163815250fe7472616e736665726275726e20616d6f756e742065786365650064732062616c616e6365617070726f7665616c6c6f7766726f6d646563726500617365642062656c6f775061757361626c653a20746f6b656e7768696c652000706175736564a2646970667358221220e96342bec8f6c2bf72815a3999897300b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a7700d9fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e0400c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f055003c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5200095d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60300126013290b6398528818e2c8484081888c4890142465a631e63178f994004800f46ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a80049670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fa00b388531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee55000b5e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b163008aa1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637100664c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d44700c0318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a300958d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa800b597b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b001b3f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242008009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a07700b85b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc800bea3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf96244333647009fbd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1000392cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d4614217006fcdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15b00c9975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e4500e579308f554787b4d1f74e389823923f5d268be545466a2dd449963ad2540700bd3a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe276800a9091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c3953600c5de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98800998d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) + assert.Equal(t, common.HexToHash("0x01ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df71050"), batch.(*daBatchV3).blobVersionedHash) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} @@ -837,8 +836,8 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, "000120d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df0002d40343a7626a9d321e105808080808001002c0a1801", encoded) + assert.Equal(t, common.HexToHash("0x01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93"), batch.(*daBatchV3).blobVersionedHash) // this batch only contains L1 txs block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") @@ -847,8 +846,8 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err = 
codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, "00000001", encoded) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), batch.(*daBatchV3).blobVersionedHash) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} @@ -856,8 +855,8 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, "00000001", encoded) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), batch.(*daBatchV3).blobVersionedHash) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} @@ -865,16 +864,16 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, "00000001", encoded) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), batch.(*daBatchV3).blobVersionedHash) // 45 chunks originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} batch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) - assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, 
"00016024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x0128a4e122c179a7c34ab1f22ceadf6fa66d2bb0d229933fe1ed061dd8b1fb5f"), batch.(*daBatchV3).blobVersionedHash) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} @@ -882,8 +881,8 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { batch, err = codecv4.NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") - assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e6109
06565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a186
01410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, "000160ed16256000449200020000173700f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf00039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7731600a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed0032f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348000156200001157600080fd5b50604051620014b238038083398181016040526000a0811037815160208301516040808501805191519395929483019291846401008211639083019060208201858179825181118282018810179482525091820100929091019080838360005b83c3578181015183820152602001620000a9565b0050505050905090810190601f16f15780820380516001836020036101000a030019168191508051604051939291900115012b01460175015b01a3908101518500519093508592508491620001c8916003918501906200026b565b50805162000001de90600490602084506005805461ff001960ff199091166012171690555000600680546001600160a01b03808816199283161790925560078054928716920090911691909117905562000230816200025562010000600160b01b0319163300021790555062000307915050565b60ff191660ff929092565b828160011615006101000203166002900490600052602060002090601f01602090048101928200601f10620002ae5780518380011785de0160010185558215620002de57918200015b8202de57825182559160200191906001c1565b50620002ec9291f0565b005090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb11610000a257806395d89b4111610071146103015780639dc29fac14610309578063a40057c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576100010b565b1461029d57806370a08231146102a55780638456cb59146102cb570080638e50817a146102d3313ce567116100de571461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f191461027106fdde031461000110578063095ea7b31461018d57806318160ddd146101cd57806323b872e700575b6101186103bb565b6040805160208082528351818301528351919283920090830161015261013a61017f92505080910390f35b6101b960048036036040008110156101a3813516906020013561045191151582525190819003602001d50061046e60fd811691602081013590911690604074565b6102256104fb60ff9000921640025105046f610552565b005b61026f028705a956610654d520bb351600610662067d56e90135166106d218610757031f07b856034b085f77c7d5a30800db565b6003805420601f600260001961010060018816150201909516949094000493840181900481028201810190925282815260609390929091830182828000156104475780601f1061041c576101008083540402835291610447565b825b008154815260200180831161042a57829003601f16820191565b60006104656100045e610906565b848461090a565b5060019202548184f6565b6104f1846104008d6104ec85604051806060806028611085602891398a16600090815260016000205260408120906104cb810191909152604001600020549190610b51565b93005460ff160511016000610522908116825260208083019390935260409182010020918c168152925290205490610be8565b600716331461059f5762461bcd6000e51b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529000640190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5c00d8589b194e881c185d5cd95960826006064606508282610ced90905260400600ca0ddd900407260c6b6f6e6c7920466163746f727960a00792839182161790009
1559390921660041561080808550e65086c2511176025006108968dd49182004080832093909416825233831661094f5704018080602001828103825260240001806110f36024913960400191fd8216610994223d60228084166000819487001680845294825291829020859055815185815291517f8c5be1e5ebec7d5bd1004f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831600610a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac88126005f602686858082209390935590841681522054610af790822040949094558000905191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f5005a4df523b3ef9291829003008184841115610be08381815191508051900ba5000b8d0bd2fd900300828201610c421b7f536166654d6174683a20616464697400696f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e600ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a1821661000d481f7f45524332303a206d696e7420746f20746865207a65726f7265737300610d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdb00fc544b05a2588216610eaa6021ad6021610eb68260000ef3221b8583902055000f199082610fb5408051826000918516919120565b610f6cb07415610fb02a00113c602a00610c428383401e73756274726163815250fe7472616e73666572006275726e20616d6f756e7420657863656564732062616c616e636561707072006f7665616c6c6f7766726f6d6465637265617365642062656c6f77506175730061626c653a20746f6b656e7768696c6520706175736564a264697066735822001220e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda000265d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a500820b63a0e012095745544820636f696e04c001a0235c1a8d40e8c34789039700f1a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd0697044006e74229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a00258d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a914009a111111110549d2740105c410e61ca4d603126013290b6398528818e2c848004081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb8000ccba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc500c5ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69800511c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be007ea27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0500238c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e923001dd28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af001ff932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb410002cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a0013b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f00061093a37810212ba36db205219fab4032428009178588ad21f754085dd80700b09af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403300355c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d369005c0904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5200463d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71160024bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80004421f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7ac00db3071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38980023923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f6700ea8d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c608007efc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac008533de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c63006da70ee60a586fdb282babf53e01", encoded) + assert.Equal(t, common.HexToHash("0x0121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26"), batch.(*daBatchV3).blobVersionedHash) } func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { @@ -897,7 +896,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { 
assert.NoError(t, err) verifyData, err := daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) + assert.Equal(t, "26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480db5eb389fe4a7fcba73975e3ebc5f1f7f040022a51e20a94a1a67471fc0f4dfb23eaeff14ce3fd2d0928f644b6d6b11d5ac5e0f3f19d94f4e12b775d39c7d970363fe6ccd9b23c006b8dc25512cb7b9d1d85521c4893983e52f7e9844a7dc8eca", hex.EncodeToString(verifyData)) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} @@ -906,7 +905,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) + assert.Equal(t, "30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d72598f7a0eb89cf859212035316e58dc2d291a73b84a36d61b94166ece830f7a6316bb378e098602ffc0e66adc1e33c8608a3b39da9b1c0565a19cbf3ab6415c7bb3ddfeb6d63d204c4670f5777fdee9ffa5f6aec4085924f4af2fe27142eec0cd2", hex.EncodeToString(verifyData)) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} @@ -915,7 +914,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) + assert.Equal(t, "38122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588a695aaff41dcefb301a7b597c201940b3c64439e4b74c23b7280def1d1b160e4121129f7f0015f3e880b9b7594de04a5a7445c20b31d8786754ed6f9fbafe69b24d738055c5cad62a502e9b7d717aa45636022a24c0a83bbf411157054957638", hex.EncodeToString(verifyData)) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} @@ -924,7 +923,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, 
"30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + assert.Equal(t, "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} @@ -933,7 +932,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + assert.Equal(t, "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} @@ -942,7 +941,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + assert.Equal(t, "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) // 45 chunks originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} @@ -950,7 +949,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, 
"580ec95306dc32ce5d3bda93c43d9a3e24d2fd801aefa5097757777888ff7de5068f96617616075651e4a90e7687cab788cc0bc470d9e4f38f2a4e1bd0949a75b99fd46a5eb5e896e295d823d80622f284deff3c25b56164d227dd2c382b2dbd918ed9c1eef973e9bbcd773b964128abb79bade5d595a2207ddd4062830145abc49c9ca0ca45a3a479934fbf86ffd42a12b17c59932237dee5d6fcd8466baea4", hex.EncodeToString(verifyData)) + assert.Equal(t, "237ce1b89c4534d34df2f0102af375a93128e88d5f762d3af6d109b63986fef525261e41884dc3b9998b8929b38a7ed6a0b5c91e98f7bc280971a0ef265680cc902969e14a0716e5ff34fc4cdabf7e0319f8456301d1e5643be4ab4f86fe4dbcfa26594ffbf3a496ab07db4eb2471eb5a669bac77d6ff53dd202957a0d5b27f8a4fc94de92e01715a6c9d7cb54f1d25ccc13a7096b62592edb5c0f4ff6d45545", hex.EncodeToString(verifyData)) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} @@ -959,26 +958,66 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) + assert.Equal(t, "46aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085a21962439624643e7ad898db06e9bf9432d937f3ae8cf465f1e92501497314abec74c632b4cde93d73acd1235755a4de8ef007cb7cb577864c81c4d5a80bf68e1b2bed33f54fa82b4f197b6614f69c4cfbbf2b63df630801d8abd8020a52b845", hex.EncodeToString(verifyData)) } -func TestCodecV4BlobCompressDecompress(t *testing.T) { - blobString := 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" - blobBytes, err := hex.DecodeString(blobString) +func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + assert.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv4.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv4.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() assert.NoError(t, err) - compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + batch, err := codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - blob, err := makeBlobCanonical(compressed) + daChunksRawTx, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // 
assert blocks in first chunk
+	assert.Equal(t, 2, len(daChunksRawTx[0].Blocks))
+	assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0])
+	assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1])
-
+	// assert blocks in second chunk
+	assert.Equal(t, 2, len(daChunksRawTx[1].Blocks))
+	daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil
+	assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0])
+	daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil
+	assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1])
-	res := bytesFromBlobCanonical(blob)
-	compressedBytes := res[:]
-	magics := []byte{0x28, 0xb5, 0x2f, 0xfd}
-	compressedBytes = append(magics, compressedBytes...)
+	blob := batch.Blob()
+	err = codecv4.DecodeTxsFromBlob(blob, daChunksRawTx)
 	assert.NoError(t, err)
-	decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes)
 	assert.NoError(t, err)
-	assert.Equal(t, blobBytes, decompressedBlobBytes)
+
+	// assert transactions in first chunk
+	assert.Equal(t, 2, len(daChunksRawTx[0].Transactions))
+	// the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages
+	assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0]))
+	assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1]))
+
+	assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String())
+	assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String())
+
+	// assert transactions in second chunk
+	assert.Equal(t, 2, len(daChunksRawTx[1].Transactions))
+	// the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages
+	assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0]))
+	assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1]))
 }
diff --git a/encoding/da_test.go b/encoding/da_test.go
index 2953e71..fb2fd90 100644
--- a/encoding/da_test.go
+++ b/encoding/da_test.go
@@ -1,12 +1,14 @@
 package encoding
 
 import (
+	"encoding/hex"
 	"encoding/json"
 	"os"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
 
+	"github.com/scroll-tech/da-codec/encoding/zstd"
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/core/types"
 	"github.com/scroll-tech/go-ethereum/log"
@@ -112,6 +114,27 @@ func TestEmptyBatchRoots(t *testing.T) {
 	assert.Equal(t, common.Hash{}, emptyBatch.WithdrawRoot())
 }
 
+func TestBlobCompressDecompress(t *testing.T) {
+	blobString := 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" + blobBytes, err := hex.DecodeString(blobString) + assert.NoError(t, err) + + compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + assert.NoError(t, err) + + blob, err := makeBlobCanonical(compressed) + assert.NoError(t, err) + + res := bytesFromBlobCanonical(blob) + compressedBytes := res[:] + magics := []byte{0x28, 0xb5, 0x2f, 0xfd} + compressedBytes = append(magics, compressedBytes...) 
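+	// editorial note, an assumption inferred from this test rather than stated
+	// in the patch: zstd.CompressScrollBatchBytes appears to emit a zstd frame
+	// with its 4-byte magic number (0x28 0xb5 0x2f 0xfd) stripped, so the test
+	// re-prepends the magic before calling decompressScrollBlobToBatch below.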
+ + decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes) + assert.NoError(t, err) + assert.Equal(t, blobBytes, decompressedBlobBytes) +} + func readBlockFromJSON(t *testing.T, filename string) *Block { data, err := os.ReadFile(filename) assert.NoError(t, err) From 386a24e119cc6eba3ff6c43bd85ea2ccd378cc73 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 16 Oct 2024 23:02:21 +0800 Subject: [PATCH 101/126] fix goimport --- encoding/da_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/encoding/da_test.go b/encoding/da_test.go index fb2fd90..f4e7470 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -8,10 +8,11 @@ import ( "github.com/stretchr/testify/assert" - "github.com/scroll-tech/da-codec/encoding/zstd" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding/zstd" ) func TestMain(m *testing.M) { From 5135ec3501d17e4944e32ac498a7f19e2e43bc25 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 01:16:44 +0800 Subject: [PATCH 102/126] add StandardTestCases --- encoding/codecv1_test.go | 102 +++++++++++++ encoding/codecv2.go | 8 +- encoding/codecv2_test.go | 149 +++++++++++++++++++ encoding/codecv3_test.go | 151 ++++++++++++++++++++ encoding/codecv4.go | 18 +-- encoding/codecv4_test.go | 299 +++++++++++++++++++++++++++++++++++++++ encoding/da.go | 22 ++- 7 files changed, 730 insertions(+), 19 deletions(-) diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index c29e57e..b859bc6 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -2,12 +2,16 @@ package encoding import ( "encoding/hex" + "math" "strings" "testing" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV1BlockEncode(t *testing.T) { @@ -885,3 +889,101 @@ func TestCodecV1DecodeDAChunksRawTx(t *testing.T) { assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) } + +func TestCodecV1BatchStandardTestCases(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + assert.NoError(t, err) + + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
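+	// (For context, assuming the layout described in constructBlobPayload's
+	// metadata comment: 2 bytes of num_chunks plus a 4-byte chunk size per
+	// chunk slot, hence the `*4 + 2` term in nRowsData below.)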
+ nRowsData := maxEffectiveBlobBytes - (int(codecv1.MaxNumChunksPerBatch())*4 + 2) + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", expectedy: "304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd08", expectedBlobVersionedHash: "01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", expectedBatchHash: "7d09040c00525af4aff851ba50556d4bc25a28a2bee04d4d02837fdc31da8e5a"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "1c1d4bd5153f877d799853080aba243f2c186dd6d6064eaefacfe715c92b6354", expectedy: "24e80ed99526b0d15ba46f7ec682f517576ddae68d5131e5d351f8bae06ea7d3", expectedBlobVersionedHash: "01c57cf97209ce41aaca340099e8eb80984bc54a4f780013cfb9f81bc0641d46", expectedBatchHash: "948fe7a7665c79b975d0f73d47a60150f5f2637fe229f46a0bbacdb282c4359b"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "152c9ccfcc2884f9891f7adce2de110cf9f85bfd0e21f0933ae0636390a84d41", expectedy: "5f6f532676e25b49e2eae77513fbeca173a300b434c0a5e24fa554b68e27d582", expectedBlobVersionedHash: "01f2d2978e268e82902df85e773ba3ce0bfbd47067595d876378f062a76c9645", expectedBatchHash: "73a883480442f4ad822d7d8a5660f91ec1b30c2837594175861453ed2aa20c43"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "62100f5381179ea7db7aa8fdedb0f7fc7b82730b75432d50ab41f80aeebe45a3", expectedy: "5b1f6e7a54907ddc06871853cf1f5d53bf2de0df7b61d0df84bc2c3fb80320cd", expectedBlobVersionedHash: "0103e951d9f758f8c1d073e4dc80a1813c4e0f12454e59d5cf9459baad57a120", expectedBatchHash: "ec0f1219d073a3a06deabf28bbf1dd94c483334a6763a8ae171debfd70c28dba"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "2d94d241c4a2a8d8f02845ca40cfba344f3b42384af2045a75c82e725a184232", expectedy: "302416c177e9e7fe40c3bc4315066c117e27d246b0a33ef68cdda6dd333c485c", expectedBlobVersionedHash: "0197b715c8f9f8c8e295fdd390ee9a629118432f72067398695d9df3c840b7b0", expectedBatchHash: "e84c347c69741d51c26adcffa0e0d23a2621989f5442b6f91a5b2ef409844b4d"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "7227567e3b1dbacb48a32bb85e4e99f73e4bd5620ea8cd4f5ac00a364c86af9c", expectedy: "2eb3dfd28362f35f562f779e749a555d2f1f87ddc716e95f04133d25189a391c", expectedBlobVersionedHash: "01997280b92d3a2b0e6616a57f931e2876c602cf6401617390ad9f6c044c7f9a", expectedBatchHash: "91c1bbb91dda9c5b2a4b11df8433dc62e2690088cb9210a56fe1efa4132fc122"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "1128ac3e22ced6af85be4335e0d03a266946a7cade8047e7fc59d6c8be642321", expectedy: "2d9b16422ce17f328fd00c99349768f0cb0c8648115eb3bd9b7864617ba88059", expectedBlobVersionedHash: "011747bb3b64aaa020e628df02b5dde642b8eefe2acd3bd8768d264b0b230fe2", expectedBatchHash: "7ed0e502c8be58184a6fad9ff83efb2aa4d42632daf00a6527718e041098ece9"}, + // max number of chunks all non-empty + {chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, 
{"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1a4025a3d74e70b511007dd55a2e252478c48054c6383285e8a176f33d99853b", expectedy: "12071ac2571c11220432a27b8be549392892e9baf4c654748ca206def3843940", expectedBlobVersionedHash: "0154c5ae7e60a6cf71c4e1694a4c511d04f9a64e0ebf491fa61227419b9bad15", expectedBatchHash: "eefc56cb4dd1c43415717120fd3d58372e6b052e1533add4f379b58f5acce242"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "72714cc4a0ca75cee2d543b1f958e3d3dd59ac7df0d9d5617d8117b65295a5f2", expectedy: "4ebb690362bcbc42321309c210c99f2ebdb53b3fcf7cf3b17b78f6cfd1203ed3", expectedBlobVersionedHash: "0179bda640290da308c6b4860463db2abb5da3573f188d9db86109644b8888e6", expectedBatchHash: "098cf07b17fd5c684a6e5c47b45c3ead1df8565d785fc508f7e83b1ac43515dc"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "70eb5b4db503e59413238eef451871c5d12f2bb96c8b96ceca012f4ca0114727", expectedy: "568d0aaf280ec83f9c81ed2d80ecbdf199bd72dafb8a350007d37ea82997e455", expectedBlobVersionedHash: "01160d9c7e52ada63878060067f415c0d458143055099d27373842e1fe465542", expectedBatchHash: "c9a7112e924a4799d748b5ac5999fd4c84d6562c2e0cd49c01dbab748de96397"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "03db68ae16ee88489d52db19e6111b25630c5f23ad7cd14530aacf0cd231d476", expectedy: "24527d0b0e93b3dec0060c7b128975a8088b3104d3a297dc807ab43862a77a1a", expectedBlobVersionedHash: "0102b93d4c8ea59ffdd488756a2696702071ac1d90d3140089d737e3babd0213", expectedBatchHash: "8fc9281433f730d9c3efd1fb73f710bdea62f119d61e6b99b34360bda8312d33"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "677670193f73db499cede572bcb55677f0d2f13d690f9a820bd00bf584c3c241", expectedy: "1d85677f172dbdf4ad3094a17deeb1df4d7d2b7f35ecea44aebffa757811a268", expectedBlobVersionedHash: "014e56a635bc97d4fab7b8e33da88453f8050efafe00934210506d3c3b8e63ad", expectedBatchHash: "0109a28c89d11af8bce800c5fd43cbb004e201c17391bc9336f98feef21eb2aa"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "22935042dfe7df771b02c1f5cababfe508869e8f6339dabe25a8a32e37728bb0", expectedy: "48ca66fb5a094401728c3a6a517ffbd72c4d4d9a8c907e2d2f1320812f4d856f", expectedBlobVersionedHash: "017c817651831f769e01728789b6ee29ccd219d4bfa2830c1258b053715592fc", expectedBatchHash: "7a1095c87f9cef674f7049f7b43b0452510dcff44035d8e9bff30adf53afd1f8"}, + } { + chunks := []*Chunk{} + + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } + + blob, blobVersionedHash, z, err := 
codecv1.(*DACodecV1).constructBlobPayload(chunks, int(codecv1.MaxNumChunksPerBatch()), true /* use mock */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV3), + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f5821ca..b4a86b0 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -158,7 +158,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if !useMockTxData && len(batchBytes) > 131072 { + if !useMockTxData && len(batchBytes) > minCompressedDataCheckSize { // Check compressed data compatibility. if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) @@ -166,7 +166,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } } - if len(blobBytes) > 126976 { + if len(blobBytes) > maxEffectiveBlobBytes { log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } @@ -265,7 +265,7 @@ func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error return false, err } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if len(batchBytes) <= 131072 { + if len(batchBytes) <= minCompressedDataCheckSize { return true, nil } if err = CheckCompressedDataCompatibility(blobBytes); err != nil { @@ -287,7 +287,7 @@ func (d *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error return false, err } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. 
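 	// (editorial: minCompressedDataCheckSize is assumed to name this same 128 KiB
 	// threshold, i.e. 131072 bytes, in place of the magic number below.)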
- if len(batchBytes) <= 131072 { + if len(batchBytes) <= minCompressedDataCheckSize { return true, nil } if err = CheckCompressedDataCompatibility(blobBytes); err != nil { diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index c386d0d..063d168 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -2,12 +2,16 @@ package encoding import ( "encoding/hex" + "math" "strings" "testing" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV2BlockEncode(t *testing.T) { @@ -879,3 +883,148 @@ func TestCodecV2DecodeDAChunksRawTx(t *testing.T) { assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) } + +func TestCodecV2BatchStandardTestCases(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + assert.NoError(t, err) + + // Taking into consideration compression, we allow up to 5x of max blob bytes. + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. + nRowsData := 5*maxEffectiveBlobBytes - (int(codecv2.MaxNumChunksPerBatch())*4 + 2) + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "48c1e31334d6d6dff9f5b38f703c147dc5f0893882fbdcb22ef5fcef0f25f2ff"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "8918c151720f8497e29ed68ab94a43a32689dcd96784784b81c0fef36b751142"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "6a3e8f32ea6f3025679a912992a7fa813849a7e1f46c8d413fd14d188d497bdb"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "cfbe74dd07beed8dd9ee2be06ebd869e000148f1886ad6134e6609a3e09520e6"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: 
"0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "f042d7da2c8af0d9edadd2997ddfc28af646afc513489ac0ab8881c9b18e71bc"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "f9c741682ed579af9c9f21d1c90af830276731ae699ee263fa1278076839e015"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "d0846fec4a9158499553e4824cf0ff3fdb01fab93494883d4f8911719ff163ee"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "f20c05457800dc52d87858d72a2b54c223f401b150af00b47994964a348ac96b"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "9effb4102f20c8634655cee9f109215834e7828beadaebe167595f1d1b871689"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "9631c4dcdbd404272b4682db4592a78e7cd8bf81da34160cc6ff0e9eb4703f70"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "4b369fcaef4a6fd5dbd6bd89e3983f2ff72abf0a19fdabf207c314369500d8e9"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "5b116a800222102b4cca07a377de69355c33eb3f5262a3b6b1eab37ee680c04a"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, 
{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "27af1cbf60123f73bef96464839578875a8bebf39edc786914aa7a0c3a4e3a44"}, + } { + chunks := []*Chunk{} + + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } + + blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, int(codecv2.MaxNumChunksPerBatch()), true /* use mock */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV1{ + daBatchV0: daBatchV0{ + version: uint8(CodecV2), + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 4703f2e..552a167 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -3,11 +3,14 @@ package encoding import ( "encoding/hex" "encoding/json" + "math" "strings" "testing" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -1020,3 +1023,151 @@ func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) } + +func TestCodecV3BatchStandardTestCases(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + assert.NoError(t, err) + + // Taking into consideration compression, we allow up to 5x of max blob bytes. + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
+	nRowsData := 5*maxEffectiveBlobBytes - (int(codecv3.MaxNumChunksPerBatch())*4 + 2)
+
+	repeat := func(element byte, count int) string {
+		result := make([]byte, 0, count)
+		for i := 0; i < count; i++ {
+			result = append(result, element)
+		}
+		return "0x" + common.Bytes2Hex(result)
+	}
+
+	for _, tc := range []struct {
+		chunks                    [][]string
+		expectedz                 string
+		expectedy                 string
+		expectedBlobVersionedHash string
+		expectedBatchHash         string
+	}{
+		// single empty chunk
+		{chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "edde6b1becf302856884f0b9da5879d58eeb822ddab14a06bacd8de9276dbc79"},
+		// single non-empty chunk
+		{chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "4c30ec3d03ecf70c479e802640a185cadf971e61acf68dac149ac73bdc645195"},
+		// multiple empty chunks
+		{chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "31fd0237208587df3ddbea413673b479e2daa84fd1143a519940267c37257b1a"},
+		// multiple non-empty chunks
+		{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "0e0e8fd8b4f8ceb0215a29cc8b95750c0d1969706573af8872f397747809a479"},
+		// empty chunk followed by non-empty chunk
+		{chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "d6b97dde29d4b8afb1a036ee54757af4087c939cb96cf17c2720e9f59eff19da"},
+		// non-empty chunk followed by empty chunk
+		{chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "3d56e12359c8b565f9cbe1c8f81e848be4635d9df84bc6ef0eb9986a15e08c20"},
+		// max number of chunks all empty
+		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "2e8078e277221a0d0e235ef825eef02653677bd50e259aeed64af5b95477645c"},
+		// max number of chunks all non-empty
+		{chunks: [][]string{
+			{"0x0a"},
+			{"0x0a0b"},
+			{"0x0a0b0c"},
+			{"0x0a0b0c0d"},
+			{"0x0a0b0c0d0e"},
+			{"0x0a0b0c0d0e0f"},
+			{"0x0a0b0c0d0e0f10"},
+			{"0x0a0b0c0d0e0f1011"},
+			{"0x0a0b0c0d0e0f101112"},
+			{"0x0a0b0c0d0e0f10111213"},
+			{"0x0a0b0c0d0e0f1011121314"},
+			{"0x0a0b0c0d0e0f101112131415"},
+			{"0x0a0b0c0d0e0f10111213141516"},
+			{"0x0a0b0c0d0e0f1011121314151617"},
+			{"0x0a0b0c0d0e0f101112131415161718"},
+			{"0x0a0b0c0d0e0f10111213141516171819"},
+			{"0x0a0b0c0d0e0f101112131415161718191a"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"},
+		}, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "e366eeacd45fbc2f43756f66d0a8f82f7f390a9aa7795df82e7df2d724856e7e"},
+		// single chunk blob full
+		{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "88e6df6a5e1112485995fe5957d57c90ff306343a9d8d80831b7a6c041daf728"},
+		// multiple chunks blob full
+		{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "7bd97fc7c8c7e918029e5bd85d3c9e0335117475c449d5c6dd24e5af9d55cfc6"},
+		// max number of chunks only last one non-empty not full blob
+		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "8b50a41e08000b7617de7204d8082870c8446f591fadffcb5190fdeadf47fae5"},
+		// max number of chunks only last one non-empty full blob
+		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "cc0592160b2fcdb58750d29c36662b55437f4bc69ba3d45a965590f534a0228c"},
+		// max number of chunks but last is empty
+		{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "043a40c8fbc4edb6a820ba4162f1368d157d1d59c07f969b2c584cc6a47385ca"},
+	} {
+		chunks := []*Chunk{}
+
+		for _, c := range tc.chunks {
+			block := &Block{Transactions: []*types.TransactionData{}}
+
+			for _, data := range c {
+				tx := &types.TransactionData{Type: 0xff, Data: data}
+				block.Transactions = append(block.Transactions, tx)
+			}
+
+			chunk := &Chunk{Blocks: []*Block{block}}
+			chunks = append(chunks, chunk)
+		}
+
+		blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, int(codecv3.MaxNumChunksPerBatch()), true /* use mock */)
+		require.NoError(t, err)
+		actualZ := hex.EncodeToString(z[:])
+		assert.Equal(t, tc.expectedz, actualZ)
+		assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash)
+
+		_, y, err := kzg4844.ComputeProof(blob, *z)
+		require.NoError(t, err)
+		actualY := hex.EncodeToString(y[:])
+		assert.Equal(t, tc.expectedy, actualY)
+
+		// Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000)
+		dataBytes := make([]byte, 32*len(chunks))
+		for i := range chunks {
+			copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
+		}
+		dataHash := crypto.Keccak256Hash(dataBytes)
+
+		batch := daBatchV3{
+			daBatchV0: daBatchV0{
+				version:              uint8(CodecV3),
+				batchIndex:           6789,
+				l1MessagePopped:      101,
+				totalL1MessagePopped: 10101,
+				dataHash:             dataHash,
+				parentBatchHash:      common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}),
+			},
+			lastBlockTimestamp: 192837,
+			blobVersionedHash:  blobVersionedHash,
+			blob:               blob,
+			z:                  z,
+		}
+		batch.blobDataProof, err = batch.blobDataProofForPICircuit()
+		require.NoError(t, err)
+		assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash())
+	}
+}
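The codecv4 hunks that follow only rename enableCompress to enableCompression; the payload layout is unchanged: the first byte of blobBytes is a flag, 1 for a zstd-compressed batch payload and 0 for the raw payload. A minimal decoding sketch of that envelope, assuming it sits in the encoding package next to decompressScrollBlobToBatch (shown in the da.go hunk further down); the name decodeBlobEnvelope and the error strings are illustrative only:

func decodeBlobEnvelope(blobBytes []byte) ([]byte, error) {
	if len(blobBytes) == 0 {
		return nil, errors.New("empty blob payload")
	}
	switch blobBytes[0] {
	case 1:
		// flag 1: the remainder is a zstd-compressed batch payload
		return decompressScrollBlobToBatch(blobBytes[1:])
	case 0:
		// flag 0: the remainder is the raw batch payload
		return blobBytes[1:], nil
	default:
		return nil, fmt.Errorf("unsupported compression flag: %d", blobBytes[0])
	}
}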
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index 3011336..e76bc41 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -70,13 +70,13 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 		return nil, err
 	}
 
-	enableCompress, err := d.CheckBatchCompressedDataCompatibility(batch)
+	enableCompression, err := d.CheckBatchCompressedDataCompatibility(batch)
 	if err != nil {
 		return nil, err
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), enableCompress, false /* no mock */)
+	blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), enableCompression, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -134,7 +134,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) {
 }
 
 // constructBlobPayload constructs the 4844 blob payload.
-func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
+func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
 	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 	metadataLength := 2 + maxNumChunksPerBatch*4
@@ -194,7 +194,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 	copy(challengePreimage[0:], hash[:])
 
 	var blobBytes []byte
-	if enableCompress {
+	if enableCompression {
 		// blobBytes represents the compressed blob payload (batchBytes)
 		var err error
 		blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes)
@@ -213,7 +213,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 		blobBytes = append([]byte{0}, batchBytes...)
 	}
 
-	if len(blobBytes) > 126976 {
+	if len(blobBytes) > maxEffectiveBlobBytes {
 		log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
 		return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size")
 	}
@@ -254,11 +254,11 @@ func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64,
 		return 0, 0, err
 	}
 	var blobBytesLength uint64
-	enableCompress, err := d.CheckChunkCompressedDataCompatibility(c)
+	enableCompression, err := d.CheckChunkCompressedDataCompatibility(c)
 	if err != nil {
 		return 0, 0, err
 	}
-	if enableCompress {
+	if enableCompression {
 		blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
 		if err != nil {
 			return 0, 0, err
@@ -277,11 +277,11 @@ func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64,
 		return 0, 0, err
 	}
 	var blobBytesLength uint64
-	enableCompress, err := d.CheckBatchCompressedDataCompatibility(b)
+	enableCompression, err := d.CheckBatchCompressedDataCompatibility(b)
 	if err != nil {
 		return 0, 0, err
 	}
-	if enableCompress {
+	if enableCompression {
 		blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
 		if err != nil {
 			return 0, 0, err
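The two estimators above mirror the payload construction: the projected blob length is one flag byte plus either the zstd-compressed or the raw batch bytes, and only then padded to field-element rows. A compact sketch of that accounting, assuming a helper beside the estimators (estimateBlobBytesLength is a hypothetical name):

// estimateBlobBytesLength mirrors the size accounting of the estimators:
// 1 byte for the compression flag plus the chosen payload form.
func estimateBlobBytesLength(batchBytes []byte, enableCompression bool) (uint64, error) {
	if enableCompression {
		compressed, err := zstd.CompressScrollBatchBytes(batchBytes)
		if err != nil {
			return 0, err
		}
		return 1 + uint64(len(compressed)), nil
	}
	return 1 + uint64(len(batchBytes)), nil
}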
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index 4576997..c3f84ef 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -3,11 +3,14 @@ package encoding
 import (
 	"encoding/hex"
 	"encoding/json"
+	"math"
 	"strings"
 	"testing"
 
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/core/types"
+	"github.com/scroll-tech/go-ethereum/crypto"
+	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -1021,3 +1024,299 @@ func TestCodecV4DecodeDAChunksRawTx(t *testing.T) {
 	assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0]))
 	assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1]))
 }
+
+func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) {
+	codecv4, err := CodecFromVersion(CodecV4)
+	assert.NoError(t, err)
+
+	// Taking into consideration compression, we allow up to 5x of max blob bytes.
+	// We then ignore the metadata rows for MaxNumChunksPerBatch chunks, plus 1 byte for the compression flag.
+	nRowsData := 5*maxEffectiveBlobBytes - (int(codecv4.MaxNumChunksPerBatch())*4 + 2) - 1
+
+	repeat := func(element byte, count int) string {
+		result := make([]byte, 0, count)
+		for i := 0; i < count; i++ {
+			result = append(result, element)
+		}
+		return "0x" + common.Bytes2Hex(result)
+	}
+
+	for _, tc := range []struct {
+		chunks                    [][]string
+		expectedz                 string
+		expectedy                 string
+		expectedBlobVersionedHash string
+		expectedBatchHash         string
+	}{
+		// single empty chunk
+		{chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"},
+		// single non-empty chunk
+		{chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"},
+		// multiple empty chunks
+		{chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"},
+		// multiple non-empty chunks
+		{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"},
+		// empty chunk followed by non-empty chunk
+		{chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"},
+		// non-empty chunk followed by empty chunk
+		{chunks: [][]string{{"0x070809"}, {}}, expectedz: "6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"},
+		// max number of chunks all empty
+		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"},
+		// max number of chunks all non-empty
+		{chunks: [][]string{
+			{"0x0a"},
+			{"0x0a0b"},
+			{"0x0a0b0c"},
+			{"0x0a0b0c0d"},
+			{"0x0a0b0c0d0e"},
+			{"0x0a0b0c0d0e0f"},
+			{"0x0a0b0c0d0e0f10"},
+			{"0x0a0b0c0d0e0f1011"},
+			{"0x0a0b0c0d0e0f101112"},
+			{"0x0a0b0c0d0e0f10111213"},
+			{"0x0a0b0c0d0e0f1011121314"},
+			{"0x0a0b0c0d0e0f101112131415"},
+			{"0x0a0b0c0d0e0f10111213141516"},
+			{"0x0a0b0c0d0e0f1011121314151617"},
+			{"0x0a0b0c0d0e0f101112131415161718"},
+			{"0x0a0b0c0d0e0f10111213141516171819"},
+			{"0x0a0b0c0d0e0f101112131415161718191a"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"},
+			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"},
+		}, expectedz: "53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"},
+		// single chunk blob full
+		{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "3eeece6f3835294dfb88d1355e5f49eb84267834bb54d2e207f5a4f5c74ddc85", expectedy: "0011b467f17dae185b1b8ac883d3281a16b807429e3bf3d3ecd05bfab9318aae", expectedBlobVersionedHash: "01a959b584330e8a4ab7cc5581cad98f91e53a50287b94c4067de9f9dd76395e", expectedBatchHash: "ef245e1b3a83ed12efb33fe3dd5dfa424324e257618c4e6a84c39af309c004f1"},
+		// multiple chunks blob full
+		{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "427d5c348b1a88e775c96056b5370d0c9fc208f580786a39fa2d887ad5a90737", expectedy: "23569809218eb69653d4b5ac838851c7fa24857ee263a7f932aeecfa67306d8e", expectedBlobVersionedHash: "015c46e076b4acbe92a7f60667f5da0da9d174833dbd9965f906acea718c3a5b", expectedBatchHash: "5fb400716644aecc161766470807994b3ab347a62e7f3e102998f2a225b0d812"},
+		// max number of chunks only last one non-empty not full blob
+		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1e510916d60ae4a6ccc1643ea7e088c1d0c55b65bc296a2924eeed85a253b06e", expectedy: "4fb0517baf002fabe917b8050b12544917093de4ee793658e962e9b9f3e11f40", expectedBlobVersionedHash: "017df5d06fd7763790000c2c79a0c7c00b4f6cb97b83d0590b42a6f70f0c351d", expectedBatchHash: "621aba1e754a7ddba80dcfcb1e876c68b80a085df6d7510f7c9176c60a9b1f50"},
+		// max number of chunks only last one non-empty full blob
+		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "7092af91dfde0ae343d674690ca69bdc592e42ebf34e13ceceb192964a266451", expectedy: "2ab672a3240198e8426631b4a8d6bac38a03cb16b3d0cf70c96fd848e9cbe887", expectedBlobVersionedHash: "01b57e3d16683204cd2614188184d39fc37b219babcb3f4035e061d169b7aa24", expectedBatchHash: "11c5c63fdf42b76beff06c9265c49169b8c8d2783c65d8fd3bca5e5668c55ab6"},
+		// max number of chunks but last is empty
+		{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: "4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"},
+	} {
+		chunks := []*Chunk{}
+
+		for _, c := range tc.chunks {
+			block := &Block{Transactions: []*types.TransactionData{}}
+
+			for _, data := range c {
+				tx := &types.TransactionData{Type: 0xff, Data: data}
+				block.Transactions = append(block.Transactions, tx)
+			}
+
+			chunk := &Chunk{Blocks: []*Block{block}}
+			chunks = append(chunks, chunk)
+		}
+
+		blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, int(codecv4.MaxNumChunksPerBatch()), true /* enable encode */, true /* use mock */)
+		require.NoError(t, err)
+		actualZ := hex.EncodeToString(z[:])
+		assert.Equal(t, tc.expectedz, actualZ)
+		assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash)
+
+		_, y, err := kzg4844.ComputeProof(blob, *z)
+		require.NoError(t, err)
+		actualY := hex.EncodeToString(y[:])
+		assert.Equal(t, tc.expectedy, actualY)
+
+		// Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000)
+		dataBytes := make([]byte, 32*len(chunks))
+		for i := range chunks {
+			copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
+		}
+		dataHash := crypto.Keccak256Hash(dataBytes)
+
+		batch := daBatchV3{
+			daBatchV0: daBatchV0{
+				version:              uint8(CodecV4),
+				batchIndex:           6789,
+				l1MessagePopped:      101,
+				totalL1MessagePopped: 10101,
+				dataHash:             dataHash,
+				parentBatchHash:      common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}),
+			},
+			lastBlockTimestamp: 192837,
+			blobVersionedHash:  blobVersionedHash,
+			blob:               blob,
+			z:                  z,
+		}
+		batch.blobDataProof, err = batch.blobDataProofForPICircuit()
+		require.NoError(t, err)
+		assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash())
+	}
+}
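For a sense of scale in the two tests here: assuming MaxNumChunksPerBatch() is 45 for codecv4 (consistent with the 45-entry cases above) and maxEffectiveBlobBytes = 131072/32*31 = 126976 as defined in the da.go hunk earlier, the row budgets work out as follows:

const (
	assumedMaxEffectiveBlobBytes = 131072 / 32 * 31          // 126976, see da.go above
	assumedMaxChunks             = 45                        // assumption, matches the test tables
	metadataBytes                = assumedMaxChunks*4 + 2    // 182
)

// compression enabled:  5*126976 - 182 - 1 = 634697 bytes of row data
// compression disabled: 126976 - 182 - 1   = 126793 bytes of row data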
[][]string{{}, {"0x010203"}}, expectedz: "49ebeb74372d05b335f05d0e48f3155955c27ec9cac92a03a9d85050e24efdd6", expectedy: "7088f4810a4d61bcadcdf2debff998027eb10caa70474db18a8228ef4edc6cd7", expectedBlobVersionedHash: "015ea2df6fc4582fd704ae55157c1311f2d680240c8b8805e3435856a15da91b", expectedBatchHash: "cf4bee00c5e044bc6c9c168a3245f8edfcdeac602d63b2e75b45faa7b95d8c16"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "2374a8bcd2fcbfae4cc43a5e21a0c69cd206071e46db2c8a3c9bb7e9b8c60120", expectedy: "51b51d261d897e81e94498493b70ec425320002d9390be69b63c87e22871d5bf", expectedBlobVersionedHash: "01600a0cb0fb308f1202172f88764bafa9deddab52331a38e767267b6785d2a3", expectedBatchHash: "53cc0ff17ca71e1711f6b261537fc8da28a5d289325be33d5286920417fe9a6e"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "6908503d26b56b1eb9c94d25e8e5d6e8a14e48d3ac38b063d2bc20c25a361fb5", expectedy: "22d016c0d7ef4d74e371522a9da62a43bcf2dc69be21e4133d35bf8e6fe44f68", expectedBlobVersionedHash: "01baf85d7d36b7d7df4c684b78fa5d3f94dd893f92c8c4cc8ee26a67b2fce588", expectedBatchHash: "7585f286302ba26219b1229da0fd1f557f465fb244bd1839eef95df1d75f1457"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: "5fcba58abcc9a0ae4a3780a2a621e57e8f8c5d323134aa9623579e698e4d18b1", expectedy: "69570d3c97e9573b5529b213055b814d5e4b7dda2bb2c3a7d06456c157ab338d", expectedBlobVersionedHash: "018cd2721e76c37374e450382e2e53faa24393cfbcbbe134e1756392c8f1a4fc", expectedBatchHash: "52948b79f4457473836b44ea9bbb2c6fc61b5937fc881b95b2baa78af0e0623b"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "53dde3d5fe1a53f364a8a865e746d3c7ca7fadadbdb816c30b49958057f1e9d9", expectedy: "3c1f69a7180f98a8a39f26189ee73fca4fbc41ca91a5ae02b521625bd67628e7", expectedBlobVersionedHash: "01d9acf02b1ef5213e0bd530e1cf99d2a19f622318bf3d97c7ec693aa3a7fdb1", expectedBatchHash: "b9411a190cc9db47fd31c009efb7b2275c235f511780f0ed6874242cb2eb7b72"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "1843d3229313afb023d210a0be73f64fba2fe20b7ae14b2e1df37ebe32f55afa", expectedy: "29db4ab0e596593fad50784a3a6f802ba1d9daf760c09f64bdc3d1899b247d97", expectedBlobVersionedHash: "01e337f571c6079bb6c89dab463ff3b6b2b5139fbd4f5446996fea8c0df94c65", expectedBatchHash: "56ce765d11a10b89fb412c293756299fd803485aca595c6de8a35c790486f62c"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "3df579b9d11368e712b9b23318e8ea2dfcc5d5a647b16fb8254d017b8804f4b1", expectedy: "4da6e30ac69fb2d65de9b9306de0fa15a2cee87aee245e831f313366c0809b46", expectedBlobVersionedHash: "01641976b8a50f5aa3d277f250904caae681a4e090e867c6abdbfe03e216003a", expectedBatchHash: "5160fc712e9dbaa52396b7662f2e393533a5b25457e5ca9475bc8fd27f24d78a"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "47ca3393ebaef699800bd666ff119c1978e938e04d22c9a024a1b17f523281f9", expectedy: "380704fe5da08d69a94c8af57f17153076f6eb20d5e69c60b343fb66c6266101", expectedBlobVersionedHash: "014aac5dbd6f5456f68635c6674caa374faa0dbe012c5800e0364749485bf1bf", expectedBatchHash: "c674d48d3a9146049b1ea2993d5cc070dd76617fa550234563591c366654d6c6"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 
100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "501e762800ca76490b61114d8a84a12f1f72fce71252f7c294a5f5b4190da6b1", expectedy: "524e879ce867b79cbeffd8aa5241731f5562addfc246dda20bb857eb55158399", expectedBlobVersionedHash: "01504b1eb6894cc96a8cac8f02fba838c086171cbb879ccd9cdeb44f9d4237f5", expectedBatchHash: "59a97a5d8e4206bb283b524b2d48a707c8869c87dea6563dd99dcb367bed6412"}, + } { + chunks := []*Chunk{} + + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } + + blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, int(codecv4.MaxNumChunksPerBatch()), false /* disable encode */, true /* use mock */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV3{ + daBatchV0: daBatchV0{ + version: uint8(CodecV4), + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + lastBlockTimestamp: 192837, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + batch.blobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} diff --git a/encoding/da.go b/encoding/da.go index b12d8fd..0a732c9 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -27,6 +27,17 @@ const blockContextByteSize = 60 // txLenByteSize is the size of the transaction length in bytes. const txLenByteSize = 4 +// maxBlobBytes is the maximum number of bytes that can be stored in a blob. +const maxBlobBytes = 131072 + +// maxEffectiveBlobBytes is the maximum number of bytes that can be stored in a blob. +// We can only utilize 31/32 of a blob. +const maxEffectiveBlobBytes = maxBlobBytes / 32 * 31 + +// minCompressedDataCheckSize is the minimum size of compressed data to check compatibility. +// only used in codecv2 and codecv3. +const minCompressedDataCheckSize = 131072 + // Block represents an L2 block. type Block struct { Header *types.Header @@ -324,9 +335,8 @@ func CheckCompressedDataCompatibility(data []byte) error { // makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. 
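The z/y pairs asserted in both tests form a KZG opening of the blob at the challenge point z. A verification sketch, assuming the kzg4844 helpers from scroll-tech/go-ethereum used above; BlobToCommitment and VerifyProof are assumed here to carry the usual geth signatures:

// proof opens the blob polynomial at point z; claim is the evaluation y
proof, claim, err := kzg4844.ComputeProof(blob, *z)
if err != nil {
	// handle error
}
commitment, err := kzg4844.BlobToCommitment(blob)
if err != nil {
	// handle error
}
// VerifyProof checks that the blob committed to by commitment evaluates to claim at z
if err := kzg4844.VerifyProof(commitment, *z, claim, proof); err != nil {
	// proof rejected
}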
diff --git a/encoding/da.go b/encoding/da.go
index b12d8fd..0a732c9 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -27,6 +27,17 @@ const blockContextByteSize = 60
 // txLenByteSize is the size of the transaction length in bytes.
 const txLenByteSize = 4
 
+// maxBlobBytes is the maximum number of bytes that can be stored in a blob.
+const maxBlobBytes = 131072
+
+// maxEffectiveBlobBytes is the maximum number of bytes that can be stored in a blob.
+// We can only utilize 31/32 of a blob.
+const maxEffectiveBlobBytes = maxBlobBytes / 32 * 31
+
+// minCompressedDataCheckSize is the minimum size of compressed data to check compatibility.
+// only used in codecv2 and codecv3.
+const minCompressedDataCheckSize = 131072
+
 // Block represents an L2 block.
 type Block struct {
 	Header *types.Header
@@ -324,9 +335,8 @@ func CheckCompressedDataCompatibility(data []byte) error {
 
 // makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements.
 func makeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) {
-	// blob contains 131072 bytes but we can only utilize 31/32 of these
-	if len(blobBytes) > 126976 {
-		return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976)
+	if len(blobBytes) > maxEffectiveBlobBytes {
+		return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), maxEffectiveBlobBytes)
 	}
 
 	// the canonical (padded) blob payload
@@ -348,8 +358,8 @@ func makeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) {
 }
 
 // bytesFromBlobCanonical converts the canonical blob representation into the raw blob data
-func bytesFromBlobCanonical(blob *kzg4844.Blob) [126976]byte {
-	var blobBytes [126976]byte
+func bytesFromBlobCanonical(blob *kzg4844.Blob) [maxEffectiveBlobBytes]byte {
+	var blobBytes [maxEffectiveBlobBytes]byte
 	for from := 0; from < len(blob); from += 32 {
 		copy(blobBytes[from/32*31:], blob[from+1:from+32])
 	}
@@ -360,7 +370,7 @@ func bytesFromBlobCanonical(blob *kzg4844.Blob) [126976]byte {
 func decompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) {
 	// decompress data in stream and in batches of bytes, because we don't know actual length of compressed data
 	var res []byte
-	readBatchSize := 131072
+	readBatchSize := maxBlobBytes
 	batchOfBytes := make([]byte, readBatchSize)
 
 	r := bytes.NewReader(compressedBytes)
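bytesFromBlobCanonical above strips the first byte of every 32-byte field element; the forward direction implied by it (and by makeBlobCanonical) packs 31 payload bytes per element so that every value stays below the BLS12-381 field modulus. A sketch of that packing loop, mirroring the copy indices used above:

var blob kzg4844.Blob
for from := 0; from < len(blobBytes); from += 31 {
	to := from + 31
	if to > len(blobBytes) {
		to = len(blobBytes)
	}
	// byte 0 of each 32-byte element stays zero; bytes 1..31 carry payload,
	// so a 131072-byte blob yields 131072/32*31 = 126976 usable bytes
	copy(blob[from/31*32+1:], blobBytes[from:to])
}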
From 87e3f1587d8734b512e94f22cd2c9dca49cd9097 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 17 Oct 2024 01:35:46 +0800
Subject: [PATCH 103/126] edge cases

---
 encoding/codecv0.go      |  4 ++--
 encoding/codecv1.go      |  8 ++++----
 encoding/codecv1_test.go | 18 +++++++++---------
 encoding/codecv2.go      |  4 ++--
 encoding/codecv4.go      |  4 ++--
 encoding/codecv4_test.go | 14 +++++++-------
 6 files changed, 26 insertions(+), 26 deletions(-)

diff --git a/encoding/codecv0.go b/encoding/codecv0.go
index 5a192dc..3bcc639 100644
--- a/encoding/codecv0.go
+++ b/encoding/codecv0.go
@@ -389,12 +389,12 @@ func (d *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error
 	return true, nil
 }
 
-// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
+// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk.
 func (d *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
-// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
+// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch.
 func (d *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
 	return 0, 0, nil
 }
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 7a55602..86ccbeb 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -398,7 +398,7 @@ func (d *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error)
 	return totalL1CommitCalldataSize, nil
 }
 
-// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
+// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk.
 func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
 	metadataSize := 2 + 4*d.MaxNumChunksPerBatch()
 	batchDataSize, err := d.chunkL1CommitBlobDataSize(c)
@@ -406,10 +406,10 @@ func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64,
 		return 0, 0, err
 	}
 	blobSize := calculatePaddedBlobSize(metadataSize + batchDataSize)
-	return blobSize, blobSize, nil
+	return metadataSize + batchDataSize, blobSize, nil
 }
 
-// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
+// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch.
 func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
 	metadataSize := 2 + 4*d.MaxNumChunksPerBatch()
 	var batchDataSize uint64
@@ -421,7 +421,7 @@ func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64,
 		batchDataSize += chunkDataSize
 	}
 	blobSize := calculatePaddedBlobSize(metadataSize + batchDataSize)
-	return blobSize, blobSize, nil
+	return metadataSize + batchDataSize, blobSize, nil
 }
 
 // computeBatchDataHash computes the data hash of the batch.
diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go
index b859bc6..b6216e3 100644
--- a/encoding/codecv1_test.go
+++ b/encoding/codecv1_test.go
@@ -542,52 +542,52 @@ func TestCodecV1BatchSizeAndBlobSizeEstimation(t *testing.T) {
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
 	chunk2BatchBytesSize, chunk2BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(302), chunk2BatchBytesSize)
+	assert.Equal(t, uint64(292), chunk2BatchBytesSize)
 	assert.Equal(t, uint64(302), chunk2BlobSize)
 	batch2 := &Batch{Chunks: []*Chunk{chunk2}}
 	batch2BatchBytesSize, batch2BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(302), batch2BatchBytesSize)
+	assert.Equal(t, uint64(292), batch2BatchBytesSize)
 	assert.Equal(t, uint64(302), batch2BlobSize)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
 	chunk3BatchBytesSize, chunk3BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(5929), chunk3BatchBytesSize)
+	assert.Equal(t, uint64(5743), chunk3BatchBytesSize)
 	assert.Equal(t, uint64(5929), chunk3BlobSize)
 	batch3 := &Batch{Chunks: []*Chunk{chunk3}}
 	batch3BatchBytesSize, batch3BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(5929), batch3BatchBytesSize)
+	assert.Equal(t, uint64(5743), batch3BatchBytesSize)
 	assert.Equal(t, uint64(5929), batch3BlobSize)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
 	chunk4BatchBytesSize, chunk4BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(98), chunk4BatchBytesSize)
+	assert.Equal(t, uint64(94), chunk4BatchBytesSize)
 	assert.Equal(t, uint64(98), chunk4BlobSize)
 	batch4 := &Batch{Chunks: []*Chunk{chunk4}}
 	blob4BatchBytesSize, batch4BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(98), blob4BatchBytesSize)
+	assert.Equal(t, uint64(94), blob4BatchBytesSize)
 	assert.Equal(t, uint64(98), batch4BlobSize)
 
 	chunk5 := &Chunk{Blocks: []*Block{block2, block3}}
 	chunk5BatchBytesSize, chunk5BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(6166), chunk5BatchBytesSize)
+	assert.Equal(t, uint64(5973), chunk5BatchBytesSize)
 	assert.Equal(t, uint64(6166), chunk5BlobSize)
 	chunk6 := &Chunk{Blocks: []*Block{block4}}
 	chunk6BatchBytesSize, chunk6BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(98), chunk6BatchBytesSize)
+	assert.Equal(t, uint64(94), chunk6BatchBytesSize)
 	assert.Equal(t, uint64(98), chunk6BlobSize)
 	batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}}
 	batch5BatchBytesSize, batch5BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
 	assert.NoError(t, err)
-	assert.Equal(t, uint64(6199), batch5BatchBytesSize)
+	assert.Equal(t, uint64(6005), batch5BatchBytesSize)
 	assert.Equal(t, uint64(6199), batch5BlobSize)
 }
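The new first return value is the raw payload size (metadata plus chunk data), while the second remains the padded blob size. The pairs in this test are consistent with blobSize = ceil(batchSize*32/31), one reserved byte per 31-byte row: 292 -> 302, 5743 -> 5929, 94 -> 98. A sketch of that rule, assuming calculatePaddedBlobSize implements it:

func paddedBlobSize(batchBytesSize uint64) uint64 {
	// each 31 payload bytes occupy a 32-byte field-element row;
	// this computes ceil(batchBytesSize * 32 / 31)
	return (batchBytesSize*32 + 30) / 31
}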
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index b4a86b0..240cd32 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -227,7 +227,7 @@ func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) {
 	return b, nil
 }
 
-// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
+// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk.
 func (d *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d)
 	if err != nil {
@@ -240,7 +240,7 @@ func (d *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64,
 	return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }
 
-// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
+// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch.
 func (d *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d)
 	if err != nil {
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index e76bc41..4a8af4d 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -247,7 +247,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
 	return blob, blobVersionedHash, &z, blobBytes, nil
 }
 
-// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
+// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk.
 func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d)
 	if err != nil {
@@ -270,7 +270,7 @@ func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64,
 	return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil
 }
 
-// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
+// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch.
 func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d)
 	if err != nil {
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index c3f84ef..6c34ffd 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -1029,9 +1029,9 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) {
 	codecv4, err := CodecFromVersion(CodecV4)
 	assert.NoError(t, err)
 
-	// Taking into consideration compression, we allow up to 5x of max blob bytes.
-	// We then ignore the metadata rows for MaxNumChunksPerBatch chunks, plus 1 byte for the compression flag.
-	nRowsData := 5*maxEffectiveBlobBytes - (int(codecv4.MaxNumChunksPerBatch())*4 + 2) - 1
+	// Taking into consideration compression, we allow up to 5x of max blob bytes minus 1 byte for the compression flag.
+	// We then ignore the metadata rows for MaxNumChunksPerBatch chunks.
+	nRowsData := 5*(maxEffectiveBlobBytes-1) - (int(codecv4.MaxNumChunksPerBatch())*4 + 2)
 
 	repeat := func(element byte, count int) string {
 		result := make([]byte, 0, count)
@@ -1111,13 +1111,13 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) {
 		{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"},
 	}, expectedz: "53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"},
 	// single chunk blob full
-	{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "3eeece6f3835294dfb88d1355e5f49eb84267834bb54d2e207f5a4f5c74ddc85", expectedy: "0011b467f17dae185b1b8ac883d3281a16b807429e3bf3d3ecd05bfab9318aae", expectedBlobVersionedHash: "01a959b584330e8a4ab7cc5581cad98f91e53a50287b94c4067de9f9dd76395e", expectedBatchHash: "ef245e1b3a83ed12efb33fe3dd5dfa424324e257618c4e6a84c39af309c004f1"},
+	{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "4a7e2416aed7aa1630b5dfac5de9f7140f0228a293e6507a98ca762f471bd4cb", expectedy: "39087ba100396ce50ea84f3cb196fd45ce7074888acc57f196b905e3bb4fffda", expectedBlobVersionedHash: "0196c25ea10bafe62aa334122d1e426eccc158423e35272ae009029caf7664b2", expectedBatchHash: "fed7eeba45afa4ac2f658e233adbc7beab27bd7472364a69ab5c16dafe3960b4"},
 	// multiple chunks blob full
-	{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "427d5c348b1a88e775c96056b5370d0c9fc208f580786a39fa2d887ad5a90737", expectedy: "23569809218eb69653d4b5ac838851c7fa24857ee263a7f932aeecfa67306d8e", expectedBlobVersionedHash: "015c46e076b4acbe92a7f60667f5da0da9d174833dbd9965f906acea718c3a5b", expectedBatchHash: "5fb400716644aecc161766470807994b3ab347a62e7f3e102998f2a225b0d812"},
+	{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "588908e72f3910e010ecbb38583e3c14d2de20e3fc0fcfca1fa573b6ae652009", expectedy: "4dd0fe025a1d27c21aa3c199e88d8f7bfa839b04e2fffb39d149b7d81ea2d81e", expectedBlobVersionedHash: "0146e7e489077de92fc8e90102560f1ea8d10f3dc5aca0c7ce3f362698e8dfed", expectedBatchHash: "5cd5ae7f3ca9d7777efef7b248fe0348841ea99b270e4c391fa5bed6a00c7aa9"},
 	// max number of chunks only last one non-empty not full blob
-	{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1e510916d60ae4a6ccc1643ea7e088c1d0c55b65bc296a2924eeed85a253b06e", expectedy: "4fb0517baf002fabe917b8050b12544917093de4ee793658e962e9b9f3e11f40", expectedBlobVersionedHash: "017df5d06fd7763790000c2c79a0c7c00b4f6cb97b83d0590b42a6f70f0c351d", expectedBatchHash: "621aba1e754a7ddba80dcfcb1e876c68b80a085df6d7510f7c9176c60a9b1f50"},
+	{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6fa8165246ac960a1a31c8f9950dad3c6cfd11393a8822738f392f096e0e27da", expectedy: "3391e91d228eee3a4341c25536741bb3d16387e47ca03548212a4a8acc898dad", expectedBlobVersionedHash: "01a65de32db70380b8728e048ed510cf4fbd9b82ff22955bbc27edebc4fd0188", expectedBatchHash: "f78751f5d548107925e31ace50234e3c926b0ade2aa2bd32f46814016f631d62"},
 	// max number of chunks only last one non-empty full blob
-	{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "7092af91dfde0ae343d674690ca69bdc592e42ebf34e13ceceb192964a266451", expectedy: "2ab672a3240198e8426631b4a8d6bac38a03cb16b3d0cf70c96fd848e9cbe887", expectedBlobVersionedHash: "01b57e3d16683204cd2614188184d39fc37b219babcb3f4035e061d169b7aa24", expectedBatchHash: "11c5c63fdf42b76beff06c9265c49169b8c8d2783c65d8fd3bca5e5668c55ab6"},
+	{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "44c6b024e20a1b616c9619c23b612258ddb5489bb0631119598c89ddb2cf8565", expectedy: "6e3296728e406d16cf1d7342959bcbe0c4e4c1e9b1f705ae6b426a0dbb79838c", expectedBlobVersionedHash: "01cc8fbe921a7c0fb5d01a1e12ef090060740ca1ecebf279f1de3bb4499c7341", expectedBatchHash: "fcca8045e82349c28f6d8747bcd6fec84a34130b31097e2e08e854bc5c21c476"},
 	// max number of chunks but last is empty
 	{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: "4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"},
 	} {
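With the same assumed constants as before (126976 effective blob bytes, 45 chunks), the corrected formula moves the flag byte deduction ahead of the 5x compression allowance, which is why the four "blob full" vectors change:

// before: 5*126976 - (45*4 + 2) - 1  = 634697
// after:  5*(126976 - 1) - (45*4 + 2) = 634693
// the flag byte is part of every blob rather than of the compressible
// payload, so it must be subtracted before scaling by the 5x allowance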
From 6eb38e544eae90b78f53f8acaab8f6c991f6283c Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 17 Oct 2024 02:15:36 +0800
Subject: [PATCH 104/126] fixes

---
 encoding/codecv0.go       |  6 +++---
 encoding/codecv0_types.go |  4 ++--
 encoding/codecv1.go       |  8 ++++----
 encoding/codecv1_types.go |  4 ++--
 encoding/codecv2.go       |  6 +++---
 encoding/codecv3.go       |  6 +++---
 encoding/codecv3_types.go |  4 ++--
 encoding/codecv4.go       |  6 +++---
 encoding/interfaces.go    |  2 +-
 encoding/zstd/zstd.go     | 13 ++++++++++---
 10 files changed, 33 insertions(+), 26 deletions(-)

diff --git a/encoding/codecv0.go b/encoding/codecv0.go
index 3bcc639..f2160a8 100644
--- a/encoding/codecv0.go
+++ b/encoding/codecv0.go
@@ -160,11 +160,11 @@ func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx
 func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) {
-		return nil, errors.New("too many chunks in batch")
+		return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch())
 	}
 
 	if len(batch.Chunks) == 0 {
-		return nil, errors.New("too few chunks in batch")
+		return nil, errors.New("batch must contain at least one chunk")
 	}
 
 	// compute batch data hash
@@ -214,7 +214,7 @@ func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) {
 	}
 
 	if CodecVersion(data[0]) != CodecV0 {
-		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0)
+		return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV0, data[0])
 	}
 
 	b := newDABatchV0(
diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go
index 5cd512b..bdf2b77 100644
--- a/encoding/codecv0_types.go
+++ b/encoding/codecv0_types.go
@@ -268,8 +268,8 @@ func (b *daBatchV0) BlobDataProofForPointEvaluation() ([]byte, error) {
 }
 
 // Version returns the version of the DABatch.
-func (b *daBatchV0) Version() uint8 {
-	return b.version
+func (b *daBatchV0) Version() CodecVersion {
+	return CodecVersion(b.version)
 }
 
 // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch.
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 86ccbeb..894e23e 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -103,11 +103,11 @@ func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx
 func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) {
-		return nil, errors.New("too many chunks in batch")
+		return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch())
 	}
 
 	if len(batch.Chunks) == 0 {
-		return nil, errors.New("too few chunks in batch")
+		return nil, errors.New("batch must contain at least one chunk")
 	}
 
 	// batch data hash
@@ -241,7 +241,7 @@ func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) {
 	}
 
 	if CodecVersion(data[0]) != CodecV1 {
-		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1)
+		return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV1, data[0])
 	}
 
 	b := newDABatchV1(
@@ -426,7 +426,7 @@ func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64,
 
 // computeBatchDataHash computes the data hash of the batch.
 // Note: The batch hash and batch data hash are two different hashes,
-// the former is used for identifying a badge in the contracts,
+// the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
 func (d *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
 	var dataBytes []byte
diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go
index d2860dc..8600dae 100644
--- a/encoding/codecv1_types.go
+++ b/encoding/codecv1_types.go
@@ -182,8 +182,8 @@ func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) {
 }
 
 // Version returns the version of the DABatch.
-func (b *daBatchV1) Version() uint8 {
-	return b.version
+func (b *daBatchV1) Version() CodecVersion {
+	return CodecVersion(b.version)
 }
 
 // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch.
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index 240cd32..ab369de 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -50,11 +50,11 @@ func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx
 func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) {
-		return nil, errors.New("too many chunks in batch")
+		return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch())
 	}
 
 	if len(batch.Chunks) == 0 {
-		return nil, errors.New("too few chunks in batch")
+		return nil, errors.New("batch must contain at least one chunk")
 	}
 
 	// batch data hash
@@ -208,7 +208,7 @@ func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) {
 	}
 
 	if CodecVersion(data[0]) != CodecV2 {
-		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2)
+		return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV2, data[0])
 	}
 
 	b := newDABatchV1(
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index 3efe523..6cc08ad 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -22,11 +22,11 @@ func (d *DACodecV3) Version() CodecVersion {
 func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) {
-		return nil, errors.New("too many chunks in batch")
+		return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch())
 	}
 
 	if len(batch.Chunks) == 0 {
-		return nil, errors.New("too few chunks in batch")
+		return nil, errors.New("batch must contain at least one chunk")
 	}
 
 	if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 {
@@ -78,7 +78,7 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
 	}
 
 	if CodecVersion(data[0]) != CodecV3 {
-		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3)
+		return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV3, data[0])
 	}
 
 	b := newDABatchV3WithProof(
diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go
index 5650185..400d2e8 100644
--- a/encoding/codecv3_types.go
+++ b/encoding/codecv3_types.go
@@ -189,8 +189,8 @@ func (b *daBatchV3) MarshalJSON() ([]byte, error) {
 }
 
 // Version returns the version of the DABatch.
-func (b *daBatchV3) Version() uint8 { - return b.version +func (b *daBatchV3) Version() CodecVersion { + return CodecVersion(b.version) } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 4a8af4d..5734de7 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -47,11 +47,11 @@ func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { - return nil, errors.New("too many chunks in batch") + return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") + return nil, errors.New("batch must contain at least one chunk") } if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { @@ -108,7 +108,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } if CodecVersion(data[0]) != CodecV4 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV4, data[0]) } b := newDABatchV3WithProof( diff --git a/encoding/interfaces.go b/encoding/interfaces.go index d2436a5..8ba40a1 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -33,7 +33,7 @@ type DABatch interface { BlobDataProofForPointEvaluation() ([]byte, error) Blob() *kzg4844.Blob BlobBytes() []byte - Version() uint8 + Version() CodecVersion SkippedL1MessageBitmap() []byte } diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go index feab982..aab718f 100644 --- a/encoding/zstd/zstd.go +++ b/encoding/zstd/zstd.go @@ -11,11 +11,18 @@ import ( "unsafe" ) -// CompressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. +const compressBufferOverhead = 128 + +// CompressScrollBatchBytes compresses the given batch of bytes using zstd compression. +// The output buffer is allocated with an extra compressBufferOverhead bytes to accommodate +// potential metadata overhead or error messages from the underlying C function. 
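The function that follows allocates its output buffer as input length plus compressBufferOverhead before calling into C. A toy illustration of that sizing arithmetic (plain Go, no cgo; the 4 KiB batch is just an example):

package main

import "fmt"

const compressBufferOverhead = 128 // extra room for metadata or an error message

func main() {
	batchBytes := make([]byte, 4096) // e.g. a 4 KiB batch payload
	outbuf := make([]byte, len(batchBytes)+compressBufferOverhead)
	fmt.Println(len(outbuf)) // 4224: the C side may need slightly more space than the input
}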
func CompressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + if len(batchBytes) == 0 { + return nil, fmt.Errorf("input batch is empty") + } + srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbufSize := C.uint64_t(len(batchBytes) + compressBufferOverhead) outbuf := make([]byte, outbufSize) if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, From 8432d4195249b91ea6f0ba490126734b0e56b16c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 02:27:22 +0800 Subject: [PATCH 105/126] address ai's comments --- encoding/codecv0.go | 5 ++++- encoding/codecv2.go | 5 ++--- encoding/codecv4.go | 5 ++--- encoding/da.go | 10 ++++++---- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index f2160a8..93f66e4 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -122,7 +122,10 @@ func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, e for _, block := range blocks { var blockTransactions types.Transactions // ignore L1 msg transactions from the block, consider only L2 transactions - txNum := int(block.NumTransactions() - block.NumL1Messages()) + txNum := int(block.NumTransactions()) - int(block.NumL1Messages()) + if txNum < 0 { + return nil, fmt.Errorf("invalid transaction count: NumL1Messages (%d) exceeds NumTransactions (%d)", block.NumL1Messages(), block.NumTransactions()) + } for i := 0; i < txNum; i++ { if len(chunk) < currentIndex+txLenByteSize { return nil, fmt.Errorf("chunk size doesn't match, next tx size is less than 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+txLenByteSize, i) } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index ab369de..e1ea08f 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -130,9 +130,8 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // blob metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } + chunkSize := len(batchBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) // challenge: compute chunk data hash chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 5734de7..e2da4c1 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -172,9 +172,8 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // blob metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } + chunkSize := len(batchBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) // challenge: compute chunk data hash chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) diff --git a/encoding/da.go b/encoding/da.go index 0a732c9..66194da 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -439,9 +439,8 @@ func constructBatchPayloadInBlob(chunks []*Chunk, codec Codec) ([]byte, error) { } // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } + chunkSize
:= len(batchBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) } return batchBytes, nil } @@ -554,7 +553,10 @@ func decodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks i curIndex := 0 for _, block := range chunk.Blocks { var blockTransactions types.Transactions - txNum := int(block.NumTransactions() - block.NumL1Messages()) + txNum := int(block.NumTransactions()) - int(block.NumL1Messages()) + if txNum < 0 { + return fmt.Errorf("invalid transaction count: NumL1Messages (%d) exceeds NumTransactions (%d)", block.NumL1Messages(), block.NumTransactions()) + } for i := 0; i < txNum; i++ { tx, nextIndex, err := getNextTx(chunkBytes, curIndex) if err != nil { From 54ddf8d31b73136621504bf90d251fb11c0a5cb9 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 02:40:34 +0800 Subject: [PATCH 106/126] address ai's comments --- encoding/codecv0.go | 2 +- encoding/codecv0_types.go | 9 +++------ encoding/codecv1.go | 2 +- encoding/codecv1_types.go | 7 +------ encoding/codecv2.go | 10 +++++----- encoding/codecv3.go | 2 +- encoding/codecv3_test.go | 1 + encoding/codecv3_types.go | 6 ++++-- encoding/codecv4.go | 10 +++++----- 9 files changed, 22 insertions(+), 27 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 93f66e4..6c54988 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -163,7 +163,7 @@ func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { - return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } if len(batch.Chunks) == 0 { diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index bdf2b77..8db37fb 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -2,12 +2,10 @@ package encoding import ( "encoding/binary" - "encoding/hex" "errors" "fmt" "math" "math/big" - "strings" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -181,10 +179,9 @@ func (c *daChunkV0) Hash() (common.Hash, error) { var l1TxHashes []byte var l2TxHashes []byte for _, txData := range blockTxs { - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) + hashBytes := common.FromHex(txData.TxHash) + if len(hashBytes) != common.HashLength { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) } if txData.Type == types.L1MessageTxType { l1TxHashes = append(l1TxHashes, hashBytes...) 
diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 894e23e..a5e47c5 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -103,7 +103,7 @@ func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { - return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } if len(batch.Chunks) == 0 { diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index 8600dae..4b5a899 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -5,7 +5,6 @@ import ( "encoding/hex" "errors" "fmt" - "strings" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -55,11 +54,7 @@ func (c *daChunkV1) Hash() (common.Hash, error) { continue } - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, err - } + hashBytes := common.FromHex(txData.TxHash) if len(hashBytes) != common.HashLength { return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index e1ea08f..768324d 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -50,7 +50,7 @@ func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { - return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } if len(batch.Chunks) == 0 { @@ -230,11 +230,11 @@ func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { func (d *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { - return 0, 0, err + return 0, 0, fmt.Errorf("failed to construct batch payload in blob: %w", err) } blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { - return 0, 0, err + return 0, 0, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil } @@ -257,11 +257,11 @@ func (d *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { - return false, err + return false, fmt.Errorf("failed to construct batch payload in blob: %w", err) } blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { - return false, err + return false, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. 
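The guard after this comment skips the check for small payloads. A hedged sketch of the compress-then-validate flow; 131072 is the 128 KiB stated above, and checkCompatibility stands in for the real circuit check:

package main

import "fmt"

const minCompressedDataCheckSize = 131072 // 128 KiB

// checkCompatibility is a stand-in for the circuit compatibility check.
func checkCompatibility(blobBytes []byte) error { return nil }

// compatible mirrors the pattern: skip the check for small uncompressed payloads.
func compatible(batchBytes, blobBytes []byte) (bool, error) {
	if len(batchBytes) <= minCompressedDataCheckSize {
		return true, nil // small batches are always circuit-friendly
	}
	if err := checkCompatibility(blobBytes); err != nil {
		return false, nil // incompatible, but not a hard error
	}
	return true, nil
}

func main() {
	ok, _ := compatible(make([]byte, 1024), nil)
	fmt.Println(ok) // true: below the threshold, check skipped
}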
if len(batchBytes) <= minCompressedDataCheckSize { diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6cc08ad..8d1b917 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -22,7 +22,7 @@ func (d *DACodecV3) Version() CodecVersion { func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { - return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } if len(batch.Chunks) == 0 { diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 552a167..0205e00 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -75,6 +75,7 @@ func TestCodecV3BlockEncode(t *testing.T) { assert.Equal(t, encodedv0, encodedv3) } } + func TestCodecV3ChunkEncode(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) assert.NoError(t, err) diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index 400d2e8..328f34d 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -27,7 +27,8 @@ type daBatchV3 struct { // newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally. func newDABatchV3(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, - z *kzg4844.Point, blobBytes []byte) (*daBatchV3, error) { + z *kzg4844.Point, blobBytes []byte, +) (*daBatchV3, error) { daBatch := &daBatchV3{ daBatchV0: daBatchV0{ version: version, @@ -58,7 +59,8 @@ func newDABatchV3(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp // newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof. 
func newDABatchV3WithProof(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, - blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash) *daBatchV3 { + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash, +) *daBatchV3 { return &daBatchV3{ daBatchV0: daBatchV0{ version: version, diff --git a/encoding/codecv4.go b/encoding/codecv4.go index e2da4c1..50ace45 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -47,7 +47,7 @@ func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { - return nil, fmt.Errorf("too many chunks in batch: got %d, max allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } if len(batch.Chunks) == 0 { @@ -250,12 +250,12 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { - return 0, 0, err + return 0, 0, fmt.Errorf("failed to construct batch payload in blob: %w", err) } var blobBytesLength uint64 enableCompression, err := d.CheckChunkCompressedDataCompatibility(c) if err != nil { - return 0, 0, err + return 0, 0, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } if enableCompression { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) @@ -297,11 +297,11 @@ func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) if err != nil { - return false, err + return false, fmt.Errorf("failed to construct batch payload in blob: %w", err) } blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { - return false, err + return false, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) From c89d7ae0be2c690f9f2fad9678d95919a56f7e8c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 03:00:40 +0800 Subject: [PATCH 107/126] address ai's comments --- encoding/codecv0.go | 2 +- encoding/codecv0_types.go | 2 +- encoding/codecv1.go | 6 ++-- encoding/codecv2.go | 10 +++--- encoding/codecv3.go | 5 +-- encoding/codecv4.go | 68 +++++++++++++-------------------------- encoding/da.go | 28 ++++++++-------- 7 files changed, 49 insertions(+), 72 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 6c54988..073361e 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -72,7 +72,7 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } if len(chunk.Blocks) > math.MaxUint8 { - return nil, errors.New("number of blocks exceeds 1 byte") + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", 
len(chunk.Blocks), math.MaxUint8) } for _, block := range chunk.Blocks { diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index 8db37fb..4d79679 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -122,7 +122,7 @@ func (c *daChunkV0) Encode() ([]byte, error) { } if len(c.blocks) > math.MaxUint8 { - return nil, errors.New("number of blocks exceeds 1 byte") + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(c.blocks), math.MaxUint8) } var chunkBytes []byte diff --git a/encoding/codecv1.go b/encoding/codecv1.go index a5e47c5..0a0674e 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -37,7 +37,7 @@ func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } if len(chunk.Blocks) > math.MaxUint8 { - return nil, errors.New("number of blocks exceeds 1 byte") + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) } for _, block := range chunk.Blocks { @@ -207,7 +207,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // convert raw data to BLSFieldElements blob, err := makeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) } // compute blob versioned hash @@ -250,8 +250,8 @@ func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped common.BytesToHash(data[25:57]), // dataHash - common.BytesToHash(data[89:121]), // parentBatchHash common.BytesToHash(data[57:89]), // blobVersionedHash + common.BytesToHash(data[89:121]), // parentBatchHash data[121:], // skippedL1MessageBitmap nil, // blob nil, // z diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 768324d..78d9fc1 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -159,7 +159,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > minCompressedDataCheckSize { // Check compressed data compatibility. 
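The NewDABatchFromBytes hunks above correct the argument order so the hash read from data[57:89] is passed as blobVersionedHash and data[89:121] as parentBatchHash. A sketch of the byte layout those offsets imply; parseDABatchV1Header is a hypothetical helper, not part of the package:

package main

import (
	"encoding/binary"
	"fmt"
)

// parseDABatchV1Header is a hypothetical reader for the 121-byte header:
// [0] version, [1:9] batchIndex, [9:17] l1MessagePopped,
// [17:25] totalL1MessagePopped, [25:57] dataHash,
// [57:89] blobVersionedHash, [89:121] parentBatchHash, [121:] skipped bitmap.
func parseDABatchV1Header(data []byte) error {
	if len(data) < 121 {
		return fmt.Errorf("batch header too short: %d bytes", len(data))
	}
	version := data[0]
	batchIndex := binary.BigEndian.Uint64(data[1:9])
	var blobVersionedHash, parentBatchHash [32]byte
	copy(blobVersionedHash[:], data[57:89]) // blob versioned hash comes first in the encoding
	copy(parentBatchHash[:], data[89:121])  // parent batch hash follows it
	fmt.Printf("v%d batch #%d, blob %x, parent %x\n",
		version, batchIndex, blobVersionedHash[:4], parentBatchHash[:4])
	return nil
}

func main() {
	data := make([]byte, 121)
	data[0] = 1 // CodecV1
	_ = parseDABatchV1Header(data)
}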
- if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = checkCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -173,7 +173,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // convert raw data to BLSFieldElements blob, err := makeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, nil, err + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) } // compute blob versioned hash @@ -216,8 +216,8 @@ func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped common.BytesToHash(data[25:57]), // dataHash - common.BytesToHash(data[89:121]), // parentBatchHash common.BytesToHash(data[57:89]), // blobVersionedHash + common.BytesToHash(data[89:121]), // parentBatchHash data[121:], // skippedL1MessageBitmap nil, // blob nil, // z @@ -267,7 +267,7 @@ func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error if len(batchBytes) <= minCompressedDataCheckSize { return true, nil } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = checkCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -289,7 +289,7 @@ func (d *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error if len(batchBytes) <= minCompressedDataCheckSize { return true, nil } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = checkCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 8d1b917..6f01bc3 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -7,6 +7,7 @@ import ( "fmt" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/params" ) type DACodecV3 struct { @@ -130,7 +131,7 @@ func (d *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { if err != nil { return 0, err } - totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + totalL1CommitGas += params.BlobTxPointEvaluationPrecompileGas // plus gas cost for the point-evaluation precompile call. return totalL1CommitGas, nil } @@ -178,7 +179,7 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) } - totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + totalL1CommitGas += params.BlobTxPointEvaluationPrecompileGas // plus gas cost for the point-evaluation precompile call. 
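For context on the constant used just above: in upstream go-ethereum, params.BlobTxPointEvaluationPrecompileGas is the EIP-4844 point-evaluation precompile cost of 50000 gas, so replacing the bare literal keeps the estimate unchanged while naming the source of the number. A quick check, assuming the scroll-tech fork keeps the upstream value:

package main

import (
	"fmt"

	"github.com/scroll-tech/go-ethereum/params"
)

func main() {
	// the named constant should carry the same value as the old magic number
	fmt.Println(params.BlobTxPointEvaluationPrecompileGas == 50000) // expected: true
}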
return totalL1CommitGas, nil } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 50ace45..2d59920 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -202,7 +202,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } if !useMockTxData { // Check compressed data compatibility. - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = checkCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -220,7 +220,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // convert raw data to BLSFieldElements blob, err := makeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, nil, err + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) } // compute blob versioned hash @@ -246,14 +246,13 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i return blob, blobVersionedHash, &z, blobBytes, nil } -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. -func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) +func (d *DACodecV4) estimateL1CommitBatchSizeAndBlobSize(chunks []*Chunk) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob(chunks, d) if err != nil { return 0, 0, fmt.Errorf("failed to construct batch payload in blob: %w", err) } var blobBytesLength uint64 - enableCompression, err := d.CheckChunkCompressedDataCompatibility(c) + enableCompression, err := d.CheckBatchCompressedDataCompatibility(&Batch{Chunks: chunks}) if err != nil { return 0, 0, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } @@ -269,33 +268,20 @@ func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil } +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. +func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return d.estimateL1CommitBatchSizeAndBlobSize([]*Chunk{c}) +} + // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) - if err != nil { - return 0, 0, err - } - var blobBytesLength uint64 - enableCompression, err := d.CheckBatchCompressedDataCompatibility(b) - if err != nil { - return 0, 0, err - } - if enableCompression { - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - blobBytesLength = 1 + uint64(len(blobBytes)) - } else { - blobBytesLength = 1 + uint64(len(batchBytes)) - } - return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil + return d.estimateL1CommitBatchSizeAndBlobSize(b.Chunks) } -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
-// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) +// checkCompressedDataCompatibility checks the compressed data compatibility for a batch's chunks. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility. +func (d *DACodecV4) checkCompressedDataCompatibility(chunks []*Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(chunks, d) if err != nil { return false, fmt.Errorf("failed to construct batch payload in blob: %w", err) } @@ -303,27 +289,19 @@ func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error if err != nil { return false, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + if err = checkCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("Compressed data compatibility check failed", "err", err) return false, nil } return true, nil } +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return d.checkCompressedDataCompatibility([]*Chunk{c}) +} + // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func (d *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil + return d.checkCompressedDataCompatibility(b.Chunks) } diff --git a/encoding/da.go b/encoding/da.go index 66194da..57bf7d5 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -285,7 +285,7 @@ func TxsToTxsData(txs types.Transactions) []*types.TransactionData { // Fast testing if the compressed data is compatible with our circuit // (require specified frame header and each block is compressed) -func CheckCompressedDataCompatibility(data []byte) error { +func checkCompressedDataCompatibility(data []byte) error { if len(data) < 16 { return fmt.Errorf("too small size (%x), what is it?", data) } @@ -575,13 +575,14 @@ func decodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks i // GetHardforkName returns the name of the hardfork active at the given block height and timestamp. 
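The GetHardforkName and GetCodecVersion refactors that follow hoist the big.Int conversion out of the chained fork checks so it is allocated once. A sketch of the pattern with a toy fork config (forkConfig and its rules are stand-ins, not params.ChainConfig):

package main

import (
	"fmt"
	"math/big"
)

// toy stand-in for the chain config's fork checks
type forkConfig struct{ bernoulliBlock, curieBlock *big.Int }

func (c *forkConfig) isBernoulli(n *big.Int) bool { return n.Cmp(c.bernoulliBlock) >= 0 }
func (c *forkConfig) isCurie(n *big.Int) bool     { return n.Cmp(c.curieBlock) >= 0 }

func hardforkName(c *forkConfig, blockHeight uint64) string {
	// allocate the big.Int once and reuse it for every check,
	// instead of one new(big.Int).SetUint64 per fork test
	n := new(big.Int).SetUint64(blockHeight)
	switch {
	case !c.isBernoulli(n):
		return "homestead"
	case !c.isCurie(n):
		return "bernoulli"
	default:
		return "curie"
	}
}

func main() {
	cfg := &forkConfig{bernoulliBlock: big.NewInt(100), curieBlock: big.NewInt(200)}
	fmt.Println(hardforkName(cfg, 150)) // bernoulli
}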
func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string { - if !config.IsBernoulli(new(big.Int).SetUint64(blockHeight)) { + blockHeightBigInt := new(big.Int).SetUint64(blockHeight) + if !config.IsBernoulli(blockHeightBigInt) { return "homestead" - } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { + } else if !config.IsCurie(blockHeightBigInt) { return "bernoulli" - } else if !config.IsDarwin(new(big.Int).SetUint64(blockHeight), blockTimestamp) { + } else if !config.IsDarwin(blockHeightBigInt, blockTimestamp) { return "curie" - } else if !config.IsDarwinV2(new(big.Int).SetUint64(blockHeight), blockTimestamp) { + } else if !config.IsDarwinV2(blockHeightBigInt, blockTimestamp) { return "darwin" } else { return "darwinV2" @@ -590,13 +591,14 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin // GetCodecVersion returns the encoding codec version for the given block height and timestamp. func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uint64) CodecVersion { - if !config.IsBernoulli(new(big.Int).SetUint64(blockHeight)) { + blockHeightBigInt := new(big.Int).SetUint64(blockHeight) + if !config.IsBernoulli(blockHeightBigInt) { return CodecV0 - } else if !config.IsCurie(new(big.Int).SetUint64(blockHeight)) { + } else if !config.IsCurie(blockHeightBigInt) { return CodecV1 - } else if !config.IsDarwin(new(big.Int).SetUint64(blockHeight), blockTimestamp) { + } else if !config.IsDarwin(blockHeightBigInt, blockTimestamp) { return CodecV2 - } else if !config.IsDarwinV2(new(big.Int).SetUint64(blockHeight), blockTimestamp) { + } else if !config.IsDarwinV2(blockHeightBigInt, blockTimestamp) { return CodecV3 } else { return CodecV4 @@ -624,13 +626,9 @@ func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersion CodecVersi // GetChunkEnableCompression returns whether to enable compression for the given block height and timestamp. 
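The switch that follows merges codec versions with identical answers into shared cases. A sketch of the resulting decision table; CodecV4's data-dependent answer is modeled here as a boolean parameter:

package main

import "fmt"

type CodecVersion uint8

const (
	CodecV0 CodecVersion = iota
	CodecV1
	CodecV2
	CodecV3
	CodecV4
)

// enableCompression mirrors the consolidated switch: V0/V1 never compress,
// V2/V3 always do, and V4 depends on a compatibility check done elsewhere.
func enableCompression(v CodecVersion, v4Compatible bool) bool {
	switch v {
	case CodecV0, CodecV1:
		return false
	case CodecV2, CodecV3:
		return true
	default: // CodecV4
		return v4Compatible
	}
}

func main() {
	fmt.Println(enableCompression(CodecV1, false)) // false
	fmt.Println(enableCompression(CodecV3, false)) // true
}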
func GetChunkEnableCompression(codecVersion CodecVersion, chunk *Chunk) (bool, error) { switch codecVersion { - case CodecV0: - return false, nil - case CodecV1: + case CodecV0, CodecV1: return false, nil - case CodecV2: - return true, nil - case CodecV3: + case CodecV2, CodecV3: return true, nil case CodecV4: return CheckChunkCompressedDataCompatibility(chunk, codecVersion) From 5aa90e88822e5639c54b6fc0d4c66cb87535e1ab Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 11:58:02 +0800 Subject: [PATCH 108/126] address comments --- encoding/codecv0.go | 31 +++++++++++++++++++------------ encoding/codecv0_types.go | 16 ++++++---------- encoding/codecv1.go | 32 +++++++++++++++++++++----------- encoding/codecv2.go | 25 +++++++++++++++---------- encoding/codecv3.go | 36 +++++++++++++++++++++++------------- encoding/codecv4.go | 29 +++++++++++++++++------------ encoding/da.go | 8 ++------ 7 files changed, 103 insertions(+), 74 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 073361e..6ef552a 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -85,12 +85,14 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := newDAChunkV0( - blocks, // blocks - txs, // transactions - ) + if len(blocks) != len(txs) { + return nil, fmt.Errorf("number of blocks (%d) does not match number of transactions (%d)", len(blocks), len(transactions)) + } - return daChunk, nil + return &daChunkV0{ + blocks: blocks, + transactions: txs, + }, nil } // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. @@ -197,14 +199,19 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + daBatch := newDABatchV0( - uint8(CodecV0), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // totalL1MessagePopped - dataHash, // dataHash - batch.ParentBatchHash, // parentBatchHash - bitmapBytes, // skippedL1MessageBitmap + uint8(CodecV0), // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + bitmapBytes, // skippedL1MessageBitmap ) return daBatch, nil diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index 4d79679..b374909 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -107,14 +107,6 @@ type daChunkV0 struct { transactions [][]*types.TransactionData } -// newDAChunkV0 is a constructor for daChunkV0, initializing with blocks and transactions. -func newDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV0 { - return &daChunkV0{ - blocks: blocks, - transactions: transactions, - } -} - // Encode serializes the DAChunk into a slice of bytes. 
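Farther down this hunk, daChunkV0.Hash gains an explicit bounds check while slicing the first 58 bytes of each 60-byte block context. A sketch of that offset arithmetic, with sizes taken from the diff and a hypothetical helper name:

package main

import "fmt"

const (
	numBlocksPrefixLen = 1  // leading byte holds the block count
	blockContextSize   = 60 // each block context occupies 60 bytes
	hashedContextSize  = 58 // only the first 58 bytes of each context are hashed
)

// hashedContextBytes collects the hashed prefix of every block context.
func hashedContextBytes(chunkBytes []byte, numBlocks int) ([]byte, error) {
	var out []byte
	for i := 0; i < numBlocks; i++ {
		start := numBlocksPrefixLen + blockContextSize*i
		end := start + hashedContextSize
		if end > len(chunkBytes) {
			return nil, fmt.Errorf("unexpected end index: %d, chunk length: %d", end, len(chunkBytes))
		}
		out = append(out, chunkBytes[start:end]...)
	}
	return out, nil
}

func main() {
	chunk := make([]byte, 1+2*blockContextSize) // prefix byte plus two block contexts
	chunk[0] = 2
	data, err := hashedContextBytes(chunk, int(chunk[0]))
	fmt.Println(len(data), err) // 116 <nil>
}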
func (c *daChunkV0) Encode() ([]byte, error) { if len(c.blocks) == 0 { @@ -170,8 +162,12 @@ func (c *daChunkV0) Hash() (common.Hash, error) { // concatenate block contexts var dataBytes []byte for i := 0; i < int(numBlocks); i++ { - // only the first 58 bytes of each BlockContext are needed for the hashing process - dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) + start := 1 + 60*i + end := start + 58 // only the first 58 bytes of each BlockContext are needed for the hashing process + if end > len(chunkBytes) { + return common.Hash{}, fmt.Errorf("unexpected end index: %d, chunkBytes length: %d", end, len(chunkBytes)) + } + dataBytes = append(dataBytes, chunkBytes[start:end]...) } // concatenate l1 and l2 tx hashes diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 0a0674e..e7d278c 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -128,17 +128,22 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + daBatch := newDABatchV1( - uint8(CodecV1), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // totalL1MessagePopped - dataHash, // dataHash - batch.ParentBatchHash, // parentBatchHash - blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap - blob, // blob - z, // z + uint8(CodecV1), // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z ) return daBatch, nil @@ -312,7 +317,12 @@ func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { - totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + transactions := uint64(len(block.Transactions)) + l2Transactions := block.NumL2Transactions() + if transactions < l2Transactions { + return 0, fmt.Errorf("number of L2 transactions (%d) exceeds total transactions (%d)", l2Transactions, transactions) + } + totalNonSkippedL1Messages += transactions - l2Transactions blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 78d9fc1..d066038 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -75,17 +75,22 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + daBatch := newDABatchV1( - uint8(CodecV2), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // totalL1MessagePopped - dataHash, // dataHash - 
batch.ParentBatchHash, // parentBatchHash - blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap - blob, // blob - z, // z + uint8(CodecV2), // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z ) return daBatch, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6f01bc3..a2f5c2e 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -55,19 +55,24 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + return newDABatchV3( - uint8(CodecV3), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // totalL1MessagePopped - lastBlock.Header.Time, // lastBlockTimestamp - dataHash, // dataHash - batch.ParentBatchHash, // parentBatchHash - blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap - blob, // blob - z, // z - blobBytes, // blobBytes + uint8(CodecV3), // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + blobBytes, // blobBytes ) } @@ -109,7 +114,12 @@ func (d *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (ui var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { - totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + transactions := uint64(len(block.Transactions)) + l2Transactions := block.NumL2Transactions() + if transactions < l2Transactions { + return 0, fmt.Errorf("number of L2 transactions (%d) exceeds total transactions (%d)", l2Transactions, transactions) + } + totalNonSkippedL1Messages += transactions - l2Transactions blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) if err != nil { return 0, err diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 2d59920..38b1243 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -84,19 +84,24 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + return newDABatchV3( - uint8(CodecV4), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // 
totalL1MessagePopped - lastBlock.Header.Time, // lastBlockTimestamp - dataHash, // dataHash - batch.ParentBatchHash, // parentBatchHash - blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap - blob, // blob - z, // z - blobBytes, // blobBytes + uint8(CodecV4), // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + blobBytes, // blobBytes ) } diff --git a/encoding/da.go b/encoding/da.go index 57bf7d5..090b213 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -640,13 +640,9 @@ func GetChunkEnableCompression(codecVersion CodecVersion, chunk *Chunk) (bool, e // GetBatchEnableCompression returns whether to enable compression for the given block height and timestamp. func GetBatchEnableCompression(codecVersion CodecVersion, batch *Batch) (bool, error) { switch codecVersion { - case CodecV0: - return false, nil - case CodecV1: + case CodecV0, CodecV1: return false, nil - case CodecV2: - return true, nil - case CodecV3: + case CodecV2, CodecV3: return true, nil case CodecV4: return CheckBatchCompressedDataCompatibility(batch, codecVersion) From 901e0826a203184048f1d137ecfb71bff48c327d Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 13:36:44 +0800 Subject: [PATCH 109/126] address ai's comment --- encoding/codecv0.go | 48 ++++++++++++++++++++++++---------------- encoding/codecv0_test.go | 21 +++++++++--------- encoding/codecv1.go | 2 +- encoding/codecv1_test.go | 30 ++++++++++++------------- encoding/codecv2.go | 34 ++++++++++------------------ encoding/codecv2_test.go | 30 ++++++++++++------------- encoding/codecv3_test.go | 30 ++++++++++++------------- encoding/codecv4.go | 2 +- encoding/codecv4_test.go | 32 +++++++++++++-------------- 9 files changed, 115 insertions(+), 114 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 6ef552a..14e7bd9 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -86,7 +86,7 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } if len(blocks) != len(txs) { - return nil, fmt.Errorf("number of blocks (%d) does not match number of transactions (%d)", len(blocks), len(transactions)) + return nil, fmt.Errorf("number of blocks (%d) does not match number of transactions (%d)", len(blocks), len(txs)) } return &daChunkV0{ @@ -173,26 +173,11 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { } // compute batch data hash - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore - - for _, chunk := range batch.Chunks { - // build data hash - daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return nil, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - daChunkHash, err := daChunk.Hash() - if err != nil { - return nil, err - } - dataBytes = append(dataBytes, daChunkHash.Bytes()...) 
+ dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err } - // compute data hash - dataHash := crypto.Keccak256Hash(dataBytes) - // skipped L1 messages bitmap bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { @@ -414,3 +399,28 @@ func (c *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) { // DACodecV0 doesn't need this, so just return empty values return nil, nil } + +// computeBatchDataHash computes the data hash of the batch. +// Note: The batch hash and batch data hash are two different hashes, +// the former is used for identifying a batch in the contracts, +// the latter is used in the public input to the provers. +func (d *DACodecV0) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + dataBytes := make([]byte, 0, len(chunks)*common.HashLength) + totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore + + for _, chunk := range chunks { + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + if err != nil { + return common.Hash{}, err + } + totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) + chunkHash, err := daChunk.Hash() + if err != nil { + return common.Hash{}, err + } + dataBytes = append(dataBytes, chunkHash.Bytes()...) + } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil +} diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index f199d29..aae0d34 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -7,11 +7,12 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV0BlockEncode(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) block := &daBlockV0{} encoded := hex.EncodeToString(block.Encode()) @@ -56,7 +57,7 @@ func TestCodecV0BlockEncode(t *testing.T) { func TestCodecV0ChunkEncode(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -124,7 +125,7 @@ func TestCodecV0ChunkEncode(t *testing.T) { func TestCodecV0ChunkHash(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -189,7 +190,7 @@ func TestCodecV0ChunkHash(t *testing.T) { func TestCodecV0BatchEncode(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // empty batch batch := &daBatchV1{ @@ -265,7 +266,7 @@ func TestCodecV0BatchEncode(t *testing.T) { func TestCodecV0BatchHash(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // empty batch batch := &daBatchV1{ @@ -332,7 +333,7 @@ func TestCodecV0BatchHash(t *testing.T) { func TestCodecV0BatchDataHash(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -391,7 +392,7 @@ func TestCodecV0BatchDataHash(t *testing.T) { func TestCodecV0CalldataSizeEstimation(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + 
require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -439,7 +440,7 @@ func TestCodecV0CalldataSizeEstimation(t *testing.T) { func TestCodecV0CommitGasEstimation(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") assert.NoError(t, err) @@ -488,7 +489,7 @@ func TestCodecV0CommitGasEstimation(t *testing.T) { func TestCodecV0BatchL1MessagePopped(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -567,7 +568,7 @@ func TestCodecV0BatchL1MessagePopped(t *testing.T) { func TestCodecV0DecodeDAChunksRawTx(t *testing.T) { codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") diff --git a/encoding/codecv1.go b/encoding/codecv1.go index e7d278c..9994302 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -439,7 +439,7 @@ func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. func (d *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte + dataBytes := make([]byte, 0, len(chunks)*common.HashLength) totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore for _, chunk := range chunks { diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index b6216e3..ab4cb30 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -16,7 +16,7 @@ import ( func TestCodecV1BlockEncode(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) daBlockV0 := &daBlockV0{} encoded := hex.EncodeToString(daBlockV0.Encode()) @@ -59,7 +59,7 @@ func TestCodecV1BlockEncode(t *testing.T) { assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // sanity check: v0 and v1 block encodings are identical for _, block := range []*Block{block2, block3, block4, block5, block6, block7} { @@ -77,7 +77,7 @@ func TestCodecV1BlockEncode(t *testing.T) { func TestCodecV1ChunkEncode(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -152,7 +152,7 @@ func TestCodecV1ChunkEncode(t *testing.T) { func TestCodecV1ChunkHash(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -238,7 +238,7 @@ func TestCodecV1ChunkHash(t *testing.T) { func TestCodecV1BatchEncode(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) // empty batch batch := &daBatchV1{ @@ -314,7 +314,7 @@ func TestCodecV1BatchEncode(t *testing.T) { func TestCodecV1BatchHash(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) // empty 
batch batch := &daBatchV1{ @@ -381,7 +381,7 @@ func TestCodecV1BatchHash(t *testing.T) { func TestCodecV1BatchDataHash(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -440,7 +440,7 @@ func TestCodecV1BatchDataHash(t *testing.T) { func TestCodecV1CalldataSizeEstimation(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -488,7 +488,7 @@ func TestCodecV1CalldataSizeEstimation(t *testing.T) { func TestCodecV1CommitGasEstimation(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -536,7 +536,7 @@ func TestCodecV1CommitGasEstimation(t *testing.T) { func TestCodecV1BatchSizeAndBlobSizeEstimation(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -593,7 +593,7 @@ func TestCodecV1BatchSizeAndBlobSizeEstimation(t *testing.T) { func TestCodecV1BatchL1MessagePopped(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -672,7 +672,7 @@ func TestCodecV1BatchL1MessagePopped(t *testing.T) { func TestCodecV1BlobEncodingAndHashing(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -755,7 +755,7 @@ func TestCodecV1BlobEncodingAndHashing(t *testing.T) { func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -831,7 +831,7 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { func TestCodecV1DecodeDAChunksRawTx(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") @@ -892,7 +892,7 @@ func TestCodecV1DecodeDAChunksRawTx(t *testing.T) { func TestCodecV1BatchStandardTestCases(t *testing.T) { codecv1, err := CodecFromVersion(CodecV1) - assert.NoError(t, err) + require.NoError(t, err) // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. nRowsData := maxEffectiveBlobBytes - (int(codecv1.MaxNumChunksPerBatch())*4 + 2) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index d066038..38cc54b 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -257,10 +257,10 @@ func (d *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil } -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
-// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) +// checkCompressedDataCompatibility checks the compressed data compatibility for a batch's chunks. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility. +func (d *DACodecV2) checkCompressedDataCompatibility(chunks []*Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(chunks, d) if err != nil { return false, fmt.Errorf("failed to construct batch payload in blob: %w", err) } @@ -273,30 +273,20 @@ func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error return true, nil } if err = checkCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Warn("Compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } return true, nil } +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. +func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return d.checkCompressedDataCompatibility([]*Chunk{c}) +} + // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func (d *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. 
- if len(batchBytes) <= minCompressedDataCheckSize { - return true, nil - } - if err = checkCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil + return d.checkCompressedDataCompatibility(b.Chunks) } diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 063d168..887469c 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -16,7 +16,7 @@ import ( func TestCodecV2BlockEncode(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block := &daBlockV0{} encoded := hex.EncodeToString(block.Encode()) @@ -59,7 +59,7 @@ func TestCodecV2BlockEncode(t *testing.T) { assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // sanity check: v0 and v2 block encodings are identical for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { @@ -77,7 +77,7 @@ func TestCodecV2BlockEncode(t *testing.T) { func TestCodecV2ChunkEncode(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -152,7 +152,7 @@ func TestCodecV2ChunkEncode(t *testing.T) { func TestCodecV2ChunkHash(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -238,7 +238,7 @@ func TestCodecV2ChunkHash(t *testing.T) { func TestCodecV2BatchEncode(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) // empty batch batch := &daBatchV1{ @@ -314,7 +314,7 @@ func TestCodecV2BatchEncode(t *testing.T) { func TestCodecV2BatchHash(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) // empty batch batch := &daBatchV1{ @@ -381,7 +381,7 @@ func TestCodecV2BatchHash(t *testing.T) { func TestCodecV2BatchDataHash(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -440,7 +440,7 @@ func TestCodecV2BatchDataHash(t *testing.T) { func TestCodecV2CalldataSizeEstimation(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -488,7 +488,7 @@ func TestCodecV2CalldataSizeEstimation(t *testing.T) { func TestCodecV2CommitGasEstimation(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -536,7 +536,7 @@ func TestCodecV2CommitGasEstimation(t *testing.T) { func TestCodecV2BatchSizeAndBlobSizeEstimation(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -593,7 
+593,7 @@ func TestCodecV2BatchSizeAndBlobSizeEstimation(t *testing.T) { func TestCodecV2BatchL1MessagePopped(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -672,7 +672,7 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) { func TestCodecV2BlobEncodingAndHashing(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -749,7 +749,7 @@ func TestCodecV2BlobEncodingAndHashing(t *testing.T) { func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -825,7 +825,7 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { func TestCodecV2DecodeDAChunksRawTx(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") @@ -886,7 +886,7 @@ func TestCodecV2DecodeDAChunksRawTx(t *testing.T) { func TestCodecV2BatchStandardTestCases(t *testing.T) { codecv2, err := CodecFromVersion(CodecV2) - assert.NoError(t, err) + require.NoError(t, err) // Taking into consideration compression, we allow up to 5x of max blob bytes. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 0205e00..d5af43b 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -17,7 +17,7 @@ import ( func TestCodecV3BlockEncode(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block := &daBlockV0{} encoded := hex.EncodeToString(block.Encode()) @@ -60,7 +60,7 @@ func TestCodecV3BlockEncode(t *testing.T) { assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // sanity check: v0 and v3 block encodings are identical for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { @@ -78,7 +78,7 @@ func TestCodecV3BlockEncode(t *testing.T) { func TestCodecV3ChunkEncode(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -153,7 +153,7 @@ func TestCodecV3ChunkEncode(t *testing.T) { func TestCodecV3ChunkHash(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -239,7 +239,7 @@ func TestCodecV3ChunkHash(t *testing.T) { func TestCodecV3BatchEncode(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) // empty daBatch daBatchV3 := &daBatchV3{ @@ -315,7 +315,7 @@ func TestCodecV3BatchEncode(t *testing.T) { func TestCodecV3BatchHash(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) // empty daBatch daBatchV3 := &daBatchV3{ @@ 
-382,7 +382,7 @@ func TestCodecV3BatchHash(t *testing.T) { func TestCodecV3BatchDataHash(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -581,7 +581,7 @@ func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) { func TestCodecV3CalldataSizeEstimation(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -629,7 +629,7 @@ func TestCodecV3CalldataSizeEstimation(t *testing.T) { func TestCodecV3CommitGasEstimation(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -677,7 +677,7 @@ func TestCodecV3CommitGasEstimation(t *testing.T) { func TestCodecV3BatchSizeAndBlobSizeEstimation(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -734,7 +734,7 @@ func TestCodecV3BatchSizeAndBlobSizeEstimation(t *testing.T) { func TestCodecV3BatchL1MessagePopped(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -813,7 +813,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { func TestCodecV3BlobEncodingAndHashing(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -890,7 +890,7 @@ func TestCodecV3BlobEncodingAndHashing(t *testing.T) { func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -966,7 +966,7 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") @@ -1027,7 +1027,7 @@ func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { func TestCodecV3BatchStandardTestCases(t *testing.T) { codecv3, err := CodecFromVersion(CodecV3) - assert.NoError(t, err) + require.NoError(t, err) // Taking into consideration compression, we allow up to 5x of max blob bytes. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
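The budget used by these standard test cases follows from the blob layout: each 32-byte field element carries 31 usable bytes, the leading metadata rows hold the chunk count and per-chunk sizes, and the compressed codecs are allowed roughly 5x the effective blob capacity. A minimal sketch of that arithmetic, assuming maxBlobBytes = 131072 and a chunk limit of 45 (values mirrored from this patch series; treat them as assumptions, not re-verified against the repo):

package main

import "fmt"

func main() {
	const maxBlobBytes = 131072                          // 4096 field elements of 32 bytes each
	const maxEffectiveBlobBytes = maxBlobBytes / 32 * 31 // 31 usable bytes per field element: 126976
	const maxNumChunksPerBatch = 45                      // assumed codecv2/v3 limit

	// metadata: 2 bytes of num_chunks plus a 4-byte size per chunk slot
	metadataSize := maxNumChunksPerBatch*4 + 2

	// compressed codecs allow up to 5x of max blob bytes, minus the metadata rows
	nRowsData := 5*maxEffectiveBlobBytes - metadataSize
	fmt.Println(nRowsData) // 634698 under these assumptions
}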
diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 38b1243..190b1d7 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -295,7 +295,7 @@ func (d *DACodecV4) checkCompressedDataCompatibility(chunks []*Chunk) (bool, err return false, fmt.Errorf("failed to compress scroll batch bytes: %w", err) } if err = checkCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("Compressed data compatibility check failed", "err", err) + log.Warn("Compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } return true, nil diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 6c34ffd..feae510 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -17,7 +17,7 @@ import ( func TestCodecV4BlockEncode(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block := &daBlockV0{} encoded := hex.EncodeToString(block.Encode()) @@ -60,7 +60,7 @@ func TestCodecV4BlockEncode(t *testing.T) { assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) codecv0, err := CodecFromVersion(CodecV0) - assert.NoError(t, err) + require.NoError(t, err) // sanity check: v0 and v4 block encodings are identical for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { @@ -78,7 +78,7 @@ func TestCodecV4BlockEncode(t *testing.T) { func TestCodecV4ChunkEncode(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -153,7 +153,7 @@ func TestCodecV4ChunkEncode(t *testing.T) { func TestCodecV4ChunkHash(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) // chunk with a single empty block daBlock := &daBlockV0{} @@ -239,7 +239,7 @@ func TestCodecV4ChunkHash(t *testing.T) { func TestCodecV4BatchEncode(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) // empty daBatch daBatchV3 := &daBatchV3{ @@ -315,7 +315,7 @@ func TestCodecV4BatchEncode(t *testing.T) { func TestCodecV4BatchHash(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) // empty daBatch daBatchV3 := &daBatchV3{ @@ -382,7 +382,7 @@ func TestCodecV4BatchHash(t *testing.T) { func TestCodecV4BatchDataHash(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -581,7 +581,7 @@ func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { func TestCodecV4CalldataSizeEstimation(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -629,7 +629,7 @@ func TestCodecV4CalldataSizeEstimation(t *testing.T) { func TestCodecV4CommitGasEstimation(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -677,7 +677,7 @@ func TestCodecV4CommitGasEstimation(t *testing.T) { func 
TestCodecV4BatchSizeAndBlobSizeEstimation(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -734,7 +734,7 @@ func TestCodecV4BatchSizeAndBlobSizeEstimation(t *testing.T) { func TestCodecV4BatchL1MessagePopped(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -813,7 +813,7 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { func TestCodecV4BlobEncodingAndHashing(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -890,7 +890,7 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} @@ -966,7 +966,7 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") @@ -1027,7 +1027,7 @@ func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) // Taking into consideration compression, we allow up to 5x of max blob bytes minus 1 byte for the compression flag. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. @@ -1175,7 +1175,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) - assert.NoError(t, err) + require.NoError(t, err) // Taking into consideration disabling compression, we allow up to max effective blob bytes. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks, plus 1 byte for the compression flag. From 1eea8ce495425234170d521a10845762f291dfd1 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 15:20:12 +0800 Subject: [PATCH 110/126] remove some constants --- encoding/codecv0.go | 28 +++++++++++------------- encoding/codecv0_types.go | 18 +++++++-------- encoding/codecv1.go | 34 ++++++++++++++--------------- encoding/codecv1_types.go | 20 ++++++++--------- encoding/codecv2.go | 34 ++++++++++++++--------------- encoding/codecv3.go | 44 ++++++++++++++++++------------------- encoding/codecv3_types.go | 24 ++++++++++---------- encoding/codecv4.go | 46 +++++++++++++++++++-------------------- encoding/da.go | 36 ++++++++++++++++++++++++++++++ 9 files changed, 155 insertions(+), 129 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 14e7bd9..ff3f056 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -204,25 +204,23 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchFromBytes decodes the given byte slice into a DABatch. 
func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) < 89 { - return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) + if len(data) < daBatchV0EncodedMinLength { + return nil, fmt.Errorf("insufficient data for DABatch, expected at least %d bytes but got %d", daBatchV0EncodedMinLength, len(data)) } - if CodecVersion(data[0]) != CodecV0 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV0, data[0]) + if CodecVersion(data[daBatchOffsetVersion]) != CodecV0 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV0, data[daBatchOffsetVersion]) } - b := newDABatchV0( - data[0], // version - binary.BigEndian.Uint64(data[1:9]), // batchIndex - binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped - common.BytesToHash(data[25:57]), // dataHash - common.BytesToHash(data[57:89]), // parentBatchHash - data[89:], // skippedL1MessageBitmap - ) - - return b, nil + return newDABatchV0( + data[daBatchOffsetVersion], // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV0OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV0OffsetParentBatchHash]), // dataHash + common.BytesToHash(data[daBatchV0OffsetParentBatchHash:daBatchV0OffsetSkippedL1MessageBitmap]), // parentBatchHash + data[daBatchV0OffsetSkippedL1MessageBitmap:], // skippedL1MessageBitmap + ), nil } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index b374909..9a37b29 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -226,16 +226,16 @@ func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp } } -// Encode serializes the DABatch into bytes. +// Encode serializes the DABatchV0 into bytes. 
func (b *daBatchV0) Encode() []byte { - batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) - batchBytes[0] = b.version - binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) - copy(batchBytes[25:], b.dataHash[:]) - copy(batchBytes[57:], b.parentBatchHash[:]) - copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) + batchBytes := make([]byte, daBatchV0OffsetSkippedL1MessageBitmap+len(b.skippedL1MessageBitmap)) + batchBytes[daBatchOffsetVersion] = b.version + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) + copy(batchBytes[daBatchOffsetDataHash:daBatchV0OffsetParentBatchHash], b.dataHash[:]) + copy(batchBytes[daBatchV0OffsetParentBatchHash:daBatchV0OffsetSkippedL1MessageBitmap], b.parentBatchHash[:]) + copy(batchBytes[daBatchV0OffsetSkippedL1MessageBitmap:], b.skippedL1MessageBitmap[:]) return batchBytes } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 9994302..b321219 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -241,28 +241,26 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) < 121 { - return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) + if len(data) < daBatchV1EncodedMinLength { + return nil, fmt.Errorf("insufficient data for DABatch, expected at least %d bytes but got %d", daBatchV1EncodedMinLength, len(data)) } - if CodecVersion(data[0]) != CodecV1 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV1, data[0]) + if CodecVersion(data[daBatchOffsetVersion]) != CodecV1 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV1, data[daBatchOffsetVersion]) } - b := newDABatchV1( - data[0], // version - binary.BigEndian.Uint64(data[1:9]), // batchIndex - binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped - common.BytesToHash(data[25:57]), // dataHash - common.BytesToHash(data[57:89]), // blobVersionedHash - common.BytesToHash(data[89:121]), // parentBatchHash - data[121:], // skippedL1MessageBitmap - nil, // blob - nil, // z - ) - - return b, nil + return newDABatchV1( + data[daBatchOffsetVersion], // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV1OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV1OffsetBlobVersionedHash:daBatchV1OffsetParentBatchHash]), // blobVersionedHash + common.BytesToHash(data[daBatchV1OffsetParentBatchHash:daBatchV1OffsetSkippedL1MessageBitmap]), // 
parentBatchHash + data[daBatchV1OffsetSkippedL1MessageBitmap:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ), nil } func (d *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index 4b5a899..7a7e6a2 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -102,17 +102,17 @@ func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp } } -// Encode serializes the DABatch into bytes. +// Encode serializes the DABatchV1 into bytes. func (b *daBatchV1) Encode() []byte { - batchBytes := make([]byte, 121+len(b.skippedL1MessageBitmap)) - batchBytes[0] = b.version - binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) - copy(batchBytes[25:], b.dataHash[:]) - copy(batchBytes[57:], b.blobVersionedHash[:]) - copy(batchBytes[89:], b.parentBatchHash[:]) - copy(batchBytes[121:], b.skippedL1MessageBitmap[:]) + batchBytes := make([]byte, daBatchV1OffsetSkippedL1MessageBitmap+len(b.skippedL1MessageBitmap)) + batchBytes[daBatchOffsetVersion] = b.version + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) + copy(batchBytes[daBatchOffsetDataHash:daBatchV1OffsetBlobVersionedHash], b.dataHash[:]) + copy(batchBytes[daBatchV1OffsetBlobVersionedHash:daBatchV1OffsetParentBatchHash], b.blobVersionedHash[:]) + copy(batchBytes[daBatchV1OffsetParentBatchHash:daBatchV1OffsetSkippedL1MessageBitmap], b.parentBatchHash[:]) + copy(batchBytes[daBatchV1OffsetSkippedL1MessageBitmap:], b.skippedL1MessageBitmap[:]) return batchBytes } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 38cc54b..49b6159 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -207,28 +207,26 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) < 121 { - return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) + if len(data) < daBatchV1EncodedMinLength { + return nil, fmt.Errorf("insufficient data for DABatch, expected at least %d bytes but got %d", daBatchV1EncodedMinLength, len(data)) } - if CodecVersion(data[0]) != CodecV2 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV2, data[0]) + if CodecVersion(data[daBatchOffsetVersion]) != CodecV2 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV2, data[daBatchOffsetVersion]) } - b := newDABatchV1( - data[0], // version - binary.BigEndian.Uint64(data[1:9]), // batchIndex - binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped - common.BytesToHash(data[25:57]), // dataHash - common.BytesToHash(data[57:89]), // blobVersionedHash - common.BytesToHash(data[89:121]), // parentBatchHash - data[121:], // skippedL1MessageBitmap - nil, // blob - nil, // z - ) - - return b, nil + return newDABatchV1( + data[daBatchOffsetVersion], // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV1OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV1OffsetBlobVersionedHash:daBatchV1OffsetParentBatchHash]), // blobVersionedHash + common.BytesToHash(data[daBatchV1OffsetParentBatchHash:daBatchV1OffsetSkippedL1MessageBitmap]), // parentBatchHash + data[daBatchV1OffsetSkippedL1MessageBitmap:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ), nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. diff --git a/encoding/codecv3.go b/encoding/codecv3.go index a2f5c2e..5aaa834 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -79,34 +79,32 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) != 193 { - return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + if len(data) != daBatchV3EncodedLength { + return nil, fmt.Errorf("invalid data length for DABatch, expected %d bytes but got %d", daBatchV3EncodedLength, len(data)) } - if CodecVersion(data[0]) != CodecV3 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV3, data[0]) + if CodecVersion(data[daBatchOffsetVersion]) != CodecV3 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV3, data[daBatchOffsetVersion]) } - b := newDABatchV3WithProof( - data[0], // Version - binary.BigEndian.Uint64(data[1:9]), // BatchIndex - binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped - binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp - common.BytesToHash(data[25:57]), // DataHash - common.BytesToHash(data[89:121]), // ParentBatchHash - common.BytesToHash(data[57:89]), // BlobVersionedHash - nil, // skippedL1MessageBitmap - nil, // blob - nil, // z - nil, // blobBytes - [2]common.Hash{ // BlobDataProof - common.BytesToHash(data[129:161]), - common.BytesToHash(data[161:193]), + return newDABatchV3WithProof( + data[daBatchOffsetVersion], // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + binary.BigEndian.Uint64(data[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof]), // lastBlockTimestamp + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash + common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // blobDataProof + common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointLength]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointLength : daBatchV3EncodedLength]), }, - ) - - return b, nil + ), nil } // estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index 328f34d..0cebf10 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -80,19 +80,19 @@ func newDABatchV3WithProof(version uint8, batchIndex, l1MessagePopped, totalL1Me } } -// Encode serializes the DABatch into bytes. +// Encode serializes the DABatchV3 into bytes. 
func (b *daBatchV3) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.version - binary.BigEndian.PutUint64(batchBytes[1:9], b.batchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.l1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.totalL1MessagePopped) - copy(batchBytes[25:57], b.dataHash[:]) - copy(batchBytes[57:89], b.blobVersionedHash[:]) - copy(batchBytes[89:121], b.parentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.lastBlockTimestamp) - copy(batchBytes[129:161], b.blobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.blobDataProof[1].Bytes()) + batchBytes := make([]byte, daBatchV3EncodedLength) + batchBytes[daBatchOffsetVersion] = b.version + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) + copy(batchBytes[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash], b.dataHash[:]) + copy(batchBytes[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash], b.blobVersionedHash[:]) + copy(batchBytes[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp], b.parentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof], b.lastBlockTimestamp) + copy(batchBytes[daBatchV3OffsetBlobDataProof:daBatchV3OffsetBlobDataProof+kzgPointLength], b.blobDataProof[0].Bytes()) + copy(batchBytes[daBatchV3OffsetBlobDataProof+kzgPointLength:daBatchV3EncodedLength], b.blobDataProof[1].Bytes()) return batchBytes } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 190b1d7..cd8d526 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -106,36 +106,34 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields empty. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) != 193 { - return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + if len(data) != daBatchV3EncodedLength { + return nil, fmt.Errorf("invalid data length for DABatch, expected %d bytes but got %d", daBatchV3EncodedLength, len(data)) } - if CodecVersion(data[0]) != CodecV4 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV4, data[0]) + if CodecVersion(data[daBatchOffsetVersion]) != CodecV4 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV4, data[daBatchOffsetVersion]) } - b := newDABatchV3WithProof( - data[0], // Version - binary.BigEndian.Uint64(data[1:9]), // BatchIndex - binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped - binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp - common.BytesToHash(data[25:57]), // DataHash - common.BytesToHash(data[89:121]), // ParentBatchHash - common.BytesToHash(data[57:89]), // BlobVersionedHash - nil, // skippedL1MessageBitmap - nil, // blob - nil, // z - nil, // blobBytes - [2]common.Hash{ // BlobDataProof - common.BytesToHash(data[129:161]), - common.BytesToHash(data[161:193]), + return newDABatchV3WithProof( + data[daBatchOffsetVersion], // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + binary.BigEndian.Uint64(data[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof]), // lastBlockTimestamp + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash + common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // blobDataProof + common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointLength]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointLength : daBatchV3EncodedLength]), }, - ) - - return b, nil + ), nil } // constructBlobPayload constructs the 4844 blob payload. diff --git a/encoding/da.go b/encoding/da.go index 090b213..11cb62d 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -38,6 +38,42 @@ const maxEffectiveBlobBytes = maxBlobBytes / 32 * 31 // only used in codecv2 and codecv3. const minCompressedDataCheckSize = 131072 +// kzgPointLength is the length of a KZG point (z and y) in bytes. 
+const kzgPointLength = 32 + +const ( + daBatchOffsetVersion = 0 + daBatchOffsetBatchIndex = 1 + daBatchOffsetDataHash = 25 +) + +const ( + daBatchV0OffsetL1MessagePopped = 9 + daBatchV0OffsetTotalL1MessagePopped = 17 + daBatchV0OffsetParentBatchHash = 57 + daBatchV0OffsetSkippedL1MessageBitmap = 89 + daBatchV0EncodedMinLength = 89 // min length of a v0 da batch, when there are no skipped L1 messages +) + +const ( + daBatchV1OffsetL1MessagePopped = 9 + daBatchV1OffsetTotalL1MessagePopped = 17 + daBatchV1OffsetBlobVersionedHash = 57 + daBatchV1OffsetParentBatchHash = 89 + daBatchV1OffsetSkippedL1MessageBitmap = 121 + daBatchV1EncodedMinLength = 121 // min length of a v1 da batch, when there are no skipped L1 messages +) + +const ( + daBatchV3OffsetL1MessagePopped = 9 + daBatchV3OffsetTotalL1MessagePopped = 17 + daBatchV3OffsetBlobVersionedHash = 57 + daBatchV3OffsetParentBatchHash = 89 + daBatchV3OffsetLastBlockTimestamp = 121 + daBatchV3OffsetBlobDataProof = 129 + daBatchV3EncodedLength = 193 +) + // Block represents an L2 block. type Block struct { Header *types.Header From 1452109ca1d452134c7f53bf53a97d8387802d67 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 15:41:27 +0800 Subject: [PATCH 111/126] address comments --- encoding/codecv0.go | 4 ++-- encoding/codecv1.go | 12 ++++++------ encoding/codecv1_test.go | 4 ++-- encoding/codecv2.go | 14 ++++++-------- encoding/codecv2_test.go | 4 ++-- encoding/codecv3.go | 4 ++-- encoding/codecv3_test.go | 4 ++-- encoding/codecv4.go | 11 +++++------ encoding/codecv4_test.go | 8 ++++---- encoding/da.go | 3 +++ encoding/da_test.go | 3 +-- encoding/interfaces.go | 9 +++++---- 12 files changed, 40 insertions(+), 40 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index ff3f056..28105ce 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -23,7 +23,7 @@ func (d *DACodecV0) Version() CodecVersion { } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (d *DACodecV0) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV0) MaxNumChunksPerBatch() int { return codecv0MaxNumChunks } @@ -164,7 +164,7 @@ func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index b321219..d091b08 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -96,13 +96,13 @@ func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { batchBytes := bytesFromBlobCanonical(blob) - return decodeTxsFromBytes(batchBytes[:], chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(batchBytes[:], chunks, d.MaxNumChunksPerBatch()) } // NewDABatch creates a DABatch from the provided Batch. 
func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } @@ -123,7 +123,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) + blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) if err != nil { return nil, err } @@ -197,7 +197,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunksPerBatch chunks, the rest + // if we have fewer than maxNumChunksPerBatch chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { @@ -408,7 +408,7 @@ func (d *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - metadataSize := 2 + 4*d.MaxNumChunksPerBatch() + metadataSize := uint64(2 + 4*d.MaxNumChunksPerBatch()) batchDataSize, err := d.chunkL1CommitBlobDataSize(c) if err != nil { return 0, 0, err @@ -419,7 +419,7 @@ func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - metadataSize := 2 + 4*d.MaxNumChunksPerBatch() + metadataSize := uint64(2 + 4*d.MaxNumChunksPerBatch()) var batchDataSize uint64 for _, c := range b.Chunks { chunkDataSize, err := d.chunkL1CommitBlobDataSize(c) diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index ab4cb30..8bb4015 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -895,7 +895,7 @@ func TestCodecV1BatchStandardTestCases(t *testing.T) { require.NoError(t, err) // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
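// (the ignored metadata is 2 bytes of num_chunks plus 4 bytes per chunk slot, hence the MaxNumChunksPerBatch()*4 + 2 term below)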
- nRowsData := maxEffectiveBlobBytes - (int(codecv1.MaxNumChunksPerBatch())*4 + 2) + nRowsData := maxEffectiveBlobBytes - (codecv1.MaxNumChunksPerBatch()*4 + 2) repeat := func(element byte, count int) string { result := make([]byte, 0, count) @@ -953,7 +953,7 @@ func TestCodecV1BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := codecv1.(*DACodecV1).constructBlobPayload(chunks, int(codecv1.MaxNumChunksPerBatch()), true /* use mock */) + blob, blobVersionedHash, z, err := codecv1.(*DACodecV1).constructBlobPayload(chunks, codecv1.MaxNumChunksPerBatch(), true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 49b6159..d3bd2cf 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -30,26 +30,24 @@ func (d *DACodecV2) Version() CodecVersion { } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. -func (d *DACodecV2) MaxNumChunksPerBatch() uint64 { +func (d *DACodecV2) MaxNumChunksPerBatch() int { return codecv2MaxNumChunks } // DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { compressedBytes := bytesFromBlobCanonical(blob) - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - - batchBytes, err := decompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) + batchBytes, err := decompressScrollBlobToBatch(append(zstdMagicNumber, compressedBytes[:]...)) if err != nil { return err } - return decodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(batchBytes, chunks, d.MaxNumChunksPerBatch()) } // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } @@ -70,7 +68,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) + blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) if err != nil { return nil, err } @@ -143,7 +141,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunksPerBatch chunks, the rest + // if we have fewer than maxNumChunksPerBatch chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 887469c..d7e6c2e 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -890,7 +890,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { // Taking into consideration compression, we allow up to 5x of max blob bytes. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
- nRowsData := 5*maxEffectiveBlobBytes - (int(codecv2.MaxNumChunksPerBatch())*4 + 2) + nRowsData := 5*maxEffectiveBlobBytes - (codecv2.MaxNumChunksPerBatch()*4 + 2) repeat := func(element byte, count int) string { result := make([]byte, 0, count) @@ -994,7 +994,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, int(codecv2.MaxNumChunksPerBatch()), true /* use mock */) + blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch(), true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 5aaa834..f366107 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -22,7 +22,7 @@ func (d *DACodecV3) Version() CodecVersion { // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } @@ -47,7 +47,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) if err != nil { return nil, err } diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index d5af43b..c202f11 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -1031,7 +1031,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { // Taking into consideration compression, we allow up to 5x of max blob bytes. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
- nRowsData := 5*maxEffectiveBlobBytes - (int(codecv3.MaxNumChunksPerBatch())*4 + 2) + nRowsData := 5*maxEffectiveBlobBytes - (codecv3.MaxNumChunksPerBatch()*4 + 2) repeat := func(element byte, count int) string { result := make([]byte, 0, count) @@ -1135,7 +1135,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, int(codecv3.MaxNumChunksPerBatch()), true /* use mock */) + blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch(), true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index cd8d526..0d3c7ce 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -32,21 +32,20 @@ func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx // if first byte is 1 - data compressed, 0 - not compressed if rawBytes[0] == 0x1 { - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := decompressScrollBlobToBatch(append(magics, rawBytes[1:]...)) + batchBytes, err := decompressScrollBlobToBatch(append(zstdMagicNumber, rawBytes[1:]...)) if err != nil { return err } - return decodeTxsFromBytes(batchBytes, chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(batchBytes, chunks, d.MaxNumChunksPerBatch()) } else { - return decodeTxsFromBytes(rawBytes[1:], chunks, int(d.MaxNumChunksPerBatch())) + return decodeTxsFromBytes(rawBytes[1:], chunks, d.MaxNumChunksPerBatch()) } } // NewDABatch creates a DABatch from the provided Batch. func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > int(d.MaxNumChunksPerBatch()) { + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) } @@ -76,7 +75,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, int(d.MaxNumChunksPerBatch()), enableCompression, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression, false /* no mock */) if err != nil { return nil, err } diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index feae510..376d157 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -1031,7 +1031,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { // Taking into consideration compression, we allow up to 5x of max blob bytes minus 1 byte for the compression flag. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
- nRowsData := 5*(maxEffectiveBlobBytes-1) - (int(codecv4.MaxNumChunksPerBatch())*4 + 2) + nRowsData := 5*(maxEffectiveBlobBytes-1) - (codecv4.MaxNumChunksPerBatch()*4 + 2) repeat := func(element byte, count int) string { result := make([]byte, 0, count) @@ -1135,7 +1135,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, int(codecv4.MaxNumChunksPerBatch()), true /* enable encode */, true /* use mock */) + blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -1179,7 +1179,7 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { // Taking into consideration disabling compression, we allow up to max effective blob bytes. // We then ignore the metadata rows for MaxNumChunksPerBatch chunks, plus 1 byte for the compression flag. - nRowsData := maxEffectiveBlobBytes - (int(codecv4.MaxNumChunksPerBatch())*4 + 2) - 1 + nRowsData := maxEffectiveBlobBytes - (codecv4.MaxNumChunksPerBatch()*4 + 2) - 1 repeat := func(element byte, count int) string { result := make([]byte, 0, count) @@ -1283,7 +1283,7 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, int(codecv4.MaxNumChunksPerBatch()), false /* disable encode */, true /* use mock */) + blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/da.go b/encoding/da.go index 11cb62d..bfcbe78 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -41,6 +41,9 @@ const minCompressedDataCheckSize = 131072 // kzgPointLength is the length of a KZG point (z and y) in bytes. const kzgPointLength = 32 +// zstdMagicNumber is the magic number for zstd compressed data header. +var zstdMagicNumber = []byte{0x28, 0xb5, 0x2f, 0xfd} + const ( daBatchOffsetVersion = 0 daBatchOffsetBatchIndex = 1 diff --git a/encoding/da_test.go b/encoding/da_test.go index f4e7470..662a127 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -128,8 +128,7 @@ func TestBlobCompressDecompress(t *testing.T) { res := bytesFromBlobCanonical(blob) compressedBytes := res[:] - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - compressedBytes = append(magics, compressedBytes...) + compressedBytes = append(zstdMagicNumber, compressedBytes...) decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes) assert.NoError(t, err) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 8ba40a1..3f7ef6e 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -40,7 +40,7 @@ type DABatch interface { // Codec represents the interface for encoding and decoding DA-related structures. 
type Codec interface { Version() CodecVersion - MaxNumChunksPerBatch() uint64 + MaxNumChunksPerBatch() int NewDABlock(*Block, uint64) (DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) @@ -50,17 +50,18 @@ type Codec interface { DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error - EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) - EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) CheckBatchCompressedDataCompatibility(*Batch) (bool, error) + + EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) + EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) EstimateBlockL1CommitCalldataSize(*Block) (uint64, error) EstimateChunkL1CommitCalldataSize(*Chunk) (uint64, error) EstimateChunkL1CommitGas(*Chunk) (uint64, error) EstimateBatchL1CommitGas(*Batch) (uint64, error) EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error) - JSONFromBytes([]byte) ([]byte, error) + JSONFromBytes([]byte) ([]byte, error) // convert batch header bytes to JSON, this is only used to provide witness data for the prover. } // CodecVersion represents the version of the codec. From 43824ff70a0fd4f78ba94cef96e7d500510638fa Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 16:02:52 +0800 Subject: [PATCH 112/126] add more logs --- encoding/codecv0.go | 6 +++--- encoding/codecv0_types.go | 8 ++++---- encoding/codecv1.go | 21 +++++++++------------ encoding/codecv2.go | 10 +++++----- encoding/codecv3.go | 2 +- encoding/codecv4.go | 4 ++-- 6 files changed, 24 insertions(+), 27 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 28105ce..37e1028 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -175,17 +175,17 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // compute batch data hash dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to compute batch data hash, index: %d, err: %w", batch.Index, err) } // skipped L1 messages bitmap bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err) } if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { - return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) } l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index 9a37b29..8f6e7a9 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -135,7 +135,7 @@ func (c *daChunkV0) Encode() ([]byte, error) { var txLen [4]byte rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) l2TxDataBytes = 
append(l2TxDataBytes, txLen[:]...)
@@ -151,7 +151,7 @@ func (c *daChunkV0) Encode() ([]byte, error) {
func (c *daChunkV0) Hash() (common.Hash, error) {
chunkBytes, err := c.Encode()
if err != nil {
- return common.Hash{}, err
+ return common.Hash{}, fmt.Errorf("failed to encode DAChunk: %w", err)
}
if len(chunkBytes) == 0 {
@@ -162,8 +162,8 @@ func (c *daChunkV0) Hash() (common.Hash, error) {
// concatenate block contexts
var dataBytes []byte
for i := 0; i < int(numBlocks); i++ {
- start := 1 + 60*i
- end := start + 58 // only the first 58 bytes of each BlockContext are needed for the hashing process
+ start := 1 + blockContextByteSize*i
+ end := start + blockContextByteSize - 2 // last 2 bytes of each BlockContext are not used in hashing
if end > len(chunkBytes) {
return common.Hash{}, fmt.Errorf("unexpected end index: %d, chunkBytes length: %d", end, len(chunkBytes))
}
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index d091b08..87dea10 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -83,11 +83,9 @@ func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error)
}
}
- var transactions []types.Transactions
-
chunks = append(chunks, &DAChunkRawTx{
Blocks: blocks,
- Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to blobs and filled in DecodeTxsFromBlob method.
+ Transactions: nil, // Transactions is still empty at the DecodeDAChunksRawTx stage: txs were moved into blobs and are filled in later by DecodeTxsFromBlob.
})
}
return chunks, nil
@@ -113,23 +111,23 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
// batch data hash
dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("failed to compute batch data hash, index: %d, err: %w", batch.Index, err)
}
// skipped L1 messages bitmap
bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err)
}
// blob payload
blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err)
}
if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore {
- return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore)
+ return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore)
}
l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore
@@ -181,16 +179,15 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i
// encode L2 txs into blob payload
rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData)
if err != nil {
- return nil, common.Hash{}, nil, err
+ return nil, common.Hash{}, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err)
}
blobBytes = append(blobBytes, rlpTxData...)
} } // blob metadata: chunki_size - if chunkSize := len(blobBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize)) - } + chunkSize := len(blobBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize)) // challenge: compute chunk data hash chunkDataHash = crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:]) @@ -273,7 +270,7 @@ func (d *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { rlpTxData, err := convertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { - return 0, err + return 0, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } dataSize += uint64(len(rlpTxData)) } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index d3bd2cf..2fadf52 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -58,23 +58,23 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // batch data hash dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to compute batch data hash, index: %d, err: %w", batch.Index, err) } // skipped L1 messages bitmap bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err) } // blob payload blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err) } if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { - return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) } l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore @@ -126,7 +126,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // encode L2 txs into blob payload rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, nil, err + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } batchBytes = append(batchBytes, rlpTxData...) 
} diff --git a/encoding/codecv3.go b/encoding/codecv3.go index f366107..6d4662d 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -56,7 +56,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { - return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) } l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0d3c7ce..9254561 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -84,7 +84,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { - return nil, fmt.Errorf("totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) } l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore @@ -167,7 +167,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // encode L2 txs into blob payload rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, nil, err + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } batchBytes = append(batchBytes, rlpTxData...) 
} From 886365ddfb51152179be49ad8a66e2d3cccd7656 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 16:52:00 +0800 Subject: [PATCH 113/126] address comments --- encoding/codecv1.go | 5 ++++- encoding/codecv2.go | 5 ++++- encoding/codecv3.go | 4 ++-- encoding/codecv3_types.go | 4 ++-- encoding/codecv4.go | 9 ++++++--- encoding/da.go | 4 ++-- 6 files changed, 20 insertions(+), 11 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 87dea10..ae7c1d9 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -229,7 +229,10 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // the challenge point z var z kzg4844.Point - start := 32 - len(pointBytes) + if len(pointBytes) > kzgPointByteSize { + return nil, common.Hash{}, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) copy(z[start:], pointBytes) return blob, blobVersionedHash, &z, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 2fadf52..7130742 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -196,7 +196,10 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // the challenge point z var z kzg4844.Point - start := 32 - len(pointBytes) + if len(pointBytes) > kzgPointByteSize { + return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) copy(z[start:], pointBytes) return blob, blobVersionedHash, &z, blobBytes, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6d4662d..818cb12 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -101,8 +101,8 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { nil, // z nil, // blobBytes [2]common.Hash{ // blobDataProof - common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointLength]), - common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointLength : daBatchV3EncodedLength]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]), }, ), nil } diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index 0cebf10..bffd2e7 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -91,8 +91,8 @@ func (b *daBatchV3) Encode() []byte { copy(batchBytes[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash], b.blobVersionedHash[:]) copy(batchBytes[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp], b.parentBatchHash[:]) binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof], b.lastBlockTimestamp) - copy(batchBytes[daBatchV3OffsetBlobDataProof:daBatchV3OffsetBlobDataProof+kzgPointLength], b.blobDataProof[0].Bytes()) - copy(batchBytes[daBatchV3OffsetBlobDataProof+kzgPointLength:daBatchV3EncodedLength], b.blobDataProof[1].Bytes()) + copy(batchBytes[daBatchV3OffsetBlobDataProof:daBatchV3OffsetBlobDataProof+kzgPointByteSize], b.blobDataProof[0].Bytes()) + copy(batchBytes[daBatchV3OffsetBlobDataProof+kzgPointByteSize:daBatchV3EncodedLength], b.blobDataProof[1].Bytes()) return batchBytes } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 9254561..8c221b2 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -129,8 +129,8 @@ 
func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { nil, // z nil, // blobBytes [2]common.Hash{ // blobDataProof - common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointLength]), - common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointLength : daBatchV3EncodedLength]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]), }, ), nil } @@ -242,7 +242,10 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // the challenge point z var z kzg4844.Point - start := 32 - len(pointBytes) + if len(pointBytes) > kzgPointByteSize { + return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) copy(z[start:], pointBytes) return blob, blobVersionedHash, &z, blobBytes, nil diff --git a/encoding/da.go b/encoding/da.go index bfcbe78..c692ea1 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -38,8 +38,8 @@ const maxEffectiveBlobBytes = maxBlobBytes / 32 * 31 // only used in codecv2 and codecv3. const minCompressedDataCheckSize = 131072 -// kzgPointLength is the length of a KZG point (z and y) in bytes. -const kzgPointLength = 32 +// kzgPointByteSize is the size of a KZG point (z and y) in bytes. +const kzgPointByteSize = 32 // zstdMagicNumber is the magic number for zstd compressed data header. var zstdMagicNumber = []byte{0x28, 0xb5, 0x2f, 0xfd} From 4f78d1aa168da2837e27bf44b6a67a12b206291d Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 18:44:37 +0800 Subject: [PATCH 114/126] address comments --- encoding/bitmap.go | 14 +++---- encoding/codecv0.go | 44 +++++++++++----------- encoding/codecv0_types.go | 35 +++++++++++------- encoding/codecv1.go | 50 ++++++++++++------------- encoding/codecv1_types.go | 3 +- encoding/codecv2.go | 8 ++-- encoding/codecv3.go | 78 +++++---------------------------------- encoding/codecv4.go | 8 ++-- encoding/da.go | 21 +++++++++-- 9 files changed, 112 insertions(+), 149 deletions(-) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index cc0614e..e2e2ab8 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -54,11 +54,11 @@ func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePo } } - bitmapBytes := make([]byte, len(skippedBitmap)*32) + bitmapBytes := make([]byte, len(skippedBitmap)*skippedL1MessageBitmapByteSize) for ii, num := range skippedBitmap { bytes := num.Bytes() - padding := 32 - len(bytes) - copy(bitmapBytes[32*ii+padding:], bytes) + padding := skippedL1MessageBitmapByteSize - len(bytes) + copy(bitmapBytes[skippedL1MessageBitmapByteSize*ii+padding:], bytes) } return bitmapBytes, nextIndex, nil @@ -67,15 +67,15 @@ func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePo // decodeBitmap decodes skipped L1 message bitmap of the batch from bytes to big.Int's. 
func decodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*big.Int, error) {
length := len(skippedL1MessageBitmap)
- if length%32 != 0 {
- return nil, fmt.Errorf("skippedL1MessageBitmap length doesn't match, skippedL1MessageBitmap length should be equal 0 modulo 32, length of skippedL1MessageBitmap: %v", length)
+ if length%skippedL1MessageBitmapByteSize != 0 {
+ return nil, fmt.Errorf("skippedL1MessageBitmap length doesn't match, length of skippedL1MessageBitmap should be a multiple of %v, length of skippedL1MessageBitmap: %v", skippedL1MessageBitmapByteSize, length)
}
if length*8 < totalL1MessagePopped {
return nil, fmt.Errorf("skippedL1MessageBitmap length is too small, skippedL1MessageBitmap length should be at least %v, length of skippedL1MessageBitmap: %v", (totalL1MessagePopped+7)/8, length)
}
var skippedBitmap []*big.Int
- for index := 0; index < length/32; index++ {
- bitmap := big.NewInt(0).SetBytes(skippedL1MessageBitmap[index*32 : index*32+32])
+ for index := 0; index < length/skippedL1MessageBitmapByteSize; index++ {
+ bitmap := big.NewInt(0).SetBytes(skippedL1MessageBitmap[index*skippedL1MessageBitmapByteSize : index*skippedL1MessageBitmapByteSize+skippedL1MessageBitmapByteSize])
skippedBitmap = append(skippedBitmap, bitmap)
}
return skippedBitmap, nil
diff --git a/encoding/codecv0.go b/encoding/codecv0.go
index 37e1028..495586d 100644
--- a/encoding/codecv0.go
+++ b/encoding/codecv0.go
@@ -230,7 +230,7 @@ func (d *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error)
if txData.Type == types.L1MessageTxType {
continue
}
- size += 4 // 4 bytes payload length
+ size += payloadLengthBytes
txPayloadLength, err := getTxPayloadLength(txData)
if err != nil {
return 0, err
}
@@ -263,17 +263,17 @@ func (d *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) {
total += calldataNonZeroByteGas * blockContextByteSize
// sload
- total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue
+ total += coldSloadGas * numL1Messages // numL1Messages times cold sload in L1MessageQueue
// staticcall
- total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue
- total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue
+ total += warmAddressAccessGas * numL1Messages // numL1Messages times call to L1MessageQueue
+ total += warmAddressAccessGas * numL1Messages // numL1Messages times warm address access to L1MessageQueue
- total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy
- total += 100 * numL1Messages // read admin in proxy
- total += 100 * numL1Messages // read impl in proxy
- total += 100 * numL1Messages // access impl
- total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl
+ total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // staticcall to proxy
+ total += warmAddressAccessGas * numL1Messages // read admin in proxy
+ total += warmAddressAccessGas * numL1Messages // read impl in proxy
+ total += warmAddressAccessGas * numL1Messages // access impl
+ total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // delegatecall to impl
return total, nil
}
@@ -305,10 +305,10 @@ func (d *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) {
}
numBlocks := uint64(len(c.Blocks))
- totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload
- totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata
+ totalL1CommitGas +=
warmSloadGas * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += getKeccak256Gas(blockContextBytesForHashing*numBlocks + common.HashLength*totalTxNum) // chunk hash return totalL1CommitGas, nil } @@ -317,22 +317,22 @@ func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += extraGasCost // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * coldSloadGas // 4 one-time cold sload for commitBatch + totalL1CommitGas += sstoreGas // 1 time sstore + totalL1CommitGas += baseTxGas // base gas for tx totalL1CommitGas += calldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += calldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += (coldSloadGas + coldAddressAccessGas - warmSloadGas - warmAddressAccessGas) + totalL1CommitGas += getKeccak256Gas(daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header hash, length is estimated as (constant part) + (1 skippedL1MessageBitmap) + totalL1CommitGas += calldataNonZeroByteGas * (daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(common.HashLength * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -346,8 +346,8 @@ func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += calldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += calldataNonZeroByteGas * (skippedL1MessageBitmapByteSize * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += getKeccak256Gas(daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index 8f6e7a9..09f734d 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -13,6 +13,15 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +const ( + numberOffset = 0 + timestampOffset = numberOffset + 8 + baseFeeOffset = timestampOffset + 8 + gasLimitOffset = baseFeeOffset + 32 + numTransactionsOffset = gasLimitOffset + 8 + 
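[Editor's note: a quick sanity check on the layout the offsets above describe: 8 (number) + 8 (timestamp) + 32 (baseFee) + 8 (gasLimit) + 2 (numTransactions) + 2 (numL1Messages) = 60 bytes, i.e. exactly blockContextByteSize. The sketch below is a minimal standalone illustration of that layout, not repo code; all field values are made up:

package main

import (
	"encoding/binary"
	"fmt"
	"math/big"
)

func main() {
	// 60-byte block context, mirroring the offsets in codecv0_types.go:
	// number (8) | timestamp (8) | baseFee (32) | gasLimit (8) | numTransactions (2) | numL1Messages (2)
	buf := make([]byte, 60)
	binary.BigEndian.PutUint64(buf[0:8], 12345)        // number
	binary.BigEndian.PutUint64(buf[8:16], 1729152000)  // timestamp
	big.NewInt(1_000_000_000).FillBytes(buf[16:48])    // baseFee, left-padded into the full 32-byte field
	binary.BigEndian.PutUint64(buf[48:56], 30_000_000) // gasLimit
	binary.BigEndian.PutUint16(buf[56:58], 10)         // numTransactions
	binary.BigEndian.PutUint16(buf[58:60], 2)          // numL1Messages

	// Decoding reads the same slices back.
	fmt.Println(binary.BigEndian.Uint64(buf[0:8]))   // 12345
	fmt.Println(new(big.Int).SetBytes(buf[16:48]))   // 1000000000
	fmt.Println(binary.BigEndian.Uint16(buf[58:60])) // 2
}

This also motivates the switch from PutUint64 to FillBytes for baseFee in the hunk that follows: FillBytes left-pads the value into the whole 32-byte field, so base fees that do not fit in a uint64 still round-trip correctly.]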
numL1MessagesOffset = numTransactionsOffset + 2 +) + // daBlockV0 represents a Data Availability Block. type daBlockV0 struct { number uint64 @@ -38,14 +47,14 @@ func newDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit ui // Encode serializes the DABlock into a slice of bytes. func (b *daBlockV0) Encode() []byte { bytes := make([]byte, blockContextByteSize) - binary.BigEndian.PutUint64(bytes[0:], b.number) - binary.BigEndian.PutUint64(bytes[8:], b.timestamp) + binary.BigEndian.PutUint64(bytes[numberOffset:timestampOffset], b.number) + binary.BigEndian.PutUint64(bytes[timestampOffset:baseFeeOffset], b.timestamp) if b.baseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.baseFee.Uint64()) + b.baseFee.FillBytes(bytes[baseFeeOffset:gasLimitOffset]) } - binary.BigEndian.PutUint64(bytes[48:], b.gasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.numTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.numL1Messages) + binary.BigEndian.PutUint64(bytes[gasLimitOffset:numTransactionsOffset], b.gasLimit) + binary.BigEndian.PutUint16(bytes[numTransactionsOffset:numL1MessagesOffset], b.numTransactions) + binary.BigEndian.PutUint16(bytes[numL1MessagesOffset:], b.numL1Messages) return bytes } @@ -55,12 +64,12 @@ func (b *daBlockV0) Decode(bytes []byte) error { return errors.New("block encoding is not blockContextByteSize bytes long") } - b.number = binary.BigEndian.Uint64(bytes[0:8]) - b.timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.baseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.gasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.numTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.numL1Messages = binary.BigEndian.Uint16(bytes[58:60]) + b.number = binary.BigEndian.Uint64(bytes[numberOffset:timestampOffset]) + b.timestamp = binary.BigEndian.Uint64(bytes[timestampOffset:baseFeeOffset]) + b.baseFee = new(big.Int).SetBytes(bytes[baseFeeOffset:gasLimitOffset]) + b.gasLimit = binary.BigEndian.Uint64(bytes[gasLimitOffset:numTransactionsOffset]) + b.numTransactions = binary.BigEndian.Uint16(bytes[numTransactionsOffset:numL1MessagesOffset]) + b.numL1Messages = binary.BigEndian.Uint16(bytes[numL1MessagesOffset:]) return nil } @@ -163,7 +172,7 @@ func (c *daChunkV0) Hash() (common.Hash, error) { var dataBytes []byte for i := 0; i < int(numBlocks); i++ { start := 1 + blockContextByteSize*i - end := start + blockContextByteSize - 2 // last 2 bytes of each BlockContext are not used in hashing + end := start + blockContextBytesForHashing if end > len(chunkBytes) { return common.Hash{}, fmt.Errorf("unexpected end index: %d, chunkBytes length: %d", end, len(chunkBytes)) } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index ae7c1d9..dff394a 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -157,7 +157,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*32) + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*common.HashLength) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -191,7 +191,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // challenge: compute chunk data hash chunkDataHash = crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + 
copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) } // if we have fewer than maxNumChunksPerBatch chunks, the rest @@ -199,7 +199,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // but we need to add padding to the challenge preimage for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) } // challenge: compute metadata hash @@ -220,7 +220,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+maxNumChunksPerBatch)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+maxNumChunksPerBatch)*common.HashLength:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) @@ -295,17 +295,17 @@ func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += calldataNonZeroByteGas * blockContextByteSize // sload - total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + total += coldSloadGas * numL1Messages // numL1Messages times cold sload in L1MessageQueue // staticcall - total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue - total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + total += warmAddressAccessGas * numL1Messages // numL1Messages times call to L1MessageQueue + total += warmAddressAccessGas * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += getMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += getMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // staticcall to proxy + total += warmAddressAccessGas * numL1Messages // read admin in proxy + total += warmAddressAccessGas * numL1Messages // read impl in proxy + total += warmAddressAccessGas * numL1Messages // access impl + total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // delegatecall to impl return total, nil } @@ -329,10 +329,10 @@ func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += warmSloadGas * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += getKeccak256Gas(58*numBlocks + common.HashLength*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } @@ -341,22 +341,22 @@ func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc 
- totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += extraGasCost // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * coldSloadGas // 4 one-time cold sload for commitBatch + totalL1CommitGas += sstoreGas // 1 time sstore + totalL1CommitGas += baseTxGas // base gas for tx totalL1CommitGas += calldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += calldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += (coldSloadGas + coldAddressAccessGas - warmSloadGas - warmAddressAccessGas) + totalL1CommitGas += getKeccak256Gas(daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header hash, length is estimated as (constant part) + (1 skippedL1MessageBitmap) + totalL1CommitGas += calldataNonZeroByteGas * (daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += getKeccak256Gas(uint64(common.HashLength * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -370,8 +370,8 @@ func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += calldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += calldataNonZeroByteGas * (skippedL1MessageBitmapByteSize * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += getKeccak256Gas(daBatchV3OffsetParentBatchHash + skippedL1MessageBitmapByteSize*(totalL1MessagePoppedInChunk+255)/256) chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index 7a7e6a2..584e84c 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -43,8 +43,7 @@ func (c *daChunkV1) Hash() (common.Hash, error) { // concatenate block contexts for _, block := range c.blocks { encodedBlock := block.Encode() - // only the first 58 bytes are used in the hashing process - dataBytes = append(dataBytes, encodedBlock[:58]...) + dataBytes = append(dataBytes, encodedBlock[:blockContextBytesForHashing]...) 
} // concatenate l1 tx hashes diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 7130742..534e2d9 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -104,7 +104,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*32) + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*common.HashLength) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -138,7 +138,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // challenge: compute chunk data hash chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) } // if we have fewer than maxNumChunksPerBatch chunks, the rest @@ -146,7 +146,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // but we need to add padding to the challenge preimage for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) } // challenge: compute metadata hash @@ -187,7 +187,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+maxNumChunksPerBatch)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+maxNumChunksPerBatch)*common.HashLength:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 818cb12..6bb214e 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -107,37 +107,13 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { ), nil } -// estimateChunkL1CommitGasWithoutPointEvaluation calculates the total L1 commit gas without point-evaluation for this chunk approximately. -func (d *DACodecV3) estimateChunkL1CommitGasWithoutPointEvaluation(c *Chunk) (uint64, error) { - var totalNonSkippedL1Messages uint64 - var totalL1CommitGas uint64 - for _, block := range c.Blocks { - transactions := uint64(len(block.Transactions)) - l2Transactions := block.NumL2Transactions() - if transactions < l2Transactions { - return 0, fmt.Errorf("number of L2 transactions (%d) exceeds total transactions (%d)", l2Transactions, transactions) - } - totalNonSkippedL1Messages += transactions - l2Transactions - blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) - if err != nil { - return 0, err - } - totalL1CommitGas += blockL1CommitGas - } - - numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += getKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - - return totalL1CommitGas, nil -} - // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
func (d *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) {
- totalL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(c)
+ // Reuse the V2 implementation; the gas cost may differ slightly, but it is sufficient for estimation in practice,
+ // since extraGasCost already over-estimates the total.
+ totalL1CommitGas, err := d.DACodecV2.EstimateChunkL1CommitGas(c)
if err != nil {
- return 0, err
+ return 0, fmt.Errorf("failed to estimate L1 commit gas for chunk: %w", err)
}
totalL1CommitGas += params.BlobTxPointEvaluationPrecompileGas // plus gas cost for the point-evaluation precompile call.
return totalL1CommitGas, nil
@@ -145,48 +121,12 @@ func (d *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) {
// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately.
func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
- var totalL1CommitGas uint64
-
- // Add extra gas costs
- totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
- totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch
- totalL1CommitGas += 20000 // 1 time sstore
- totalL1CommitGas += 21000 // base fee for tx
- totalL1CommitGas += calldataNonZeroByteGas // version in calldata
-
- // adjusting gas:
- // add 1 time cold sload (2100 gas) for L1MessageQueue
- // add 1 time cold address access (2600 gas) for L1MessageQueue
- // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
- totalL1CommitGas += (2100 + 2600 - 100 - 100)
- totalL1CommitGas += getKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
- totalL1CommitGas += calldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
-
- // adjust batch data hash gas cost
- totalL1CommitGas += getKeccak256Gas(uint64(32 * len(b.Chunks)))
-
- totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
-
- for _, chunk := range b.Chunks {
- chunkL1CommitGas, err := d.estimateChunkL1CommitGasWithoutPointEvaluation(chunk)
- if err != nil {
- return 0, err
- }
- totalL1CommitGas += chunkL1CommitGas
-
- totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
- totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
-
- totalL1CommitGas += calldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
- totalL1CommitGas += getKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
-
- chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk)
- if err != nil {
- return 0, err
- }
- totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize)
+ // Reuse the V2 implementation; the gas cost may differ slightly, but it is sufficient for estimation in practice,
+ // since extraGasCost already over-estimates the total.
+ totalL1CommitGas, err := d.DACodecV2.EstimateBatchL1CommitGas(b)
+ if err != nil {
+ return 0, fmt.Errorf("failed to estimate L1 commit gas for batch: %w", err)
}
- totalL1CommitGas += params.BlobTxPointEvaluationPrecompileGas // plus gas cost for the point-evaluation precompile call.
return totalL1CommitGas, nil } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 8c221b2..367c86e 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -145,7 +145,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*32) + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*common.HashLength) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -179,7 +179,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // challenge: compute chunk data hash chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) } // if we have fewer than maxNumChunksPerBatch chunks, the rest @@ -187,7 +187,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // but we need to add padding to the challenge preimage for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) } // challenge: compute metadata hash @@ -233,7 +233,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+maxNumChunksPerBatch)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+maxNumChunksPerBatch)*common.HashLength:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) diff --git a/encoding/da.go b/encoding/da.go index c692ea1..e38e0ec 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -18,12 +18,12 @@ import ( // blsModulus is the BLS modulus defined in EIP-4844. var blsModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) -// calldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const calldataNonZeroByteGas = 16 - // blockContextByteSize is the size of the block context in bytes. const blockContextByteSize = 60 +// blockContextBytesForHashing is the size of the block context in bytes for hashing. +const blockContextBytesForHashing = blockContextByteSize - 2 + // txLenByteSize is the size of the transaction length in bytes. const txLenByteSize = 4 @@ -77,6 +77,21 @@ const ( daBatchV3EncodedLength = 193 ) +const ( + payloadLengthBytes = 4 + calldataNonZeroByteGas = 16 + coldSloadGas = 2100 + coldAddressAccessGas = 2600 + warmAddressAccessGas = 100 + warmSloadGas = 100 + baseTxGas = 21000 + sstoreGas = 20000 + extraGasCost = 100000 // over-estimate the gas cost for ops like _getAdmin, _implementation, _requireNotPaused, etc + skippedL1MessageBitmapByteSize = 32 + functionSignatureBytes = 4 + defaultParameterBytes = 32 +) + // Block represents an L2 block. 
type Block struct { Header *types.Header From ef5f1a6503d5cf2c022f6c482145ab8d27fc885a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 17 Oct 2024 19:08:40 +0800 Subject: [PATCH 115/126] address comments --- encoding/codecv0.go | 8 ++++---- encoding/codecv0_types.go | 2 +- encoding/codecv1.go | 12 ++++++------ encoding/codecv1_types.go | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 495586d..5843e61 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -60,9 +60,6 @@ func (d *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []DABlock - var txs [][]*types.TransactionData - if chunk == nil { return nil, errors.New("chunk is nil") } @@ -75,6 +72,9 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) } + blocks := make([]DABlock, 0, len(chunk.Blocks)) + txs := make([][]*types.TransactionData, 0, len(chunk.Blocks)) + for _, block := range chunk.Blocks { b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { @@ -97,7 +97,7 @@ func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx + chunks := make([]*DAChunkRawTx, 0, len(chunkBytes)) for _, chunk := range chunkBytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index 09f734d..af7022a 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -237,7 +237,7 @@ func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp // Encode serializes the DABatchV0 into bytes. func (b *daBatchV0) Encode() []byte { - batchBytes := make([]byte, daBatchV0OffsetSkippedL1MessageBitmap+len(b.skippedL1MessageBitmap)) + batchBytes := make([]byte, daBatchV0EncodedMinLength+len(b.skippedL1MessageBitmap)) batchBytes[daBatchOffsetVersion] = b.version binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped], b.batchIndex) binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped], b.l1MessagePopped) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index dff394a..5c30253 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -25,9 +25,6 @@ func (d *DACodecV1) Version() CodecVersion { // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []DABlock - var txs [][]*types.TransactionData - if chunk == nil { return nil, errors.New("chunk is nil") } @@ -40,6 +37,9 @@ func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) } + blocks := make([]DABlock, 0, len(chunk.Blocks)) + txs := make([][]*types.TransactionData, 0, len(chunk.Blocks)) + for _, block := range chunk.Blocks { b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { @@ -60,9 +60,9 @@ func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. // Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func (d *DACodecV1) DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx - for _, chunk := range bytes { +func (d *DACodecV1) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { + chunks := make([]*DAChunkRawTx, 0, len(chunkBytes)) + for _, chunk := range chunkBytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index 584e84c..bde669b 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -103,7 +103,7 @@ func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp // Encode serializes the DABatchV1 into bytes. func (b *daBatchV1) Encode() []byte { - batchBytes := make([]byte, daBatchV1OffsetSkippedL1MessageBitmap+len(b.skippedL1MessageBitmap)) + batchBytes := make([]byte, daBatchV1EncodedMinLength+len(b.skippedL1MessageBitmap)) batchBytes[daBatchOffsetVersion] = b.version binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped], b.batchIndex) binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped], b.l1MessagePopped) From 3faa778b700402dabd37a5d94d8b915d5d091671 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 10:10:27 +0800 Subject: [PATCH 116/126] address comments --- encoding/codecv0.go | 4 ++-- encoding/codecv0_test.go | 4 ++-- encoding/codecv0_types.go | 8 ++++---- encoding/codecv1.go | 4 ++-- encoding/codecv1_test.go | 6 +++--- encoding/codecv1_types.go | 6 +++--- encoding/codecv2.go | 4 ++-- encoding/codecv2_test.go | 6 +++--- encoding/codecv3.go | 4 ++-- encoding/codecv3_test.go | 6 +++--- encoding/codecv3_types.go | 26 +++++++++++++------------- encoding/codecv4.go | 4 ++-- encoding/codecv4_test.go | 8 ++++---- encoding/interfaces.go | 2 +- 14 files changed, 46 insertions(+), 46 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 5843e61..7e693db 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -190,7 +190,7 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore daBatch := newDABatchV0( - uint8(CodecV0), // version + CodecV0, // version batch.Index, // batchIndex l1MessagePopped, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped @@ -213,7 +213,7 @@ func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } return newDABatchV0( - data[daBatchOffsetVersion], // version + CodecVersion(data[daBatchOffsetVersion]), // version 
binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped]), // batchIndex binary.BigEndian.Uint64(data[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped]), // l1MessagePopped binary.BigEndian.Uint64(data[daBatchV0OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index aae0d34..a8e1bb5 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -195,7 +195,7 @@ func TestCodecV0BatchEncode(t *testing.T) { // empty batch batch := &daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV0), + version: CodecV0, }, } encoded := hex.EncodeToString(batch.Encode()) @@ -271,7 +271,7 @@ func TestCodecV0BatchHash(t *testing.T) { // empty batch batch := &daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV0), + version: CodecV0, }, } assert.Equal(t, common.HexToHash("0x7f74e58579672e582998264e7e8191c51b6b8981afd0f9bf1a2ffc3abb39e678"), batch.Hash()) diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index af7022a..e110268 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -213,7 +213,7 @@ func (c *daChunkV0) BlockRange() (uint64, uint64, error) { // daBatchV0 contains metadata about a batch of DAChunks. type daBatchV0 struct { - version uint8 + version CodecVersion batchIndex uint64 l1MessagePopped uint64 totalL1MessagePopped uint64 @@ -223,7 +223,7 @@ type daBatchV0 struct { } // newDABatchV0 is a constructor for daBatchV0. -func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *daBatchV0 { +func newDABatchV0(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *daBatchV0 { return &daBatchV0{ version: version, batchIndex: batchIndex, @@ -238,7 +238,7 @@ func newDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp // Encode serializes the DABatchV0 into bytes. func (b *daBatchV0) Encode() []byte { batchBytes := make([]byte, daBatchV0EncodedMinLength+len(b.skippedL1MessageBitmap)) - batchBytes[daBatchOffsetVersion] = b.version + batchBytes[daBatchOffsetVersion] = byte(b.version) binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped], b.batchIndex) binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped], b.l1MessagePopped) binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) @@ -271,7 +271,7 @@ func (b *daBatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { // Version returns the version of the DABatch. func (b *daBatchV0) Version() CodecVersion { - return CodecVersion(b.version) + return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. 
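[Editor's note: the uint8 -> CodecVersion change threading through this patch is type-tightening only; the wire format is untouched because the version still occupies exactly one byte. Below is a minimal standalone sketch of the byte round-trip performed by Encode and NewDABatchFromBytes; it assumes CodecVersion is a uint8-based type with iota constants, which is its apparent shape in interfaces.go:

package main

import "fmt"

// Assumed declaration shape, mirroring interfaces.go.
type CodecVersion uint8

const (
	CodecV0 CodecVersion = iota
	CodecV1
	CodecV2
	CodecV3
	CodecV4
)

func main() {
	v := CodecV4

	// Encode: narrow the typed version back to one byte on the wire.
	var batchBytes [1]byte
	batchBytes[0] = byte(v)

	// NewDABatchFromBytes: widen the wire byte back to the typed version.
	decoded := CodecVersion(batchBytes[0])

	fmt.Println(decoded == v) // true; the conversion is lossless
}

The payoff is compile-time safety: version fields can no longer be confused with arbitrary uint8 values, and casts like uint8(CodecV0) at every construction site disappear, as the test hunks below show.]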
diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 5c30253..cde947d 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -132,7 +132,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore daBatch := newDABatchV1( - uint8(CodecV1), // version + CodecV1, // version batch.Index, // batchIndex l1MessagePopped, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped @@ -250,7 +250,7 @@ func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } return newDABatchV1( - data[daBatchOffsetVersion], // version + CodecVersion(data[daBatchOffsetVersion]), // version binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped]), // batchIndex binary.BigEndian.Uint64(data[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped]), // l1MessagePopped binary.BigEndian.Uint64(data[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 8bb4015..8dcd007 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -243,7 +243,7 @@ func TestCodecV1BatchEncode(t *testing.T) { // empty batch batch := &daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV1), + version: CodecV1, }, } encoded := hex.EncodeToString(batch.Encode()) @@ -319,7 +319,7 @@ func TestCodecV1BatchHash(t *testing.T) { // empty batch batch := &daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV1), + version: CodecV1, }, } assert.Equal(t, common.HexToHash("0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564"), batch.Hash()) @@ -973,7 +973,7 @@ func TestCodecV1BatchStandardTestCases(t *testing.T) { batch := daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV3), + version: CodecV3, batchIndex: 6789, l1MessagePopped: 101, totalL1MessagePopped: 10101, diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index bde669b..c81f5f0 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -84,7 +84,7 @@ type daBatchV1 struct { } // newDABatchV1 is a constructor for daBatchV1. -func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 { +func newDABatchV1(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 { return &daBatchV1{ daBatchV0: daBatchV0{ version: version, @@ -104,7 +104,7 @@ func newDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp // Encode serializes the DABatchV1 into bytes. 
func (b *daBatchV1) Encode() []byte { batchBytes := make([]byte, daBatchV1EncodedMinLength+len(b.skippedL1MessageBitmap)) - batchBytes[daBatchOffsetVersion] = b.version + batchBytes[daBatchOffsetVersion] = byte(b.version) binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped], b.batchIndex) binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped], b.l1MessagePopped) binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) @@ -177,7 +177,7 @@ func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { // Version returns the version of the DABatch. func (b *daBatchV1) Version() CodecVersion { - return CodecVersion(b.version) + return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 534e2d9..ad33224 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -79,7 +79,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore daBatch := newDABatchV1( - uint8(CodecV2), // version + CodecV2, // version batch.Index, // batchIndex l1MessagePopped, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped @@ -217,7 +217,7 @@ func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } return newDABatchV1( - data[daBatchOffsetVersion], // version + CodecVersion(data[daBatchOffsetVersion]), // version binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped]), // batchIndex binary.BigEndian.Uint64(data[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped]), // l1MessagePopped binary.BigEndian.Uint64(data[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index d7e6c2e..85fd7ba 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -243,7 +243,7 @@ func TestCodecV2BatchEncode(t *testing.T) { // empty batch batch := &daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV2), + version: CodecV2, }, } encoded := hex.EncodeToString(batch.Encode()) @@ -319,7 +319,7 @@ func TestCodecV2BatchHash(t *testing.T) { // empty batch batch := &daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV2), + version: CodecV2, }, } assert.Equal(t, common.HexToHash("0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592"), batch.Hash()) @@ -1014,7 +1014,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { batch := daBatchV1{ daBatchV0: daBatchV0{ - version: uint8(CodecV2), + version: CodecV2, batchIndex: 6789, l1MessagePopped: 101, totalL1MessagePopped: 10101, diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6bb214e..ac37e89 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -61,7 +61,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore return newDABatchV3( - uint8(CodecV3), // version + CodecV3, // version batch.Index, // batchIndex l1MessagePopped, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped @@ -88,7 +88,7 @@ func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { } return newDABatchV3WithProof( - data[daBatchOffsetVersion], // version + CodecVersion(data[daBatchOffsetVersion]), // version 
binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped]), // batchIndex binary.BigEndian.Uint64(data[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped]), // l1MessagePopped binary.BigEndian.Uint64(data[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index c202f11..5e3e2e8 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -244,7 +244,7 @@ func TestCodecV3BatchEncode(t *testing.T) { // empty daBatch daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV3), + version: CodecV3, }, } encoded := hex.EncodeToString(daBatchV3.Encode()) @@ -320,7 +320,7 @@ func TestCodecV3BatchHash(t *testing.T) { // empty daBatch daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV3), + version: CodecV3, }, } assert.Equal(t, common.HexToHash("0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0"), daBatchV3.Hash()) @@ -1155,7 +1155,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { batch := daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV3), + version: CodecV3, batchIndex: 6789, l1MessagePopped: 101, totalL1MessagePopped: 10101, diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index bffd2e7..fd75046 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -25,7 +25,7 @@ type daBatchV3 struct { } // newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally. -func newDABatchV3(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, +func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, ) (*daBatchV3, error) { @@ -57,7 +57,7 @@ func newDABatchV3(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopp } // newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof. -func newDABatchV3WithProof(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, +func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash, ) *daBatchV3 { @@ -83,7 +83,7 @@ func newDABatchV3WithProof(version uint8, batchIndex, l1MessagePopped, totalL1Me // Encode serializes the DABatchV3 into bytes. func (b *daBatchV3) Encode() []byte { batchBytes := make([]byte, daBatchV3EncodedLength) - batchBytes[daBatchOffsetVersion] = b.version + batchBytes[daBatchOffsetVersion] = byte(b.version) binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped], b.batchIndex) binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped], b.l1MessagePopped) binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) @@ -163,15 +163,15 @@ func (b *daBatchV3) BlobBytes() []byte { // This method is designed to provide prover with batch info in snake_case format. 
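// A trimmed sketch of the shadow-struct idiom MarshalJSON uses below: marshal
// through a local struct whose json tags pin the snake_case wire names, so a
// Go field rename cannot silently change the prover-facing format. Note that
// CodecVersion, a defined uint8 without a custom marshaler, still serializes
// as a plain JSON number (the struct name here is illustrative):
//
//	type batchJSON struct {
//		Version    CodecVersion `json:"version"`
//		BatchIndex uint64       `json:"batch_index"`
//	}
//
//	out, _ := json.Marshal(&batchJSON{Version: CodecV3, BatchIndex: 7})
//	// string(out) == `{"version":3,"batch_index":7}`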
func (b *daBatchV3) MarshalJSON() ([]byte, error) { type daBatchV3JSON struct { - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash string `json:"data_hash"` - ParentBatchHash string `json:"parent_batch_hash"` - BlobVersionedHash string `json:"blob_versioned_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]string `json:"blob_data_proof"` + Version CodecVersion `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash string `json:"data_hash"` + ParentBatchHash string `json:"parent_batch_hash"` + BlobVersionedHash string `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]string `json:"blob_data_proof"` } return json.Marshal(&daBatchV3JSON{ @@ -192,7 +192,7 @@ func (b *daBatchV3) MarshalJSON() ([]byte, error) { // Version returns the version of the DABatch. func (b *daBatchV3) Version() CodecVersion { - return CodecVersion(b.version) + return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 367c86e..d9eb6cc 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -89,7 +89,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore return newDABatchV3( - uint8(CodecV4), // version + CodecV4, // version batch.Index, // batchIndex l1MessagePopped, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped @@ -116,7 +116,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } return newDABatchV3WithProof( - data[daBatchOffsetVersion], // version + CodecVersion(data[daBatchOffsetVersion]), // version binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped]), // batchIndex binary.BigEndian.Uint64(data[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped]), // l1MessagePopped binary.BigEndian.Uint64(data[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 376d157..92e183a 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -244,7 +244,7 @@ func TestCodecV4BatchEncode(t *testing.T) { // empty daBatch daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV4), + version: CodecV4, }, } encoded := hex.EncodeToString(daBatchV3.Encode()) @@ -320,7 +320,7 @@ func TestCodecV4BatchHash(t *testing.T) { // empty daBatch daBatchV3 := &daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV4), + version: CodecV4, }, } assert.Equal(t, common.HexToHash("0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c"), daBatchV3.Hash()) @@ -1155,7 +1155,7 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { batch := daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV4), + version: CodecV4, batchIndex: 6789, l1MessagePopped: 101, totalL1MessagePopped: 10101, @@ -1303,7 +1303,7 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { batch := daBatchV3{ daBatchV0: daBatchV0{ - version: uint8(CodecV4), + version: CodecV4, batchIndex: 6789, l1MessagePopped: 101, totalL1MessagePopped: 10101, diff --git 
a/encoding/interfaces.go b/encoding/interfaces.go index 3f7ef6e..683057a 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -65,7 +65,7 @@ type Codec interface { } // CodecVersion represents the version of the codec. -type CodecVersion int +type CodecVersion uint8 const ( CodecV0 CodecVersion = iota From 1e07a66dd8b137d4513e47c01db497a243cb1482 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 10:33:24 +0800 Subject: [PATCH 117/126] add simple and nil functions unit tests --- encoding/codecv0_test.go | 55 +++++++++++++++++++++++++++++++++++++++ encoding/codecv1.go | 2 +- encoding/codecv1_test.go | 10 +++++++ encoding/codecv1_types.go | 4 +-- encoding/codecv2.go | 2 +- encoding/codecv2_test.go | 10 +++++++ encoding/codecv3.go | 4 +-- encoding/codecv3_test.go | 10 +++++++ encoding/codecv3_types.go | 2 +- encoding/codecv4.go | 2 +- encoding/codecv4_test.go | 10 +++++++ 11 files changed, 103 insertions(+), 8 deletions(-) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index a8e1bb5..304d751 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -6,6 +6,7 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -618,3 +619,57 @@ func TestCodecV0DecodeDAChunksRawTx(t *testing.T) { assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) } + +func TestDACodecV0SimpleMethods(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv0.Version() + assert.Equal(t, CodecV0, version) + }) + + t.Run("CheckChunkCompressedDataCompatibility", func(t *testing.T) { + chunk := &Chunk{} + compatible, err := codecv0.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.True(t, compatible) + }) + + t.Run("CheckBatchCompressedDataCompatibility", func(t *testing.T) { + batch := &Batch{} + compatible, err := codecv0.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.True(t, compatible) + }) + + t.Run("EstimateChunkL1CommitBatchSizeAndBlobSize", func(t *testing.T) { + chunk := &Chunk{} + batchSize, blobSize, err := codecv0.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk) + assert.NoError(t, err) + assert.Equal(t, uint64(0), batchSize) + assert.Equal(t, uint64(0), blobSize) + }) + + t.Run("EstimateBatchL1CommitBatchSizeAndBlobSize", func(t *testing.T) { + batch := &Batch{} + batchSize, blobSize, err := codecv0.EstimateBatchL1CommitBatchSizeAndBlobSize(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), batchSize) + assert.Equal(t, uint64(0), blobSize) + }) + + t.Run("JSONFromBytes", func(t *testing.T) { + data := []byte("test data") + json, err := codecv0.JSONFromBytes(data) + assert.NoError(t, err) + assert.Nil(t, json) + }) + + t.Run("DecodeTxsFromBlob", func(t *testing.T) { + blob := &kzg4844.Blob{} + chunks := []*DAChunkRawTx{} + err := codecv0.DecodeTxsFromBlob(blob, chunks) + assert.NoError(t, err) + }) +} diff --git a/encoding/codecv1.go b/encoding/codecv1.go index cde947d..993aaf7 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -215,7 +215,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, 
errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment: %w", err) } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 8dcd007..36a578a 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -987,3 +987,13 @@ func TestCodecV1BatchStandardTestCases(t *testing.T) { assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) } } + +func TestDACodecV1SimpleMethods(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv1.Version() + assert.Equal(t, CodecV1, version) + }) +} diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index c81f5f0..9ef265f 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -132,7 +132,7 @@ func (b *daBatchV1) BlobDataProof() ([]byte, error) { commitment, err := kzg4844.BlobToCommitment(b.blob) if err != nil { - return nil, errors.New("failed to create blob commitment") + return nil, fmt.Errorf("failed to create blob commitment: %w", err) } proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) @@ -164,7 +164,7 @@ func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { commitment, err := kzg4844.BlobToCommitment(b.blob) if err != nil { - return nil, errors.New("failed to create blob commitment") + return nil, fmt.Errorf("failed to create blob commitment: %w", err) } proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index ad33224..f2f4ed2 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -182,7 +182,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err) } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 85fd7ba..ac078ae 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -1028,3 +1028,13 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) } } + +func TestDACodecV2SimpleMethods(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv2.Version() + assert.Equal(t, CodecV2, version) + }) +} diff --git a/encoding/codecv3.go b/encoding/codecv3.go index ac37e89..cf05a43 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -135,12 +135,12 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) if err != nil { - return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + return nil, fmt.Errorf("failed to decode DABatch from bytes, version %d, hash %s: %w", batch.Version(), batch.Hash(), err) } jsonBytes, err := json.Marshal(batch) if err != nil { - return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) + return nil, fmt.Errorf("failed to marshal DABatch to JSON, version %d, hash %s: %w", batch.Version(), batch.Hash(), err) } return jsonBytes, nil diff --git 
a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 5e3e2e8..b067f3a 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -1172,3 +1172,13 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) } } + +func TestDACodecV3SimpleMethods(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv3.Version() + assert.Equal(t, CodecV3, version) + }) +} diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go index fd75046..43f8acf 100644 --- a/encoding/codecv3_types.go +++ b/encoding/codecv3_types.go @@ -138,7 +138,7 @@ func (b *daBatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { commitment, err := kzg4844.BlobToCommitment(b.blob) if err != nil { - return nil, errors.New("failed to create blob commitment") + return nil, fmt.Errorf("failed to create blob commitment: %w", err) } proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index d9eb6cc..393c2c4 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -228,7 +228,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err) } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 92e183a..b103cee 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -1320,3 +1320,13 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) } } + +func TestDACodecV4SimpleMethods(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv4.Version() + assert.Equal(t, CodecV4, version) + }) +} From 24eb59430df5261bab4b72b55ab67f659a99c956 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 11:43:22 +0800 Subject: [PATCH 118/126] add uncompressed case unit tests of DecodeTxsFromBlob --- encoding/codecv1.go | 2 +- encoding/codecv1_test.go | 6 ++++ encoding/codecv4_test.go | 66 ++++++++++++++++++++++++++++------------ 3 files changed, 54 insertions(+), 20 deletions(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 993aaf7..cd7a9b2 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -384,7 +384,7 @@ func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. 
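// Side note on the hunk below: renaming the parameter to the blank identifier
// is the idiomatic way to record that the argument is never read — under codec
// v1 every block costs the same fixed-size block context in commit calldata —
// while keeping the signature the Codec interface demands. The usual
// compile-time guard for that invariant looks like:
//
//	var _ Codec = (*DACodecV1)(nil) // fails to build if the method set drifts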
-func (d *DACodecV1) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (d *DACodecV1) EstimateBlockL1CommitCalldataSize(_ *Block) (uint64, error) { return blockContextByteSize, nil } diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 36a578a..2278969 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -996,4 +996,10 @@ func TestDACodecV1SimpleMethods(t *testing.T) { version := codecv1.Version() assert.Equal(t, CodecV1, version) }) + + t.Run("EstimateBlockL1CommitCalldataSize", func(t *testing.T) { + size, err := codecv1.EstimateBlockL1CommitCalldataSize(nil) + assert.NoError(t, err) + assert.Equal(t, uint64(blockContextByteSize), size) + }) } diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index b103cee..efcb766 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -988,41 +988,69 @@ func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { batch, err := codecv4.NewDABatch(originalBatch) assert.NoError(t, err) - daChunksRawTx, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + daChunksRawTx1, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) assert.NoError(t, err) // assert number of chunks - assert.Equal(t, 2, len(daChunksRawTx)) + assert.Equal(t, 2, len(daChunksRawTx1)) // assert block in first chunk - assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) - assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0]) - assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1]) + assert.Equal(t, 2, len(daChunksRawTx1[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx1[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx1[0].Blocks[1]) // assert block in second chunk - assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) - daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil - assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0]) - daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil - assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) + assert.Equal(t, 2, len(daChunksRawTx1[1].Blocks)) + daChunksRawTx1[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx1[1].Blocks[0]) + daChunksRawTx1[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx1[1].Blocks[1]) blob := batch.Blob() - err = codecv4.DecodeTxsFromBlob(blob, daChunksRawTx) + err = codecv4.DecodeTxsFromBlob(blob, daChunksRawTx1) assert.NoError(t, err) // assert transactions in first chunk - assert.Equal(t, 2, len(daChunksRawTx[0].Transactions)) + assert.Equal(t, 2, len(daChunksRawTx1[0].Transactions)) // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs - assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) - assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + assert.Equal(t, 2, len(daChunksRawTx1[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx1[0].Transactions[1])) - assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) - assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, 
daChunksRawTx1[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx1[0].Transactions[0][1].Hash().String()) // assert transactions in second chunk - assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) + assert.Equal(t, 2, len(daChunksRawTx1[1].Transactions)) // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs - assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) - assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) + assert.Equal(t, 1, len(daChunksRawTx1[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx1[1].Transactions[1])) + + // Uncompressed case + block4 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk2 := &Chunk{Blocks: []*Block{block4}} + daChunk2, err := codecv4.NewDAChunk(chunk2, 0) + assert.NoError(t, err) + chunkBytes2, err := daChunk2.Encode() + assert.NoError(t, err) + + daChunksRawTx2, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes2}) + assert.NoError(t, err) + + // assert number of chunks + assert.Equal(t, 1, len(daChunksRawTx2)) + + // assert block in uncompressed chunk + assert.Equal(t, 1, len(daChunksRawTx2[0].Blocks)) + assert.Equal(t, daChunk2.(*daChunkV1).blocks[0].Encode(), daChunksRawTx2[0].Blocks[0].Encode()) + + daBatchUncompressed, err := codecv4.NewDABatch(&Batch{Chunks: []*Chunk{chunk2}}) + assert.NoError(t, err) + blobUncompressed := daBatchUncompressed.Blob() + err = codecv4.DecodeTxsFromBlob(blobUncompressed, daChunksRawTx2) + assert.NoError(t, err) + + // assert transactions in first chunk + assert.Equal(t, 1, len(daChunksRawTx2[0].Transactions)) + assert.Equal(t, 0, len(daChunksRawTx2[0].Transactions[0])) } func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { From 4b5b63691829427e130d0d556d51213683b8462c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 11:51:53 +0800 Subject: [PATCH 119/126] add interface unit tests --- encoding/codecv1_types.go | 22 ------- encoding/interfaces_test.go | 117 ++++++++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+), 22 deletions(-) create mode 100644 encoding/interfaces_test.go diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go index 9ef265f..2bd448b 100644 --- a/encoding/codecv1_types.go +++ b/encoding/codecv1_types.go @@ -121,28 +121,6 @@ func (b *daBatchV1) Hash() common.Hash { return crypto.Keccak256Hash(bytes) } -// BlobDataProof computes the abi-encoded blob verification data. -func (b *daBatchV1) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, fmt.Errorf("failed to create blob commitment: %w", err) - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return blobDataProofFromValues(*b.z, y, commitment, proof), nil -} - // Blob returns the blob of the batch. 
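// The BlobDataProof method removed above duplicated the logic of
// BlobDataProofForPointEvaluation, which stays and follows the standard
// EIP-4844 flow: commit to the blob, open it at the challenge point z, then
// pack the values for the point-evaluation precompile. Condensed from the
// surviving method:
//
//	commitment, err := kzg4844.BlobToCommitment(b.blob) // commit to the blob
//	proof, y, err := kzg4844.ComputeProof(b.blob, *b.z)  // opening proof and evaluation y at z
//	return blobDataProofFromValues(*b.z, y, commitment, proof), nil
//
// with each failure wrapped via fmt.Errorf("...: %w", err) per this series.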
func (b *daBatchV1) Blob() *kzg4844.Blob { return b.blob diff --git a/encoding/interfaces_test.go b/encoding/interfaces_test.go new file mode 100644 index 0000000..72c2dda --- /dev/null +++ b/encoding/interfaces_test.go @@ -0,0 +1,117 @@ +package encoding + +import ( + "math/big" + "testing" + + "github.com/scroll-tech/go-ethereum/params" + "github.com/stretchr/testify/assert" +) + +func TestCodecFromVersion(t *testing.T) { + tests := []struct { + name string + version CodecVersion + want Codec + wantErr bool + }{ + {"CodecV0", CodecV0, &DACodecV0{}, false}, + {"CodecV1", CodecV1, &DACodecV1{}, false}, + {"CodecV2", CodecV2, &DACodecV2{}, false}, + {"CodecV3", CodecV3, &DACodecV3{}, false}, + {"CodecV4", CodecV4, &DACodecV4{}, false}, + {"InvalidCodec", CodecVersion(99), nil, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := CodecFromVersion(tt.version) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.IsType(t, tt.want, got) + } + }) + } +} + +func TestCodecFromConfig(t *testing.T) { + tests := []struct { + name string + config *params.ChainConfig + blockNum *big.Int + timestamp uint64 + want Codec + }{ + { + name: "DarwinV2 active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + DarwinTime: new(uint64), + DarwinV2Time: new(uint64), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV4{}, + }, + { + name: "Darwin active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + DarwinTime: new(uint64), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV3{}, + }, + { + name: "Curie active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV2{}, + }, + { + name: "Bernoulli active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV1{}, + }, + { + name: "London active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV0{}, + }, + { + name: "No upgrades", + config: ¶ms.ChainConfig{}, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV0{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := CodecFromConfig(tt.config, tt.blockNum, tt.timestamp) + assert.IsType(t, tt.want, got) + }) + } +} From e18a147a9ab33ea7661fcdcb0ebde0406846543a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 12:35:27 +0800 Subject: [PATCH 120/126] add codecv2 & codecv3 CompressedDataCompatibilityCheck unit tests --- encoding/codecv2_test.go | 109 +++++++++++++++++++++++++++++++++++++++ encoding/codecv3_test.go | 109 +++++++++++++++++++++++++++++++++++++++ go.mod | 1 + go.sum | 10 ++++ 4 files changed, 229 insertions(+) diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index ac078ae..59bc601 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -1,11 +1,13 @@ package encoding import ( + "crypto/rand" "encoding/hex" "math" "strings" "testing" + "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -1038,3 +1040,110 @@ func TestDACodecV2SimpleMethods(t *testing.T) { 
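// The new interfaces_test.go above uses Go's standard table-driven layout: one
// slice of cases, one t.Run subtest per entry, so a failure names the exact
// case. The same shape extends to any selector function; a minimal sketch
// reusing two of TestCodecFromVersion's cases:
//
//	for _, tt := range []struct {
//		name    string
//		version CodecVersion
//		wantErr bool
//	}{
//		{"CodecV0", CodecV0, false},
//		{"InvalidCodec", CodecVersion(99), true},
//	} {
//		t.Run(tt.name, func(t *testing.T) {
//			_, err := CodecFromVersion(tt.version)
//			assert.Equal(t, tt.wantErr, err != nil)
//		})
//	}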
assert.Equal(t, CodecV2, version) }) } + +func TestCodecV2ChunkCompressedDataCompatibility(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + // chunk with a single empty block + emptyBlock := &Block{} + emptyChunk := &Chunk{Blocks: []*Block{emptyBlock}} + + compatible, err := codecv2.CheckChunkCompressedDataCompatibility(emptyChunk) + assert.NoError(t, err) + assert.True(t, compatible) + + txChunk := &Chunk{ + Blocks: []*Block{ + { + Transactions: []*types.TransactionData{ + {Type: types.L1MessageTxType}, + }, + }, + }, + } + compatible, err = codecv2.CheckChunkCompressedDataCompatibility(txChunk) + assert.NoError(t, err) + assert.True(t, compatible) + + testCases := []struct { + name string + jsonFile string + }{ + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + compatible, err := codecv2.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } +} + +func TestCodecV2BatchCompressedDataCompatibility(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + // empty batch + emptyBatch := &Batch{} + compatible, err := codecv2.CheckBatchCompressedDataCompatibility(emptyBatch) + assert.NoError(t, err) + assert.True(t, compatible) + + testCases := []struct { + name string + jsonFiles []string + }{ + {"Single Block 02", []string{"testdata/blockTrace_02.json"}}, + {"Single Block 03", []string{"testdata/blockTrace_03.json"}}, + {"Single Block 04", []string{"testdata/blockTrace_04.json"}}, + {"Single Block 05", []string{"testdata/blockTrace_05.json"}}, + {"Single Block 06", []string{"testdata/blockTrace_06.json"}}, + {"Single Block 07", []string{"testdata/blockTrace_07.json"}}, + {"Multiple Blocks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var chunks []*Chunk + for _, jsonFile := range tc.jsonFiles { + block := readBlockFromJSON(t, jsonFile) + chunks = append(chunks, &Chunk{Blocks: []*Block{block}}) + } + batch := &Batch{Chunks: chunks} + compatible, err := codecv2.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } +} + +func TestCodecV2CompressedDataFailedCompatibilityCheck(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + patches := gomonkey.ApplyFunc(constructBatchPayloadInBlob, func(_ []*Chunk, _ Codec) ([]byte, error) { + randomBytes := make([]byte, minCompressedDataCheckSize+1) + _, err := rand.Read(randomBytes) + require.NoError(t, err) + return []byte(hex.EncodeToString(randomBytes)), nil + }) + defer patches.Reset() + + compatible, err := codecv2.CheckChunkCompressedDataCompatibility(nil) + assert.NoError(t, err) + assert.False(t, compatible) + + compatible, err = codecv2.CheckBatchCompressedDataCompatibility(&Batch{}) + assert.NoError(t, err) + assert.False(t, compatible) +} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 
b067f3a..d47b9ef 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -1,12 +1,14 @@ package encoding import ( + "crypto/rand" "encoding/hex" "encoding/json" "math" "strings" "testing" + "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -1182,3 +1184,110 @@ func TestDACodecV3SimpleMethods(t *testing.T) { assert.Equal(t, CodecV3, version) }) } + +func TestCodecV3ChunkCompressedDataCompatibility(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // chunk with a single empty block + emptyBlock := &Block{} + emptyChunk := &Chunk{Blocks: []*Block{emptyBlock}} + + compatible, err := codecv3.CheckChunkCompressedDataCompatibility(emptyChunk) + assert.NoError(t, err) + assert.True(t, compatible) + + txChunk := &Chunk{ + Blocks: []*Block{ + { + Transactions: []*types.TransactionData{ + {Type: types.L1MessageTxType}, + }, + }, + }, + } + compatible, err = codecv3.CheckChunkCompressedDataCompatibility(txChunk) + assert.NoError(t, err) + assert.True(t, compatible) + + testCases := []struct { + name string + jsonFile string + }{ + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + compatible, err := codecv3.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } +} + +func TestCodecV3BatchCompressedDataCompatibility(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // empty batch + emptyBatch := &Batch{} + compatible, err := codecv3.CheckBatchCompressedDataCompatibility(emptyBatch) + assert.NoError(t, err) + assert.True(t, compatible) + + testCases := []struct { + name string + jsonFiles []string + }{ + {"Single Block 02", []string{"testdata/blockTrace_02.json"}}, + {"Single Block 03", []string{"testdata/blockTrace_03.json"}}, + {"Single Block 04", []string{"testdata/blockTrace_04.json"}}, + {"Single Block 05", []string{"testdata/blockTrace_05.json"}}, + {"Single Block 06", []string{"testdata/blockTrace_06.json"}}, + {"Single Block 07", []string{"testdata/blockTrace_07.json"}}, + {"Multiple Blocks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var chunks []*Chunk + for _, jsonFile := range tc.jsonFiles { + block := readBlockFromJSON(t, jsonFile) + chunks = append(chunks, &Chunk{Blocks: []*Block{block}}) + } + batch := &Batch{Chunks: chunks} + compatible, err := codecv3.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } +} + +func TestCodecV3CompressedDataFailedCompatibilityCheck(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + patches := gomonkey.ApplyFunc(constructBatchPayloadInBlob, func(_ []*Chunk, _ Codec) ([]byte, error) { + randomBytes := make([]byte, minCompressedDataCheckSize+1) + _, err := rand.Read(randomBytes) 
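// Why random bytes work here: output from crypto/rand is effectively
// incompressible, so once the stubbed payload exceeds
// minCompressedDataCheckSize the compressed-data compatibility check cannot
// pass — the failure path these tests assert. gomonkey.ApplyFunc reroutes
// constructBatchPayloadInBlob into this stub by rewriting the function's entry
// at runtime, and the deferred patches.Reset() that follows restores the real
// implementation.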
+ require.NoError(t, err) + return []byte(hex.EncodeToString(randomBytes)), nil + }) + defer patches.Reset() + + compatible, err := codecv3.CheckChunkCompressedDataCompatibility(nil) + assert.NoError(t, err) + assert.False(t, compatible) + + compatible, err = codecv3.CheckBatchCompressedDataCompatibility(&Batch{}) + assert.NoError(t, err) + assert.False(t, compatible) +} diff --git a/go.mod b/go.mod index 0a6f1fd..a3e1927 100644 --- a/go.mod +++ b/go.mod @@ -3,6 +3,7 @@ module github.com/scroll-tech/da-codec go 1.21 require ( + github.com/agiledragon/gomonkey/v2 v2.12.0 github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22 github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 7ae90e1..d8f830f 100644 --- a/go.sum +++ b/go.sum @@ -3,6 +3,8 @@ github.com/DataDog/zstd v1.4.5/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t github.com/VictoriaMetrics/fastcache v1.12.1 h1:i0mICQuojGDL3KblA7wUNlY5lOK6a4bwt3uRKnkZU40= github.com/VictoriaMetrics/fastcache v1.12.1/go.mod h1:tX04vaqcNoQeGLD+ra5pU5sWkuxnzWhEzLwhP9w653o= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/agiledragon/gomonkey/v2 v2.12.0 h1:ek0dYu9K1rSV+TgkW5LvNNPRWyDZVIxGMCFI6Pz9o38= +github.com/agiledragon/gomonkey/v2 v2.12.0/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA= @@ -63,6 +65,7 @@ github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/holiman/uint256 v1.2.4 h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= @@ -70,6 +73,7 @@ github.com/iden3/go-iden3-crypto v0.0.15 h1:4MJYlrot1l31Fzlo2sF56u7EVFeHHJkxGXXZ github.com/iden3/go-iden3-crypto v0.0.15/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= @@ -115,6 +119,8 @@ github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7I github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= github.com/shirou/gopsutil v3.21.11+incompatible/go.mod 
h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/supranational/blst v0.3.11 h1:LyU6FolezeWAhvQk0k6O/d49jqgO52MSDDfYgbeoEm4= @@ -128,15 +134,18 @@ github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9f github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -146,6 +155,7 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From 42af2924daf42e6dcc1bcf99a452c251aa6c5b42 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 12:55:16 +0800 Subject: [PATCH 121/126] remove mock flag --- encoding/codecv0_types.go | 2 +- encoding/codecv1.go | 8 ++++---- encoding/codecv1_test.go | 15 ++++++++++++++- encoding/codecv2.go | 8 ++++---- encoding/codecv2_test.go | 22 +++++++++++++++++++--- encoding/codecv3.go | 2 +- encoding/codecv3_test.go | 22 +++++++++++++++++++--- encoding/codecv4.go | 16 
+++++++--------- encoding/codecv4_test.go | 36 ++++++++++++++++++++++++++++++++++-- encoding/da.go | 12 +++--------- encoding/da_test.go | 2 +- 11 files changed, 107 insertions(+), 38 deletions(-) diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go index e110268..0e6c958 100644 --- a/encoding/codecv0_types.go +++ b/encoding/codecv0_types.go @@ -142,7 +142,7 @@ func (c *daChunkV0) Encode() ([]byte, error) { } var txLen [4]byte - rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(txData) if err != nil { return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index cd7a9b2..e83724b 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -121,7 +121,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) + blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch()) if err != nil { return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err) } @@ -148,7 +148,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // constructBlobPayload constructs the 4844 blob payload. -func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + maxNumChunksPerBatch*4 @@ -177,7 +177,7 @@ func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // encode L2 txs into blob payload - rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := convertTxDataToRLPEncoding(tx) if err != nil { return nil, common.Hash{}, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } @@ -271,7 +271,7 @@ func (d *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { continue } - rlpTxData, err := convertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(tx) if err != nil { return 0, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 2278969..bbc8dea 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -2,11 +2,14 @@ package encoding import ( "encoding/hex" + "fmt" "math" "strings" "testing" + "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -953,7 +956,17 @@ func TestCodecV1BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := codecv1.(*DACodecV1).constructBlobPayload(chunks, codecv1.MaxNumChunksPerBatch(), true /* use mock */) + patches := gomonkey.ApplyFunc(convertTxDataToRLPEncoding, + func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) + 
} + return data, nil + }) + defer patches.Reset() + + blob, blobVersionedHash, z, err := codecv1.(*DACodecV1).constructBlobPayload(chunks, codecv1.MaxNumChunksPerBatch()) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f2f4ed2..1512f62 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -68,7 +68,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) + blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch()) if err != nil { return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err) } @@ -95,7 +95,7 @@ func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // constructBlobPayload constructs the 4844 blob payload. -func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + maxNumChunksPerBatch*4 @@ -124,7 +124,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // encode L2 txs into blob payload - rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := convertTxDataToRLPEncoding(tx) if err != nil { return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } @@ -160,7 +160,7 @@ func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if !useMockTxData && len(batchBytes) > minCompressedDataCheckSize { + if len(batchBytes) > minCompressedDataCheckSize { // Check compressed data compatibility. 
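// This check used to be additionally gated on !useMockTxData; with the mock
// flag removed it now runs for every oversized batch, and the standard test
// cases bypass it instead by gomonkey-patching checkCompressedDataCompatibility
// to return nil — see the codecv2_test.go hunk below.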
if err = checkCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index 59bc601..2feae3c 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -9,6 +9,7 @@ import ( "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -996,7 +997,22 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch(), true /* use mock */) + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch()) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -1133,8 +1149,8 @@ func TestCodecV2CompressedDataFailedCompatibilityCheck(t *testing.T) { patches := gomonkey.ApplyFunc(constructBatchPayloadInBlob, func(_ []*Chunk, _ Codec) ([]byte, error) { randomBytes := make([]byte, minCompressedDataCheckSize+1) - _, err := rand.Read(randomBytes) - require.NoError(t, err) + _, readerr := rand.Read(randomBytes) + require.NoError(t, readerr) return []byte(hex.EncodeToString(randomBytes)), nil }) defer patches.Reset() diff --git a/encoding/codecv3.go b/encoding/codecv3.go index cf05a43..c6290db 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -47,7 +47,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch()) if err != nil { return nil, err } diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index d47b9ef..2a99229 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -10,6 +10,7 @@ import ( "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -1137,7 +1138,22 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch(), true /* use mock */) + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + 
patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch()) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -1277,8 +1293,8 @@ func TestCodecV3CompressedDataFailedCompatibilityCheck(t *testing.T) { patches := gomonkey.ApplyFunc(constructBatchPayloadInBlob, func(_ []*Chunk, _ Codec) ([]byte, error) { randomBytes := make([]byte, minCompressedDataCheckSize+1) - _, err := rand.Read(randomBytes) - require.NoError(t, err) + _, readerr := rand.Read(randomBytes) + require.NoError(t, readerr) return []byte(hex.EncodeToString(randomBytes)), nil }) defer patches.Reset() diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 393c2c4..ebbb172 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -75,7 +75,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression) if err != nil { return nil, err } @@ -136,7 +136,7 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } // constructBlobPayload constructs the 4844 blob payload. -func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + maxNumChunksPerBatch*4 @@ -165,7 +165,7 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i } // encode L2 txs into blob payload - rlpTxData, err := convertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := convertTxDataToRLPEncoding(tx) if err != nil { return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } @@ -202,12 +202,10 @@ func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch i if err != nil { return nil, common.Hash{}, nil, nil, err } - if !useMockTxData { - // Check compressed data compatibility. - if err = checkCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, err - } + // Check compressed data compatibility. + if err = checkCompressedDataCompatibility(blobBytes); err != nil { + log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, err } blobBytes = append([]byte{1}, blobBytes...) 
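// This prefix byte is codec v4's envelope flag: 1 marks the payload that
// follows as compressed batch bytes, and the uncompressed branch below
// presumably prefixes 0, consistent with the uncompressed DecodeTxsFromBlob
// case tested earlier. A hedged sketch of the reader's side of the contract,
// with decompress standing in for the real routine:
//
//	switch blobBytes[0] {
//	case 1:
//		payload, err = decompress(blobBytes[1:]) // compressed envelope
//	case 0:
//		payload = blobBytes[1:] // raw envelope
//	}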
} else { diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index efcb766..cdafc09 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -7,7 +7,9 @@ import ( "strings" "testing" + "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -1163,7 +1165,22 @@ func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */, true /* use mock */) + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -1311,7 +1328,22 @@ func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */, true /* use mock */) + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/da.go b/encoding/da.go index e38e0ec..5f22756 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -155,18 +155,12 @@ func (c *Chunk) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { } // convertTxDataToRLPEncoding transforms []*TransactionData into []*types.Transaction. -func convertTxDataToRLPEncoding(txData *types.TransactionData, useMockTxData bool) ([]byte, error) { +func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { data, err := hexutil.Decode(txData.Data) if err != nil { return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) } - // This mock param is only used in testing comparing batch challenges with standard test cases. - // These tests use this param to set the tx data for convenience. 
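// The short-circuit deleted just below returned txData.Data verbatim so the
// standard test cases could inject pre-encoded payloads. That convenience now
// lives only in the tests, as gomonkey stubs around
// convertTxDataToRLPEncoding, so every production caller gets a genuine
// types.Transaction encoding.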
-	if useMockTxData {
-		return data, nil
-	}
-
 	var tx *types.Transaction
 	switch txData.Type {
 	case types.LegacyTxType:
@@ -484,7 +478,7 @@ func constructBatchPayloadInBlob(chunks []*Chunk, codec Codec) ([]byte, error) {
 		}
 
 			// encode L2 txs into batch payload
-			rlpTxData, err := convertTxDataToRLPEncoding(tx, false /* no mock */)
+			rlpTxData, err := convertTxDataToRLPEncoding(tx)
 			if err != nil {
 				return nil, err
 			}
@@ -513,7 +507,7 @@ func getMemoryExpansionCost(memoryByteSize uint64) uint64 {
 
 // getTxPayloadLength calculates the length of the transaction payload.
 func getTxPayloadLength(txData *types.TransactionData) (uint64, error) {
-	rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */)
+	rlpTxData, err := convertTxDataToRLPEncoding(txData)
 	if err != nil {
 		return 0, err
 	}
diff --git a/encoding/da_test.go b/encoding/da_test.go
index 662a127..dbfbaf1 100644
--- a/encoding/da_test.go
+++ b/encoding/da_test.go
@@ -99,7 +99,7 @@ func TestConvertTxDataToRLPEncoding(t *testing.T) {
 			continue
 		}
 
-		rlpTxData, err := convertTxDataToRLPEncoding(txData, false /* no mock */)
+		rlpTxData, err := convertTxDataToRLPEncoding(txData)
 		assert.NoError(t, err)
 		var tx types.Transaction
 		err = tx.UnmarshalBinary(rlpTxData)

From e6cfa7d5d44eec92e948bbe7333b40218cd3e1f0 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 18 Oct 2024 13:12:41 +0800
Subject: [PATCH 122/126] add JSONFromBytes unit tests

---
 encoding/codecv3.go      |  2 +-
 encoding/codecv3_test.go | 50 ++++++++++++++++++++++++++++++++++++++++
 encoding/codecv4.go      | 16 +++++++++++++
 encoding/codecv4_test.go | 50 ++++++++++++++++++++++++++++++++++++++++
 4 files changed, 117 insertions(+), 1 deletion(-)

diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index c6290db..51e3d7c 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -135,7 +135,7 @@ func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) {
 	batch, err := d.NewDABatchFromBytes(data)
 	if err != nil {
-		return nil, fmt.Errorf("failed to decode DABatch from bytes, version %d, hash %s: %w", batch.Version(), batch.Hash(), err)
+		return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err)
 	}
 
 	jsonBytes, err := json.Marshal(batch)
diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go
index 2a99229..efdd105 100644
--- a/encoding/codecv3_test.go
+++ b/encoding/codecv3_test.go
@@ -4,6 +4,7 @@ import (
 	"crypto/rand"
 	"encoding/hex"
 	"encoding/json"
+	"fmt"
 	"math"
 	"strings"
 	"testing"
@@ -582,6 +583,55 @@ func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) {
 	})
 }
 
+func TestDACodecV3JSONFromBytes(t *testing.T) {
+	codecv3, err := CodecFromVersion(CodecV3)
+	require.NoError(t, err)
+
+	daBatch := daBatchV3{
+		daBatchV0: daBatchV0{
+			version:              3,
+			batchIndex:           293212,
+			l1MessagePopped:      7,
+			totalL1MessagePopped: 904750,
+			dataHash:             common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"),
+			parentBatchHash:      common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"),
+		},
+		blobVersionedHash:  common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"),
+		lastBlockTimestamp: 1721130505,
+		blobDataProof: [2]common.Hash{
+			common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"),
+			common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"),
+		},
+	}
+
+	outputJSON, err := codecv3.JSONFromBytes(daBatch.Encode())
+	require.NoError(t, err, "JSONFromBytes failed")
+
+	var outputMap map[string]interface{}
+	err = json.Unmarshal(outputJSON, &outputMap)
+	require.NoError(t, err, "Failed to unmarshal output JSON")
+
+	expectedFields := map[string]interface{}{
+		"version":                 float64(daBatch.version),
+		"batch_index":             float64(daBatch.batchIndex),
+		"l1_message_popped":       float64(daBatch.l1MessagePopped),
+		"total_l1_message_popped": float64(daBatch.totalL1MessagePopped),
+		"data_hash":               daBatch.dataHash.Hex(),
+		"blob_versioned_hash":     daBatch.blobVersionedHash.Hex(),
+		"parent_batch_hash":       daBatch.parentBatchHash.Hex(),
+		"last_block_timestamp":    float64(daBatch.lastBlockTimestamp),
+		"blob_data_proof": []interface{}{
+			daBatch.blobDataProof[0].Hex(),
+			daBatch.blobDataProof[1].Hex(),
+		},
+	}
+
+	assert.Len(t, outputMap, len(expectedFields), "Unexpected number of fields in output")
+	for key, expectedValue := range expectedFields {
+		assert.Equal(t, expectedValue, outputMap[key], fmt.Sprintf("Mismatch in field %s", key))
+	}
+}
+
 func TestCodecV3CalldataSizeEstimation(t *testing.T) {
 	codecv3, err := CodecFromVersion(CodecV3)
 	require.NoError(t, err)
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index ebbb172..6c98f89 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -4,6 +4,7 @@ import (
 	"crypto/sha256"
 	"encoding/binary"
 	"encoding/hex"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"math/big"
@@ -308,3 +309,18 @@ func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error
 func (d *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) {
 	return d.checkCompressedDataCompatibility(b.Chunks)
 }
+
+// JSONFromBytes converts the bytes to a daBatchV3 and then marshals it to JSON.
+func (d *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) {
+	batch, err := d.NewDABatchFromBytes(data) // note: this resolves to the V4 decoder, which differs from the V3 implementation
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err)
+	}
+
+	jsonBytes, err := json.Marshal(batch)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal DABatch to JSON, version %d, hash %s: %w", batch.Version(), batch.Hash(), err)
+	}
+
+	return jsonBytes, nil
+}
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index cdafc09..7165729 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -3,6 +3,7 @@ package encoding
 import (
 	"encoding/hex"
 	"encoding/json"
+	"fmt"
 	"math"
 	"strings"
 	"testing"
@@ -581,6 +582,55 @@ func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) {
 	})
 }
 
+func TestDACodecV4JSONFromBytes(t *testing.T) {
+	codecv4, err := CodecFromVersion(CodecV4)
+	require.NoError(t, err)
+
+	daBatch := daBatchV3{
+		daBatchV0: daBatchV0{
+			version:              4,
+			batchIndex:           293212,
+			l1MessagePopped:      7,
+			totalL1MessagePopped: 904750,
+			dataHash:             common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"),
+			parentBatchHash:      common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"),
+		},
+		blobVersionedHash:  common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"),
+		lastBlockTimestamp: 1721130505,
+		blobDataProof: [2]common.Hash{
+			common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"),
+			common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"),
+		},
+	}
+
+	outputJSON, err := codecv4.JSONFromBytes(daBatch.Encode())
+	require.NoError(t, err, "JSONFromBytes failed")
+
+	var outputMap map[string]interface{}
+	err = json.Unmarshal(outputJSON, &outputMap)
+	require.NoError(t, err, "Failed to unmarshal output JSON")
+
+	expectedFields := map[string]interface{}{
+		"version":                 float64(daBatch.version),
+		"batch_index":             float64(daBatch.batchIndex),
+		"l1_message_popped":       float64(daBatch.l1MessagePopped),
+		"total_l1_message_popped": float64(daBatch.totalL1MessagePopped),
+		"data_hash":               daBatch.dataHash.Hex(),
+		"blob_versioned_hash":     daBatch.blobVersionedHash.Hex(),
+		"parent_batch_hash":       daBatch.parentBatchHash.Hex(),
+		"last_block_timestamp":    float64(daBatch.lastBlockTimestamp),
+		"blob_data_proof": []interface{}{
+			daBatch.blobDataProof[0].Hex(),
+			daBatch.blobDataProof[1].Hex(),
+		},
+	}
+
+	assert.Len(t, outputMap, len(expectedFields), "Unexpected number of fields in output")
+	for key, expectedValue := range expectedFields {
+		assert.Equal(t, expectedValue, outputMap[key], fmt.Sprintf("Mismatch in field %s", key))
+	}
+}
+
 func TestCodecV4CalldataSizeEstimation(t *testing.T) {
 	codecv4, err := CodecFromVersion(CodecV4)
 	require.NoError(t, err)

From 102d4a3bc6adcf05c0121c81b61165a6c016769b Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 18 Oct 2024 13:25:58 +0800
Subject: [PATCH 123/126] add codecv4 CompressedDataCompatibilityCheck unit tests

---
 encoding/codecv2_test.go |  6 +--
 encoding/codecv3_test.go |  6 +--
 encoding/codecv4_test.go | 88 ++++++++++++++++++++++++++++++++++++++++
 3 files changed, 94 insertions(+), 6 deletions(-)

diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go
index 2feae3c..8e0fccd 100644
--- a/encoding/codecv2_test.go
+++ b/encoding/codecv2_test.go
@@ -1057,7 +1057,7 @@ func TestDACodecV2SimpleMethods(t *testing.T) {
 	})
 }
 
-func TestCodecV2ChunkCompressedDataCompatibility(t *testing.T) {
+func TestCodecV2ChunkCompressedDataCompatibilityCheck(t *testing.T) {
 	codecv2, err := CodecFromVersion(CodecV2)
 	require.NoError(t, err)
 
@@ -1105,7 +1105,7 @@ func TestCodecV2ChunkCompressedDataCompatibility(t *testing.T) {
 	}
 }
 
-func TestCodecV2BatchCompressedDataCompatibility(t *testing.T) {
+func TestCodecV2BatchCompressedDataCompatibilityCheck(t *testing.T) {
 	codecv2, err := CodecFromVersion(CodecV2)
 	require.NoError(t, err)
 
@@ -1143,7 +1143,7 @@ func TestCodecV2BatchCompressedDataCompatibility(t *testing.T) {
 	}
 }
 
-func TestCodecV2CompressedDataFailedCompatibilityCheck(t *testing.T) {
+func TestCodecV2FailedCompressedDataCompatibilityCheck(t *testing.T) {
 	codecv2, err := CodecFromVersion(CodecV2)
 	require.NoError(t, err)
 
diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go
index efdd105..4fbcc43 100644
--- a/encoding/codecv3_test.go
+++ b/encoding/codecv3_test.go
@@ -1251,7 +1251,7 @@ func TestDACodecV3SimpleMethods(t *testing.T) {
 	})
 }
 
-func TestCodecV3ChunkCompressedDataCompatibility(t *testing.T) {
+func TestCodecV3ChunkCompressedDataCompatibilityCheck(t *testing.T) {
 	codecv3, err := CodecFromVersion(CodecV3)
 	require.NoError(t, err)
 
@@ -1299,7 +1299,7 @@ func TestCodecV3ChunkCompressedDataCompatibility(t *testing.T) {
 	}
 }
 
-func TestCodecV3BatchCompressedDataCompatibility(t *testing.T) {
+func TestCodecV3BatchCompressedDataCompatibilityCheck(t *testing.T) {
 	codecv3, err := CodecFromVersion(CodecV3)
 	require.NoError(t, err)
 
@@ -1337,7 +1337,7 @@ func TestCodecV3BatchCompressedDataCompatibility(t *testing.T) {
 	}
 }
 
-func TestCodecV3CompressedDataFailedCompatibilityCheck(t *testing.T) {
+func TestCodecV3FailedCompressedDataCompatibilityCheck(t *testing.T) {
 	codecv3, err := CodecFromVersion(CodecV3)
 	require.NoError(t, err)
 
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
index 7165729..1730dff 100644
--- a/encoding/codecv4_test.go
+++ b/encoding/codecv4_test.go
@@ -1440,3 +1440,91 @@ func TestDACodecV4SimpleMethods(t *testing.T) {
 		assert.Equal(t, CodecV4, version)
 	})
 }
+
+func TestCodecV4ChunkCompressedDataCompatibilityCheck(t *testing.T) {
+	codecv4, err := CodecFromVersion(CodecV4)
+	require.NoError(t, err)
+
+	// chunk with a single empty block
+	emptyBlock := &Block{}
+	emptyChunk := &Chunk{Blocks: []*Block{emptyBlock}}
+
+	compatible, err := codecv4.CheckChunkCompressedDataCompatibility(emptyChunk)
+	assert.NoError(t, err)
+	assert.Equal(t, false, compatible)
+
+	txChunk := &Chunk{
+		Blocks: []*Block{
+			{
+				Transactions: []*types.TransactionData{
+					{Type: types.L1MessageTxType},
+				},
+			},
+		},
+	}
+	compatible, err = codecv4.CheckChunkCompressedDataCompatibility(txChunk)
+	assert.NoError(t, err)
+	assert.Equal(t, false, compatible)
+
+	testCases := []struct {
+		name             string
+		jsonFile         string
+		expectCompatible bool
+	}{
+		{"Block 02", "testdata/blockTrace_02.json", true},
+		{"Block 03", "testdata/blockTrace_03.json", true},
+		{"Block 04", "testdata/blockTrace_04.json", true},
+		{"Block 05", "testdata/blockTrace_05.json", false},
+		{"Block 06", "testdata/blockTrace_06.json", false},
+		{"Block 07", "testdata/blockTrace_07.json", false},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			block := readBlockFromJSON(t, tc.jsonFile)
+			chunk := &Chunk{Blocks: []*Block{block}}
+			compatible, err := codecv4.CheckChunkCompressedDataCompatibility(chunk)
+			assert.NoError(t, err)
+			assert.Equal(t, tc.expectCompatible, compatible)
+		})
+	}
+}
+
+func TestCodecV4BatchCompressedDataCompatibilityCheck(t *testing.T) {
+	codecv4, err := CodecFromVersion(CodecV4)
+	require.NoError(t, err)
+
+	// empty batch
+	emptyBatch := &Batch{}
+	compatible, err := codecv4.CheckBatchCompressedDataCompatibility(emptyBatch)
+	assert.NoError(t, err)
+	assert.Equal(t, false, compatible)
+
+	testCases := []struct {
+		name             string
+		jsonFiles        []string
+		expectCompatible bool
+	}{
+		{"Single Block 02", []string{"testdata/blockTrace_02.json"}, true},
+		{"Single Block 03", []string{"testdata/blockTrace_03.json"}, true},
+		{"Single Block 04", []string{"testdata/blockTrace_04.json"}, true},
+		{"Single Block 05", []string{"testdata/blockTrace_05.json"}, false},
+		{"Single Block 06", []string{"testdata/blockTrace_06.json"}, false},
+		{"Single Block 07", []string{"testdata/blockTrace_07.json"}, false},
+		{"Multiple Blocks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}, true},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			var chunks []*Chunk
+			for _, jsonFile := range tc.jsonFiles {
+				block := readBlockFromJSON(t, jsonFile)
+				chunks = append(chunks, &Chunk{Blocks: []*Block{block}})
+			}
+			batch := &Batch{Chunks: chunks}
+			compatible, err := codecv4.CheckBatchCompressedDataCompatibility(batch)
+			assert.NoError(t, err)
+			assert.Equal(t, tc.expectCompatible, compatible)
+		})
+	}
+}

From 715e673c1c10b183ef79641be846213037268781 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 18 Oct 2024 14:45:15 +0800
Subject: [PATCH 124/126] add NewDABatchFromBytes unit tests

---
 encoding/codecv0_test.go  | 215 +++++++++++++++++---------
 encoding/codecv1.go       |   6 +-
 encoding/codecv1_test.go  | 315 ++++++++++++++++++++++---------------
 encoding/codecv1_types.go |   2 +-
 encoding/codecv2.go       |   6 +-
 encoding/codecv2_test.go  | 317 +++++++++++++++++++++++---------------
 encoding/codecv3_test.go  | 309 ++++++++++++++++++++++---------------
 encoding/codecv4_test.go  | 309 ++++++++++++++++++++++---------------
 8 files changed, 908 insertions(+), 571 deletions(-)

diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go
index 304d751..830d243 100644
--- a/encoding/codecv0_test.go
+++ b/encoding/codecv0_test.go
@@ -194,72 +194,68 @@ func TestCodecV0BatchEncode(t *testing.T) {
 	require.NoError(t, err)
 
 	// empty batch
-	batch := &daBatchV1{
-		daBatchV0: daBatchV0{
-			version: CodecV0,
-		},
-	}
-	encoded := hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
+	daBatchV0 := daBatchV0{version: CodecV0}
+	encoded := hex.EncodeToString(daBatchV0.Encode())
+	assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv0.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "000000000000000000000000000000000000000000000000008fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3e0000000000000000000000000000000000000000000000000000000000000000", encoded)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "0000000000000000000000000000000000000000000000000019d1fad630fcc61bd49949fa01e58d198f67a58f1c4aea43f32714ceaa9e0e760000000000000000000000000000000000000000000000000000000000000000", encoded)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "000000000000000000000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded)
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "000000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded)
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "000000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded)
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "00000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d52080000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded)
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "000000000000000000000000000000002a000000000000002a908c20b6255fd8cd8fb3a7995e9980007ebedcfe359cee2d8e899aefe319836e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded)
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "000000000000000000000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded)
 }
@@ -270,123 +266,188 @@ func TestCodecV0BatchHash(t *testing.T) {
 	require.NoError(t, err)
 
 	// empty batch
-	batch := &daBatchV1{
+	daBatchV1 := &daBatchV1{
 		daBatchV0: daBatchV0{
 			version: CodecV0,
 		},
 	}
-	assert.Equal(t, common.HexToHash("0x7f74e58579672e582998264e7e8191c51b6b8981afd0f9bf1a2ffc3abb39e678"), batch.Hash())
+	assert.Equal(t, common.HexToHash("0x7f74e58579672e582998264e7e8191c51b6b8981afd0f9bf1a2ffc3abb39e678"), daBatchV1.Hash())
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv0.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x4605465b7470c8565b123330d7186805caf9a7f2656d8e9e744b62e14ca22c3d"), daBatch.Hash())
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x922e004553d563bde6560a827c6449200bfd84f92917dfa14d740f26e52c59bc"), daBatch.Hash())
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xfbb081f25d6d06aefd76f062eee50885faf5bb050c8f31d533fc8560e655b690"), daBatch.Hash())
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x99f9648e4d090f1222280bec95a3f1e39c6cbcd4bff21eb2ae94b1536bb23acc"), daBatch.Hash())
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xe0950d500d47df4e9c443978682bcccfc8d50983f99ec9232067333a7d32a9d2"), daBatch.Hash())
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x745a74773cdc7cd0b86b50305f6373c7efeaf051b38a71ea561333708e8a90d9"), daBatch.Hash())
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x85b5c152c5c0b25731bfab6f4d309e94a42ddf0f4c9235189e5cd19c5c008522"), daBatch.Hash())
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xc5e787fa6a83374135c3b95bd8325bcc0440cd5eb2d71bb31ddca67dd2d44f64"), daBatch.Hash())
 }
 
+func TestCodecV0NewDABatchFromBytes(t *testing.T) {
+	codecv0, err := CodecFromVersion(CodecV0)
+	require.NoError(t, err)
+
+	testCases := []struct {
+		name     string
+		jsonFile string
+	}{
+		{"Empty Batch", ""},
+		{"Block 02", "testdata/blockTrace_02.json"},
+		{"Block 03", "testdata/blockTrace_03.json"},
+		{"Block 04", "testdata/blockTrace_04.json"},
+		{"Block 05", "testdata/blockTrace_05.json"},
+		{"Block 06", "testdata/blockTrace_06.json"},
+		{"Block 07", "testdata/blockTrace_07.json"},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			var batch *Batch
+			var daBatch DABatch
+			var err error
+
+			if tc.jsonFile == "" {
+				// Empty daBatch
+				daBatch = &daBatchV0{version: CodecV0}
+			} else {
+				block := readBlockFromJSON(t, tc.jsonFile)
+				chunk := &Chunk{Blocks: []*Block{block}}
+				batch = &Batch{Chunks: []*Chunk{chunk}}
+				daBatch, err = codecv0.NewDABatch(batch)
+				assert.NoError(t, err)
+			}
+
+			// Encode the DABatch
+			encodedBytes := daBatch.Encode()
+
+			// Decode the bytes back into a DABatch
+			decodedDABatch, err := codecv0.NewDABatchFromBytes(encodedBytes)
+			assert.NoError(t, err)
+
+			// Compare the hashes of the original and decoded DABatch
+			assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name)
+		})
+	}
+
+	// Test with multiple blocks and chunks in a batch
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+
+	chunk1 := &Chunk{Blocks: []*Block{block2, block3}}
+	chunk2 := &Chunk{Blocks: []*Block{block4, block5}}
+	batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}}
+	daBatch, err := codecv0.NewDABatch(batch)
+	assert.NoError(t, err)
+
+	encodedBytes := daBatch.Encode()
+	decodedDABatch, err := codecv0.NewDABatchFromBytes(encodedBytes)
+	assert.NoError(t, err)
+
+	assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash())
+}
+
 func TestCodecV0BatchDataHash(t *testing.T) {
 	codecv0, err := CodecFromVersion(CodecV0)
 	require.NoError(t, err)
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv0.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x8fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3e"), daBatch.DataHash())
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x19d1fad630fcc61bd49949fa01e58d198f67a58f1c4aea43f32714ceaa9e0e76"), daBatch.DataHash())
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca"), daBatch.DataHash())
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash())
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash())
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash())
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x908c20b6255fd8cd8fb3a7995e9980007ebedcfe359cee2d8e899aefe319836e"), daBatch.DataHash())
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc"), daBatch.DataHash())
 }
@@ -494,74 +555,74 @@ func TestCodecV0BatchL1MessagePopped(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv0.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV0).totalL1MessagePopped)
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV0).l1MessagePopped) // skip 7, include 3
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV0).totalL1MessagePopped)
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).l1MessagePopped) // skip 255, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(256), daBatch.(*daBatchV0).l1MessagePopped) // skip 254, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped)
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
 	chunk9 := &Chunk{Blocks: []*Block{block5}}                 // queue index 37-41
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 10
-	daBatch, err = codecv0.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv0.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(32), daBatch.(*daBatchV0).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped)
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index e83724b..b5d1120 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -115,7 +115,7 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// skipped L1 messages bitmap
-	bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err)
 	}
@@ -137,9 +137,9 @@ func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 		l1MessagePopped,           // l1MessagePopped
 		totalL1MessagePoppedAfter, // totalL1MessagePopped
 		dataHash,                  // dataHash
-		batch.ParentBatchHash,     // parentBatchHash
 		blobVersionedHash,         // blobVersionedHash
-		bitmapBytes,               // skippedL1MessageBitmap
+		batch.ParentBatchHash,     // parentBatchHash
+		skippedL1MessageBitmap,    // skippedL1MessageBitmap
 		blob,                      // blob
 		z,                         // z
 	)
diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go
index bbc8dea..701bb6c 100644
--- a/encoding/codecv1_test.go
+++ b/encoding/codecv1_test.go
@@ -244,72 +244,72 @@ func TestCodecV1BatchEncode(t *testing.T) {
 	require.NoError(t, err)
 
 	// empty batch
-	batch := &daBatchV1{
+	daBatchV1 := &daBatchV1{
 		daBatchV0: daBatchV0{
 			version: CodecV1,
 		},
 	}
-	encoded := hex.EncodeToString(batch.Encode())
+	encoded := hex.EncodeToString(daBatchV1.Encode())
 	assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv1.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "010000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc75530000000000000000000000000000000000000000000000000000000000000000", encoded)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "01000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f94110000000000000000000000000000000000000000000000000000000000000000", encoded)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "010000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded)
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "010000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b401a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded)
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "010000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded)
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "01000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d520801a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a60000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded)
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "010000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8014ae5927a983081a8bcdbcce19e926c9e4c56e2dc89c91c32c034b875b8a1ca00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded)
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "010000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e13476701b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded)
@@ -320,123 +320,192 @@ func TestCodecV1BatchHash(t *testing.T) {
 	require.NoError(t, err)
 
 	// empty batch
-	batch := &daBatchV1{
+	daBatchV1 := &daBatchV1{
 		daBatchV0: daBatchV0{
 			version: CodecV1,
 		},
 	}
-	assert.Equal(t, common.HexToHash("0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564"), batch.Hash())
+	assert.Equal(t, common.HexToHash("0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564"), daBatchV1.Hash())
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv1.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xd557b02638c0385d5124f7fc188a025b33f8819b7f78c000751404997148ab8b"), daBatch.Hash())
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xf13c7e249d00941c59fe4cd970241bbd6753eede8e043c438165674031792b3b"), daBatch.Hash())
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xb64208f07fab641f7ebf831686d05ad667da0c7bfabcbd9c878cc22cbc8032fd"), daBatch.Hash())
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x4f7426d164e885574a661838406083f5292b0a1bc6dc20c51129eed0723b8a27"), daBatch.Hash())
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xfce89ec2aed85cebeb20eea722e3ae4ec622bff49218dbe249a2d358e2e85451"), daBatch.Hash())
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x8fc063179b709bab338674278bb7b70dce2879a4e11ea857b3a202fb3313559f"), daBatch.Hash())
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xf1c94cdf45967bc60bfccd599edd8cb07fd0201f41ab068637834f86140f62bf"), daBatch.Hash())
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0"), daBatch.Hash())
 }
 
+func TestCodecV1NewDABatchFromBytes(t *testing.T) {
+	codecv1, err := CodecFromVersion(CodecV1)
+	require.NoError(t, err)
+
+	testCases := []struct {
+		name     string
+		jsonFile string
+	}{
+		{"Empty Batch", ""},
+		{"Block 02", "testdata/blockTrace_02.json"},
+		{"Block 03", "testdata/blockTrace_03.json"},
+		{"Block 04", "testdata/blockTrace_04.json"},
+		{"Block 05", "testdata/blockTrace_05.json"},
+		{"Block 06", "testdata/blockTrace_06.json"},
+		{"Block 07", "testdata/blockTrace_07.json"},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			var batch *Batch
+			var daBatch DABatch
+			var err error
+
+			if tc.jsonFile == "" {
+				// Empty daBatch
+				daBatch = &daBatchV1{
+					daBatchV0: daBatchV0{
+						version: CodecV1,
+					},
+				}
+			} else {
+				block := readBlockFromJSON(t, tc.jsonFile)
+				chunk := &Chunk{Blocks: []*Block{block}}
+				batch = &Batch{Chunks: []*Chunk{chunk}}
+				daBatch, err = codecv1.NewDABatch(batch)
+				assert.NoError(t, err)
+			}
+
+			// Encode the DABatch
+			encodedBytes := daBatch.Encode()
+
+			// Decode the bytes back into a DABatch
+			decodedDABatch, err := codecv1.NewDABatchFromBytes(encodedBytes)
+			assert.NoError(t, err)
+
+			// Compare the hashes of the original and decoded DABatch
+			assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name)
+		})
+	}
+
+	// Test with multiple blocks and chunks in a batch
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+
+	chunk1 := &Chunk{Blocks: []*Block{block2, block3}}
+	chunk2 := &Chunk{Blocks: []*Block{block4, block5}}
+	batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}}
+	daBatch, err := codecv1.NewDABatch(batch)
+	assert.NoError(t, err)
+
+	encodedBytes := daBatch.Encode()
+	decodedDABatch, err := codecv1.NewDABatchFromBytes(encodedBytes)
+	assert.NoError(t, err)
+
+	assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash())
+}
+
 func TestCodecV1BatchDataHash(t *testing.T) {
 	codecv1, err := CodecFromVersion(CodecV1)
 	require.NoError(t, err)
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv1.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash())
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash())
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash())
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash())
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash())
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash())
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash())
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash())
 }
@@ -600,74 +669,74 @@ func TestCodecV1BatchL1MessagePopped(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv1.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).l1MessagePopped) // skip 255, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
 	chunk9 := &Chunk{Blocks: []*Block{block5}}                 // queue index 37-41
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 10
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
@@ -679,81 +748,81 @@ func TestCodecV1BlobEncodingAndHashing(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	batch, err := codecv1.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
-	encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0")
+	encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0")
 	assert.Equal(t,
 		// metadata
 		"00"+"0001"+"000000e6"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00"+"00"+"000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+
 		// tx payload
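+		// each 31-byte chunk of payload is packed into a 32-byte blob word with a zero first byte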
"00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1", encoded) - assert.Equal(t, common.HexToHash("0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553"), daBatch.(*daBatchV1).blobVersionedHash) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "000001000016310000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d26080604000523480156200001157600080fd5b50604051620014b2380380620014b283390081810160405260a08110156200003757600080fd5b8151602083015160408000850180519151939592948301929184640100000000821115620000635760000080fd5b9083019060208201858111156200007957600080fd5b8251640100000000008111828201881017156200009457600080fd5b8252508151602091820100929091019080838360005b83811015620000c357818101518382015260200100620000a9565b50505050905090810190601f168015620000f1578082038051006001836020036101000a031916815260200191505b5060405260200180516000405193929190846401000000008211156200011557600080fd5b908301906000208201858111156200012b57600080fd5b8251640100000000811182820188001017156200014657600080fd5b8252508151602091820192909101908083830060005b83811015620001755781810151838201526020016200015b565b5050005050905090810190601f168015620001a3578082038051600183602003610100000a031916815260200191505b506040526020908101518551909350859250008491620001c8916003918501906200026b565b508051620001de906004906000208401906200026b565b50506005805461ff001960ff199091166012171690005550600680546001600160a01b038088166001600160a01b031992831617900092556007805492871692909116919091179055620002308162000255565b5000506005805462010000600160b01b031916336201000002179055506200030700915050565b6005805460ff191660ff92909216919091179055565b82805460000181600116156101000203166002900490600052602060002090601f01602000900481019282601f10620002ae57805160ff1916838001178555620002de56005b82800160010185558215620002de579182015b82811115620002de57825100825591602001919060010190620002c1565b50620002ec929150620002f056005b5090565b5b80821115620002ec5760008155600101620002f1565b61119b0080620003176000396000f3fe608060405234801561001057600080fd5b50600004361061010b5760003560e01c80635c975abb116100a257806395d89b41110061007157806395d89b41146103015780639dc29fac14610309578063a457c200d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b00565b80635c975abb1461029d57806370a08231146102a55780638456cb5914006102cb5780638e50817a146102d35761010b565b8063313ce567116100de57008063313ce5671461021d578063395093511461023b5780633f4ba83a146102006757806340c10f19146102715761010b565
b806306fdde031461011057806300095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e757005b600080fd5b6101186103bb565b604080516020808252835181830152835100919283929083019185019080838360005b838110156101525781810151838200015260200161013a565b50505050905090810190601f16801561017f578082000380516001836020036101000a031916815260200191505b50925050506040005180910390f35b6101b9600480360360408110156101a357600080fd5b50600001600160a01b038135169060200135610451565b60408051911515825251900081900360200190f35b6101d561046e565b6040805191825251908190036020000190f35b6101b9600480360360608110156101fd57600080fd5b50600160010060a01b03813581169160208101359091169060400135610474565b610225610004fb565b6040805160ff9092168252519081900360200190f35b6101b9600400803603604081101561025157600080fd5b506001600160a01b03813516906000200135610504565b61026f610552565b005b61026f600480360360408110150061028757600080fd5b506001600160a01b0381351690602001356105a9565b006101b9610654565b6101d5600480360360208110156102bb57600080fd5b5000356001600160a01b0316610662565b61026f61067d565b61026f60048036030060408110156102e957600080fd5b506001600160a01b0381358116916020010035166106d2565b610118610757565b61026f6004803603604081101561031f0057600080fd5b506001600160a01b0381351690602001356107b8565b6101b9006004803603604081101561034b57600080fd5b506001600160a01b0381351600906020013561085f565b6101b96004803603604081101561037757600080fd005b506001600160a01b0381351690602001356108c7565b6101d560048036030060408110156103a357600080fd5b506001600160a01b0381358116916020010035166108db565b60038054604080516020601f600260001961010060018816001502019095169490940493840181900481028201810190925282815260609300909290918301828280156104475780601f1061041c5761010080835404028300529160200191610447565b820191906000526020600020905b8154815290600001019060200180831161042a57829003601f168201915b505050505090509000565b600061046561045e610906565b848461090a565b50600192915050565b0060025490565b60006104818484846109f6565b6104f18461048d610906565b006104ec8560405180606001604052806028815260200161108560289139600100600160a01b038a166000908152600160205260408120906104cb610906565b006001600160a01b031681526020810191909152604001600020549190610b5100565b61090a565b5060019392505050565b60055460ff1690565b600061046500610511610906565b846104ec8560016000610522610906565b6001600160a0001b03908116825260208083019390935260409182016000908120918c16815200925290205490610be8565b6007546001600160a01b0316331461059f57604000805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c0049565b565b600554610100900460ff16156105f9576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610646576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610ced565b5050565b600554610100900460ff001690565b6001600160a01b031660009081526020819052604090205490565b006007546001600160a01b031633146106ca576040805162461bcd60e51b81520060206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b60440082015290519081900360640190fd5b6105a7610ddd565b600554620100009000046001600160a01b03163314610726576040805162461bcd60e51b81526020006004820152600c60248201526b6f6e6c7920466163746f727960a01b60448200015290519081900360640190fd5b600780546001600160a01b03928316600100600160a01b0319918216179091556006805493909216921691909117905556005b60048054604080516020601f600260001961010060018816150201909516009490940493840181900481028201810190925282815260609390929091830100828280
156104475780601f1061041c5761010080835404028352916020019100610447565b600554610100900460ff1615610808576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610855576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610e65565b600061046561086c610906565b84006104ec85604051806060016040528060258152602001611117602591396001006000610896610906565b6001600160a01b0390811682526020808301939093005260409182016000908120918d16815292529020549190610b51565b6000610004656108d4610906565b84846109f6565b6001600160a01b0391821660009000815260016020908152604080832093909416825291909152205490565b339000565b6001600160a01b03831661094f5760405162461bcd60e51b8152600401008080602001828103825260248152602001806110f3602491396040019150500060405180910390fd5b6001600160a01b0382166109945760405162461bcd6000e51b815260040180806020018281038252602281526020018061103d602291003960400191505060405180910390fd5b6001600160a01b038084166000818100526001602090815260408083209487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b9259281900390910190a3505050565b6001600160a01b03831600610a3b5760405162461bcd60e51b8152600401808060200182810382526025008152602001806110ce6025913960400191505060405180910390fd5b600160000160a01b038216610a805760405162461bcd60e51b815260040180806020010082810382526023815260200180610ff8602391396040019150506040518091000390fd5b610a8b838383610f61565b610ac8816040518060600160405280600026815260200161105f602691396001600160a01b038616600090815260208100905260409020549190610b51565b6001600160a01b03808516600090815260002081905260408082209390935590841681522054610af79082610be8565b600001600160a01b03808416600081815260208181526040918290209490945580005185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952b00a7f163c4a11628f55a4df523b3ef92918290030190a3505050565b6000818400841115610be05760405162461bcd60e51b8152600401808060200182810382005283818151815260200191508051906020019080838360005b83811015610b00a5578181015183820152602001610b8d565b50505050905090810190601f16008015610bd25780820380516001836020036101000a03191681526020019150005b509250505060405180910390fd5b505050900390565b60008282018381100015610c42576040805162461bcd60e51b815260206004820152601b6024820100527f536166654d6174683a206164646974696f6e206f766572666c6f77000000000000604482015290519081900360640190fd5b9392505050565b60055461000100900460ff16610c9c576040805162461bcd60e51b81526020600482015200601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b00604482015290519081900360640190fd5b6005805461ff00191690557f5db900ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa61000cd0610906565b604080516001600160a01b03909216825251908190036020000190a1565b6001600160a01b038216610d48576040805162461bcd60e51b81005260206004820152601f60248201527f45524332303a206d696e7420746f2000746865207a65726f20616464726573730060448201529051908190036064010090fd5b610d5460008383610f61565b600254610d619082610be8565b600255006001600160a01b038216600090815260208190526040902054610d87908261000be8565b6001600160a01b038316600081815260208181526040808320949000945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa95002ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b60055400610100900460ff1615610e2d576040805162461bcd60e51b81526020600482000152601060248201526f14185d5cd8589b194e881c185d5cd95960821b60440082015290519081900360640190fd5b6005805461ff0019166101001790557f0062e78cea01bee320cd4e420270b5ea74000d11b0c
9f74754ebdbfc544b05a20058610cd0610906565b6001600160a01b038216610eaa5760405162461bcd6000e51b81526004018080602001828103825260218152602001806110ad602191003960400191505060405180910390fd5b610eb682600083610f61565b610ef3008160405180606001604052806022815260200161101b60229139600160016000a01b0385166000908152602081905260409020549190610b51565b600160010060a01b038316600090815260208190526040902055600254610f199082610f00b5565b6002556040805182815290516000916001600160a01b038516917fdd00f252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef009181900360200190a35050565b610f6c838383610fb0565b610f7461065456005b15610fb05760405162461bcd60e51b81526004018080602001828103825200602a81526020018061113c602a913960400191505060405180910390fd5b50005050565b6000610c4283836040518060400160405280601e81526020017f53006166654d6174683a207375627472616374696f6e206f766572666c6f77000000815250610b5156fe45524332303a207472616e7366657220746f2074686520007a65726f206164647265737345524332303a206275726e20616d6f756e742000657863656564732062616c616e636545524332303a20617070726f76652074006f20746865207a65726f206164647265737345524332303a207472616e736600657220616d6f756e7420657863656564732062616c616e636545524332303a00207472616e7366657220616d6f756e74206578636565647320616c6c6f7761006e636545524332303a206275726e2066726f6d20746865207a65726f20616400647265737345524332303a207472616e736665722066726f6d20746865207a0065726f206164647265737345524332303a20617070726f76652066726f6d2000746865207a65726f206164647265737345524332303a206465637265617365006420616c6c6f77616e63652062656c6f77207a65726f4552433230506175730061626c653a20746f6b656e207472616e73666572207768696c652070617573006564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed0057770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000009570045544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7", encoded) - assert.Equal(t, common.HexToHash("0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411"), daBatch.(*daBatchV1).blobVersionedHash) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0000010000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080008", encoded) - assert.Equal(t, 
common.HexToHash("0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd"), daBatch.(*daBatchV1).blobVersionedHash) // this batch only contains L1 txs block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "000001", encoded) - assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), daBatch.(*daBatchV1).blobVersionedHash) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "000001", encoded) - assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), daBatch.(*daBatchV1).blobVersionedHash) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "000001", encoded) - assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), daBatch.(*daBatchV1).blobVersionedHash) // 15 chunks - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, // metadata 
"00"+"000f"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"00"+"00"+"0000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+ // tx payload "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea003f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ece00a0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86d00f514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288b00baf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf000d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f0010c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f002b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b009aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d0002c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b00219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d199600b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940100bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000800083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393e00b095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87938000aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b600e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae9900c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cb00d19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8007101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce941100ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b002cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec005bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e830700a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de10200513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c57008fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e900a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea000f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7730016a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6e00ed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2ade00ceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7b00a5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd7300e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9a00ec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d0200c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc060015b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03998586600d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e0081065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8710080843b9aec2e8307a12094c0
c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a100209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260004393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f00879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6a00cb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab0007ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df51400a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf4002a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d6900ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c100be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b460004bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec002e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c700e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b001de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b500243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae600bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000808301009ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb09500b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac100c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb009e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67a00a78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19f00eacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710100843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a00152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cac00e28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd400aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a1200094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d0056548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd700f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03f00b2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e008307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e1004af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b100bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d825006f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e8106005f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8718084003b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a15002d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc3002b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d190096b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a12
0940001bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260439003eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87930080aac1c09c6eed32f1", encoded) - assert.Equal(t, common.HexToHash("0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284"), daBatch.(*daBatchV1).blobVersionedHash) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - batch, err = codecv1.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b411161007157806395d89b41146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b56
5b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b6003805460408051602060001f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b0316331461072657604000805162461bcd60e51b815260206004820152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b600780540
06001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b038316600081810052602081815260408083209490945583518581529351929391927fddf252ad001be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b60055461010090
0460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded) - assert.Equal(t, common.HexToHash("0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf"), daBatch.(*daBatchV1).blobVersionedHash) } func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { @@ -762,8 +831,8 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv1.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(batch) assert.NoError(t, err) verifyData, 
err := daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -771,8 +840,8 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -780,8 +849,8 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -789,8 +858,8 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -798,8 +867,8 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -807,16 +876,16 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
 	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
 
 	// 15 chunks
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -824,8 +893,8 @@ func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv1.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -852,8 +921,8 @@ func TestCodecV1DecodeDAChunksRawTx(t *testing.T) {
 	chunkBytes1, err := daChunk1.Encode()
 	assert.NoError(t, err)
 
-	originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}}
-	batch, err := codecv1.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}}
+	daBatch, err := codecv1.NewDABatch(batch)
 	assert.NoError(t, err)
 
 	daChunksRawTx, err := codecv1.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1})
@@ -873,7 +942,7 @@ func TestCodecV1DecodeDAChunksRawTx(t *testing.T) {
 	daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil
 	assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1])
 
-	blob := batch.Blob()
+	blob := daBatch.Blob()
 	err = codecv1.DecodeTxsFromBlob(blob, daChunksRawTx)
 	assert.NoError(t, err)
diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go
index 2bd448b..bb35dc0 100644
--- a/encoding/codecv1_types.go
+++ b/encoding/codecv1_types.go
@@ -84,7 +84,7 @@ type daBatchV1 struct {
 }
 
 // newDABatchV1 is a constructor for daBatchV1.
-func newDABatchV1(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 {
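+// The hash parameters are ordered as they appear in the encoded daBatchV1
+// header (dataHash, then blobVersionedHash, then parentBatchHash), matching
+// the argument order used at the call site in DACodecV2.NewDABatch below.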
"02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv2.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd70000000000000000000000000000000000000000000000000000000000000000", encoded) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "02000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad40000000000000000000000000000000000000000000000000000000000000000", encoded) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc53394137000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, 
"020000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "02000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc5339413700000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa002900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb3363200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) @@ -320,123 +320,192 @@ func TestCodecV2BatchHash(t *testing.T) { require.NoError(t, err) // empty batch - batch := &daBatchV1{ + daBatchV1 := &daBatchV1{ daBatchV0: daBatchV0{ version: CodecV2, }, } - assert.Equal(t, common.HexToHash("0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592"), batch.Hash()) + assert.Equal(t, common.HexToHash("0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592"), daBatchV1.Hash()) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv2.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x57553c35f981626b4d1a73c816aa8d8fad83c460fc049c5792581763f7e21b13"), daBatch.Hash()) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: 
+func TestCodecV2NewDABatchFromBytes(t *testing.T) {
+	codecv2, err := CodecFromVersion(CodecV2)
+	require.NoError(t, err)
+
+	testCases := []struct {
+		name     string
+		jsonFile string
+	}{
+		{"Empty Batch", ""},
+		{"Block 02", "testdata/blockTrace_02.json"},
+		{"Block 03", "testdata/blockTrace_03.json"},
+		{"Block 04", "testdata/blockTrace_04.json"},
+		{"Block 05", "testdata/blockTrace_05.json"},
+		{"Block 06", "testdata/blockTrace_06.json"},
+		{"Block 07", "testdata/blockTrace_07.json"},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			var batch *Batch
+			var daBatch DABatch
+			var err error
+
+			if tc.jsonFile == "" {
+				// Empty daBatch
+				daBatch = &daBatchV1{
+					daBatchV0: daBatchV0{
+						version: CodecV2,
+					},
+				}
+			} else {
+				block := readBlockFromJSON(t, tc.jsonFile)
+				chunk := &Chunk{Blocks: []*Block{block}}
+				batch = &Batch{Chunks: []*Chunk{chunk}}
+				daBatch, err = codecv2.NewDABatch(batch)
+				assert.NoError(t, err)
+			}
+
+			// Encode the DABatch
+			encodedBytes := daBatch.Encode()
+
+			// Decode the bytes back into a DABatch
+			decodedDABatch, err := codecv2.NewDABatchFromBytes(encodedBytes)
+			assert.NoError(t, err)
+
+			// Compare the hashes of the original and decoded DABatch
+			assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name)
+		})
+	}
+
+	// Test with multiple blocks and chunks in a batch
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+
+	chunk1 := &Chunk{Blocks: []*Block{block2, block3}}
+	chunk2 := &Chunk{Blocks: []*Block{block4, block5}}
+	batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}}
+	daBatch, err := codecv2.NewDABatch(batch)
+	assert.NoError(t, err)
+
+	encodedBytes := daBatch.Encode()
+	decodedDABatch, err := codecv2.NewDABatchFromBytes(encodedBytes)
+	assert.NoError(t, err)
+
+	assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash())
+}
+
 func TestCodecV2BatchDataHash(t *testing.T) {
 	codecv2, err := CodecFromVersion(CodecV2)
 	require.NoError(t, err)
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv2.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash())
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash())
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash())
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash())
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) } @@ -600,74 +669,74 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv2.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped) assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5 assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) - originalBatch.TotalL1MessagePoppedBefore = 37 - daBatch, err = codecv2.NewDABatch(originalBatch) + batch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(5), 
@@ -600,74 +669,74 @@ func TestCodecV2BatchL1MessagePopped(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv2.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).l1MessagePopped) // skip 255, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
 	chunk9 := &Chunk{Blocks: []*Block{block5}}                 // queue index 37-41
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 10
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
@@ -679,75 +748,75 @@ func TestCodecV2BlobEncodingAndHashing(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	batch, err := codecv2.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
-	encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0")
+	encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0")
 	assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded)
-	assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), batch.(*daBatchV1).blobVersionedHash)
+	assert.Equal(t,
common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), daBatch.(*daBatchV1).blobVersionedHash) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - batch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d6022808416600081948716808452948252918290
2085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), daBatch.(*daBatchV1).blobVersionedHash) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - batch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) 
- encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), daBatch.(*daBatchV1).blobVersionedHash) // this batch only contains L1 txs block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - batch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV1).blobVersionedHash) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - batch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV1).blobVersionedHash) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - batch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV1).blobVersionedHash) // 45 chunks - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, 
err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) - assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), daBatch.(*daBatchV1).blobVersionedHash) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - batch, err = codecv2.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV1).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV1).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), daBatch.(*daBatchV1).blobVersionedHash) } func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { @@ -756,8 +825,8 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv2.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := 
codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err := daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -765,8 +834,8 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -774,8 +843,8 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -783,8 +852,8 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -792,8 +861,8 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -801,16 +870,16 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
 	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
 
 	// 15 chunks
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -818,8 +887,8 @@ func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) {
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv2.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 	verifyData, err = daBatch.BlobDataProofForPointEvaluation()
 	assert.NoError(t, err)
@@ -846,8 +915,8 @@ func TestCodecV2DecodeDAChunksRawTx(t *testing.T) {
 	chunkBytes1, err := daChunk1.Encode()
 	assert.NoError(t, err)
 
-	originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}}
-	batch, err := codecv2.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}}
+	daBatch, err := codecv2.NewDABatch(batch)
 	assert.NoError(t, err)
 
 	daChunksRawTx, err := codecv2.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1})
@@ -867,7 +936,7 @@ func TestCodecV2DecodeDAChunksRawTx(t *testing.T) {
 	daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil
 	assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1])
 
-	blob := batch.Blob()
+	blob := daBatch.Blob()
 	err = codecv2.DecodeTxsFromBlob(blob, daChunksRawTx)
 	assert.NoError(t, err)
@@ -1125,7 +1194,7 @@ func TestCodecV2BatchCompressedDataCompatibilityCheck(t *testing.T) {
 		{"Single Block 05", []string{"testdata/blockTrace_05.json"}},
 		{"Single Block 06", []string{"testdata/blockTrace_06.json"}},
 		{"Single Block 07", []string{"testdata/blockTrace_07.json"}},
-		{"Multiple Blocks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}},
+		{"Multiple Blocks And Chunks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}},
 	}
 
 	for _, tc := range testCases {
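Editorial note before the codecv3 changes: the verifyData fixtures checked by TestCodecV2BatchBlobDataProofForPointEvaluation above are 160 bytes, which appears to follow the EIP-4844 point-evaluation input layout z (32) || y (32) || commitment (48) || proof (48). A hedged sketch of checking such a proof with go-ethereum's kzg4844 package; the layout is our reading of the fixtures, not a documented guarantee:

// Editorial sketch, not part of the patch.
package sketch

import (
	"errors"

	"github.com/ethereum/go-ethereum/crypto/kzg4844"
)

// verifyBlobDataProof splits the assumed 160-byte proof bundle and runs
// the same KZG check the point-evaluation precompile performs.
func verifyBlobDataProof(verifyData []byte) error {
	if len(verifyData) != 160 {
		return errors.New("unexpected proof length")
	}
	var (
		point      kzg4844.Point
		claim      kzg4844.Claim
		commitment kzg4844.Commitment
		proof      kzg4844.Proof
	)
	copy(point[:], verifyData[0:32])
	copy(claim[:], verifyData[32:64])
	copy(commitment[:], verifyData[64:112])
	copy(proof[:], verifyData[112:160])
	return kzg4844.VerifyProof(commitment, point, claim, proof)
}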
diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go
index 4fbcc43..135f048 100644
--- a/encoding/codecv3_test.go
+++ b/encoding/codecv3_test.go
@@ -256,62 +256,62 @@ func TestCodecV3BatchEncode(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv3.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(daBatch.Encode())
 	assert.Equal(t,
"03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) - originalBatch = &Batch{Chunks: 
[]*Chunk{chunk2, chunk3, chunk4, chunk5}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) @@ -331,114 +331,183 @@ func TestCodecV3BatchHash(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv3.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2"), daBatch.Hash()) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac"), daBatch.Hash()) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4"), daBatch.Hash()) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49"), daBatch.Hash()) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, 
err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5"), daBatch.Hash())
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc"), daBatch.Hash())
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a"), daBatch.Hash())
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb"), daBatch.Hash())
 }
 
+func TestCodecV3NewDABatchFromBytes(t *testing.T) {
+	codecv3, err := CodecFromVersion(CodecV3)
+	require.NoError(t, err)
+
+	testCases := []struct {
+		name     string
+		jsonFile string
+	}{
+		{"Empty Batch", ""},
+		{"Block 02", "testdata/blockTrace_02.json"},
+		{"Block 03", "testdata/blockTrace_03.json"},
+		{"Block 04", "testdata/blockTrace_04.json"},
+		{"Block 05", "testdata/blockTrace_05.json"},
+		{"Block 06", "testdata/blockTrace_06.json"},
+		{"Block 07", "testdata/blockTrace_07.json"},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			var batch *Batch
+			var daBatch DABatch
+			var err error
+
+			if tc.jsonFile == "" {
+				// Empty daBatch
+				daBatch = &daBatchV3{
+					daBatchV0: daBatchV0{
+						version: CodecV3,
+					},
+				}
+			} else {
+				block := readBlockFromJSON(t, tc.jsonFile)
+				chunk := &Chunk{Blocks: []*Block{block}}
+				batch = &Batch{Chunks: []*Chunk{chunk}}
+				daBatch, err = codecv3.NewDABatch(batch)
+				assert.NoError(t, err)
+			}
+
+			// Encode the DABatch
+			encodedBytes := daBatch.Encode()
+
+			// Decode the bytes back into a DABatch
+			decodedDABatch, err := codecv3.NewDABatchFromBytes(encodedBytes)
+			assert.NoError(t, err)
+
+			// Compare the hashes of the original and decoded DABatch
+			assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name)
+		})
+	}
+
+	// Test with multiple blocks and chunks in a batch
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+
+	chunk1 := &Chunk{Blocks: []*Block{block2, block3}}
+	chunk2 := &Chunk{Blocks: []*Block{block4, block5}}
+	batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}}
+	daBatch, err := codecv3.NewDABatch(batch)
+	assert.NoError(t, err)
+
+	encodedBytes := daBatch.Encode()
+	decodedDABatch, err := codecv3.NewDABatchFromBytes(encodedBytes)
+	assert.NoError(t, err)
+
+	assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash())
+}
+
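Editorial note alongside the new round-trip test: the encoded daBatchV3 headers asserted in TestCodecV3BatchEncode are 193 bytes, and they appear to decode into the fields sketched below. Offsets are inferred from the hex fixtures; the struct is a hypothetical reader for illustration, not the library's own type:

// Editorial sketch, not part of the patch.
package sketch

import (
	"encoding/binary"
	"errors"

	"github.com/ethereum/go-ethereum/common"
)

// daBatchV3Header is a hypothetical view of the assumed 193-byte layout.
type daBatchV3Header struct {
	Version              uint8
	BatchIndex           uint64
	L1MessagePopped      uint64
	TotalL1MessagePopped uint64
	DataHash             common.Hash
	BlobVersionedHash    common.Hash
	ParentBatchHash      common.Hash
	LastBlockTimestamp   uint64
	BlobDataProof        [64]byte // z (32) || y (32)
}

// decodeDABatchV3Header reads the fixed-width big-endian fields in order.
func decodeDABatchV3Header(b []byte) (*daBatchV3Header, error) {
	if len(b) != 193 {
		return nil, errors.New("unexpected header length")
	}
	h := &daBatchV3Header{
		Version:              b[0],
		BatchIndex:           binary.BigEndian.Uint64(b[1:9]),
		L1MessagePopped:      binary.BigEndian.Uint64(b[9:17]),
		TotalL1MessagePopped: binary.BigEndian.Uint64(b[17:25]),
		DataHash:             common.BytesToHash(b[25:57]),
		BlobVersionedHash:    common.BytesToHash(b[57:89]),
		ParentBatchHash:      common.BytesToHash(b[89:121]),
		LastBlockTimestamp:   binary.BigEndian.Uint64(b[121:129]),
	}
	copy(h.BlobDataProof[:], b[129:193])
	return h, nil
}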
 func TestCodecV3BatchDataHash(t *testing.T) {
 	codecv3, err := CodecFromVersion(CodecV3)
 	require.NoError(t, err)
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv3.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash())
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash())
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash())
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash())
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash())
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash())
 
-	originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash())
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}}
 	chunk9 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash())
 }
@@ -791,74 +860,74 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	daBatch, err := codecv3.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped)
 	assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped)
 
 	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
 	chunk4 := &Chunk{Blocks: []*Block{block4}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk4}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk4}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped)
 	assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped)
 
 	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
 	chunk5 := &Chunk{Blocks: []*Block{block5}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk5}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk5}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 37
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
 
 	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
 	chunk6 := &Chunk{Blocks: []*Block{block6}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk6}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk6}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3
 	assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped)
 
 	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
 	chunk7 := &Chunk{Blocks: []*Block{block7}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk7}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk7}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).l1MessagePopped) // skip 255, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 1
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2
 	assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped)
 
 	chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
 	chunk9 := &Chunk{Blocks: []*Block{block5}}                 // queue index 37-41
-	originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
 
-	originalBatch.TotalL1MessagePoppedBefore = 10
-	daBatch, err = codecv3.NewDABatch(originalBatch)
+	batch.TotalL1MessagePoppedBefore = 10
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped)
 	assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped)
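Editorial note: every l1MessagePopped assertion in the hunk above follows one piece of arithmetic: a batch pops every queue index up to the highest one it includes, skipped messages included, minus whatever was popped before it. A small sketch of that rule; the helper is hypothetical and simply mirrors the test comments (blockTrace_05 has highest queue index 41, so before=0 pops 42 and before=37 pops 5, while the running total is 42 either way):

// Editorial sketch, not part of the patch.
package sketch

// l1MessagesPopped counts queue messages consumed by a batch whose
// highest included queue index is highestQueueIndex, given how many
// messages were already popped before it; skipped messages count too.
func l1MessagesPopped(highestQueueIndex, totalPoppedBefore uint64) (popped, totalPopped uint64) {
	totalPopped = highestQueueIndex + 1
	popped = totalPopped - totalPoppedBefore
	return popped, totalPopped
}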
@@ -870,75 +939,75 @@ func TestCodecV3BlobEncodingAndHashing(t *testing.T) {
 
 	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
 	chunk2 := &Chunk{Blocks: []*Block{block2}}
-	originalBatch := &Batch{Chunks: []*Chunk{chunk2}}
-	batch, err := codecv3.NewDABatch(originalBatch)
+	batch := &Batch{Chunks: []*Chunk{chunk2}}
+	daBatch, err := codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
-	encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0")
+	encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0")
 	assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded)
-	assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), batch.(*daBatchV3).blobVersionedHash)
+	assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), daBatch.(*daBatchV3).blobVersionedHash)
 
 	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
 	chunk3 := &Chunk{Blocks: []*Block{block3}}
-	originalBatch = &Batch{Chunks: []*Chunk{chunk3}}
-	batch, err = codecv3.NewDABatch(originalBatch)
+	batch = &Batch{Chunks: []*Chunk{chunk3}}
+	daBatch, err = codecv3.NewDABatch(batch)
 	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0")
+	encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0")
 	assert.Equal(t,
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), daBatch.(*daBatchV3).blobVersionedHash) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - batch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), daBatch.(*daBatchV3).blobVersionedHash) // this batch only contains L1 
txs block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - batch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV3).blobVersionedHash) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - batch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV3).blobVersionedHash) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - batch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV3).blobVersionedHash) // 45 chunks - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = 
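// The blob assertions in these tests hex-encode the entire 4844 blob and
// trim the trailing zero padding so the expected fixtures stay short; only
// the presentation is trimmed, since unused blob bytes are always zero. A
// minimal sketch of the pattern (the helper name is illustrative and not
// part of this package):
//
//	func blobHexTrimmed(blob *kzg4844.Blob) string {
//		return strings.TrimRight(hex.EncodeToString(blob[:]), "0")
//	}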
strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) - assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), daBatch.(*daBatchV3).blobVersionedHash) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - batch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60
ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d
6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), daBatch.(*daBatchV3).blobVersionedHash) } func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { @@ -947,8 +1016,8 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv3.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err := daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -956,8 +1025,8 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -965,8 +1034,8 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -974,8 +1043,8 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -983,8 +1052,8 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -992,16 +1061,16 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { 
block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) // 45 chunks - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -1009,8 +1078,8 @@ func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv3.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -1037,8 +1106,8 @@ func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { chunkBytes1, err := daChunk1.Encode() assert.NoError(t, err) - originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} - batch, err := codecv3.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + daBatch, err := codecv3.NewDABatch(batch) assert.NoError(t, err) daChunksRawTx, err := codecv3.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) @@ -1058,7 +1127,7 @@ func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) - blob := batch.Blob() + blob := daBatch.Blob() err = codecv3.DecodeTxsFromBlob(blob, daChunksRawTx) assert.NoError(t, err) @@ -1319,7 +1388,7 @@ func TestCodecV3BatchCompressedDataCompatibilityCheck(t *testing.T) { {"Single Block 05", []string{"testdata/blockTrace_05.json"}}, {"Single Block 06", []string{"testdata/blockTrace_06.json"}}, {"Single Block 07", []string{"testdata/blockTrace_07.json"}}, - {"Multiple Blocks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", 
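// In the DecodeDAChunksRawTx flow above, the encoded chunk byte streams
// carry block contexts only, so the decoded chunks start out without L2
// transaction payloads; those live in the blob and are filled in by the
// subsequent codecv3.DecodeTxsFromBlob(blob, daChunksRawTx) call, which is
// why the test fetches daBatch.Blob() first.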
"testdata/blockTrace_07.json"}}, + {"Multiple Blocks And Chunks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}}, } for _, tc := range testCases { diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 1730dff..950f411 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -255,62 +255,62 @@ func TestCodecV4BatchEncode(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, 
"040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) @@ -330,114 +330,183 @@ func TestCodecV4BatchHash(t *testing.T) { block2 := 
readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c"), daBatch.Hash()) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857"), daBatch.Hash()) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d"), daBatch.Hash()) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e"), daBatch.Hash()) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342"), daBatch.Hash()) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117"), daBatch.Hash()) - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a"), daBatch.Hash()) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44"), daBatch.Hash()) } +func TestCodecV4NewDABatchFromBytes(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + 
require.NoError(t, err) + + testCases := []struct { + name string + jsonFile string + }{ + {"Empty Batch", ""}, + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var batch *Batch + var daBatch DABatch + var err error + + if tc.jsonFile == "" { + // Empty daBatch + daBatch = &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV4, + }, + } + } else { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + batch = &Batch{Chunks: []*Chunk{chunk}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + } + + // Encode the DABatch + encodedBytes := daBatch.Encode() + + // Decode the bytes back into a DABatch + decodedDABatch, err := codecv4.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, err) + + // Compare the hashes of the original and decoded DABatch + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) + }) + } + + // Test with multiple blocks and chunks in a batch + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + chunk2 := &Chunk{Blocks: []*Block{block4, block5}} + batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + + encodedBytes := daBatch.Encode() + decodedDABatch, err := codecv4.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, err) + + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash()) +} + func TestCodecV4BatchDataHash(t *testing.T) { codecv4, err := CodecFromVersion(CodecV4) require.NoError(t, err) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - 
daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) } @@ -790,74 +859,74 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped) block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch 
= &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) - originalBatch.TotalL1MessagePoppedBefore = 37 - daBatch, err = codecv4.NewDABatch(originalBatch) + batch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3 assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(257), daBatch.(*daBatchV3).l1MessagePopped) // skip 255, include 2 assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) - originalBatch.TotalL1MessagePoppedBefore = 1 - daBatch, err = codecv4.NewDABatch(originalBatch) + batch.TotalL1MessagePoppedBefore = 1 + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2 assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) - originalBatch.TotalL1MessagePoppedBefore = 10 - daBatch, err = codecv4.NewDABatch(originalBatch) + batch.TotalL1MessagePoppedBefore = 10 + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped) assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) @@ -869,75 +938,75 @@ func TestCodecV4BlobEncodingAndHashing(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - batch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, 
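// Every expected blobVersionedHash in these tests begins with 0x01 because,
// under EIP-4844, a versioned hash is sha256(KZG commitment) with its first
// byte replaced by the version 0x01. A hedged sketch of the derivation (API
// shapes differ across go-ethereum versions, so treat it as illustrative):
//
//	commitment, err := kzg4844.BlobToCommitment(blob)
//	// handle err, then:
//	h := sha256.Sum256(commitment[:])
//	h[0] = 0x01 // versioned-hash version byte
//	versionedHash := common.Hash(h)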
"0001609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf00039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7731600a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed0032f1030060b26d07d8b028b005", encoded) - assert.Equal(t, common.HexToHash("0x01e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f29"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f29"), daBatch.(*daBatchV3).blobVersionedHash) block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "000160e7159d580094830001000016310002f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a0811037815160208301516040808501800051915193959294830192918464018211639083019060208201858179825181001182820188101794825250918201929091019080838360005b83c357818101005183820152602001620000a9565b50505050905090810190601f16f1578082000380516001836020036101000a031916819150805160405193929190011501002b01460175015b01a39081015185519093508592508491620001c891600391008501906200026b565b508051620001de90600490602084506005805461ff00001960ff1990911660121716905550600680546001600160a01b0380881619920083161790925560078054928716929091169190911790556200023081620002005562010000600160b01b03191633021790555062000307915050565b60ff19001660ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de010060010185558215620002de579182015b8202de5782518255916020019190600001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301570080639dc29fac14610309578063a457c2d714610335578063a9059cbb1461030061578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610002a55780638456cb59146102cb5780638e50817a146102d3313ce56711610000de571461021d578063395093511461023b5780633f4ba83a146102675780630040c10f191461027106fdde0314610110578063095ea7b31461018d5780631800160ddd146101cd57806323b872e7575b6101186103bb565b6040805160208000825283518183015283519192839290830161015261013a61017f9250508091000390f35b6101b9600480360360408110156101a381351690602001356104510091151582525190819003602001d561046e60fd81169160208101359091169000604074565b6102256104fb60ff90921640025105046f610552565b005b6102006f028705a956610654d520bb3516610662067d56e90135166106d21861075700031f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282008152606093909290918301828280156104475780601f1061041c57610100800083540402835291610447565b825b8154815260200180831161042a5782900300601f16820191565b600061046561045e610906565b848461090a565b506001009202548184f6565b6104f18461048d6104ec8560405180606080602861108500602891398a1660009081526001602052604
08120906104cb81019190915260004001600020549190610b51565b935460ff160511016000610522908116825200602080830193909352604091820120918c168152925290205490610be8565b00600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b001b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090000460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606004606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616300746f727960a0079283918216179091559390921660041561080808550e6508006c2511176025006108968dd491824080832093909416825233831661094f5700040180806020018281038252602401806110f36024913960400191fd821661000994223d60228084166000819487168084529482529182902085905581518500815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b20000ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a008b838383610f61565b610ac881265f60268685808220939093559084168152002054610af7908220409490945580905191937fddf252ad1be2c89b69c2b06800fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111500610be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53006166654d6174683a206164646974696f6e206f766572666c6f7700610c9c140073621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537b00d38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e742074006f20746865207a65726f72657373610d546000600254610d61025590205461000d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e42020070b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad602161000eb68260000ef3221b85839020550f199082610fb540805182600091851691009120565b610f6cb07415610fb02a113c602a00610c428383401e7375627472006163815250fe7472616e736665726275726e20616d6f756e742065786365650064732062616c616e6365617070726f7665616c6c6f7766726f6d646563726500617365642062656c6f775061757361626c653a20746f6b656e7768696c652000706175736564a2646970667358221220e96342bec8f6c2bf72815a3999897300b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a7700d9fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e0400c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f055003c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5200095d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60300126013290b6398528818e2c8484081888c4890142465a631e63178f994004800f46ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a80049670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fa00b388531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee55000b5e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b163008aa1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637100664c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d44700c0318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a300958d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa800b597b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b001b3f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242008009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a07700b85b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc800bea3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf96244333647009fbd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1000392cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d4614217006fcdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15b00c9975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e4500e579308f554787b4d1f74e389823923f5d268be545466a2dd449963ad2540700bd3a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe276800a9091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c3953600c5de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98800998d54
917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, common.HexToHash("0x01ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df71050"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df71050"), daBatch.(*daBatchV3).blobVersionedHash) block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "000120d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df0002d40343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, common.HexToHash("0x01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93"), daBatch.(*daBatchV3).blobVersionedHash) // this batch only contains L1 txs block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "00000001", encoded) - assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), daBatch.(*daBatchV3).blobVersionedHash) block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "00000001", encoded) - assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), daBatch.(*daBatchV3).blobVersionedHash) block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "00000001", encoded) - assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), 
batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), daBatch.(*daBatchV3).blobVersionedHash) // 45 chunks - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, "00016024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1040041e1491b3e82c9b61d60d39a727", encoded) - assert.Equal(t, common.HexToHash("0x0128a4e122c179a7c34ab1f22ceadf6fa66d2bb0d229933fe1ed061dd8b1fb5f"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x0128a4e122c179a7c34ab1f22ceadf6fa66d2bb0d229933fe1ed061dd8b1fb5f"), daBatch.(*daBatchV3).blobVersionedHash) chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - batch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.(*daBatchV3).blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") assert.Equal(t, 
"000160ed16256000449200020000173700f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf00039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7731600a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed0032f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348000156200001157600080fd5b50604051620014b238038083398181016040526000a0811037815160208301516040808501805191519395929483019291846401008211639083019060208201858179825181118282018810179482525091820100929091019080838360005b83c3578181015183820152602001620000a9565b0050505050905090810190601f16f15780820380516001836020036101000a030019168191508051604051939291900115012b01460175015b01a3908101518500519093508592508491620001c8916003918501906200026b565b50805162000001de90600490602084506005805461ff001960ff199091166012171690555000600680546001600160a01b03808816199283161790925560078054928716920090911691909117905562000230816200025562010000600160b01b0319163300021790555062000307915050565b60ff191660ff929092565b828160011615006101000203166002900490600052602060002090601f01602090048101928200601f10620002ae5780518380011785de0160010185558215620002de57918200015b8202de57825182559160200191906001c1565b50620002ec9291f0565b005090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb11610000a257806395d89b4111610071146103015780639dc29fac14610309578063a40057c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576100010b565b1461029d57806370a08231146102a55780638456cb59146102cb570080638e50817a146102d3313ce567116100de571461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f191461027106fdde031461000110578063095ea7b31461018d57806318160ddd146101cd57806323b872e700575b6101186103bb565b6040805160208082528351818301528351919283920090830161015261013a61017f92505080910390f35b6101b960048036036040008110156101a3813516906020013561045191151582525190819003602001d50061046e60fd811691602081013590911690604074565b6102256104fb60ff9000921640025105046f610552565b005b61026f028705a956610654d520bb351600610662067d56e90135166106d218610757031f07b856034b085f77c7d5a30800db565b6003805420601f600260001961010060018816150201909516949094000493840181900481028201810190925282815260609390929091830182828000156104475780601f1061041c576101008083540402835291610447565b825b008154815260200180831161042a57829003601f16820191565b60006104656100045e610906565b848461090a565b5060019202548184f6565b6104f1846104008d6104ec85604051806060806028611085602891398a16600090815260016000205260408120906104cb810191909152604001600020549190610b51565b93005460ff160511016000610522908116825260208083019390935260409182010020918c168152925290205490610be8565b600716331461059f5762461bcd6000e51b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529000640190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5c00d8589b194e881c185d5cd95960826006064606508282610ced90905260400600ca0ddd900407260c6b6f6e6c7920466163746f727960a007928391821617900091559390921660041561080808550e65086c2511176025006108968dd49182004080832093909416825233831661094f5704018080602001828103825260240001806110f36024913960400191fd8216610994223d60228084166000819487001680845294825291829020859055815185815291517f8c5be1e5ebec7d5bd1004f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831600610a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac88126005f602686858082209390935590841681
522054610af790822040949094558000905191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f5005a4df523b3ef9291829003008184841115610be08381815191508051900ba5000b8d0bd2fd900300828201610c421b7f536166654d6174683a20616464697400696f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e600ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a1821661000d481f7f45524332303a206d696e7420746f20746865207a65726f7265737300610d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdb00fc544b05a2588216610eaa6021ad6021610eb68260000ef3221b8583902055000f199082610fb5408051826000918516919120565b610f6cb07415610fb02a00113c602a00610c428383401e73756274726163815250fe7472616e73666572006275726e20616d6f756e7420657863656564732062616c616e636561707072006f7665616c6c6f7766726f6d6465637265617365642062656c6f77506175730061626c653a20746f6b656e7768696c6520706175736564a264697066735822001220e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda000265d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a500820b63a0e012095745544820636f696e04c001a0235c1a8d40e8c34789039700f1a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd0697044006e74229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a00258d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a914009a111111110549d2740105c410e61ca4d603126013290b6398528818e2c848004081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb8000ccba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc500c5ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69800511c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be007ea27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0500238c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e923001dd28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af001ff932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb410002cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a0013b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f00061093a37810212ba36db205219fab4032428009178588ad21f754085dd80700b09af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403300355c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d369005c0904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5200463d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71160024bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80004421f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7ac00db3071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38980023923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f6700ea8d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c608007efc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac008533de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c63006da70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, common.HexToHash("0x0121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26"), batch.(*daBatchV3).blobVersionedHash) + assert.Equal(t, common.HexToHash("0x0121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26"), daBatch.(*daBatchV3).blobVersionedHash) } func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { @@ -946,8 +1015,8 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") chunk2 := &Chunk{Blocks: []*Block{block2}} - originalBatch := &Batch{Chunks: []*Chunk{chunk2}} - daBatch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := 
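// Judging by the 160-byte fixtures, BlobDataProofForPointEvaluation returns
// the tail of the EIP-4844 point-evaluation precompile input for the blob,
// with the 32-byte versioned hash supplied separately:
//
//	z, y := proof[0:32], proof[32:64]                      // evaluation point, claimed value
//	commitment, kzgProof := proof[64:112], proof[112:160]  // 48-byte KZG commitment and proof
//
// The 45-chunk cases below fill a batch to the codec's MaxNumChunks to
// exercise the largest metadata section; this layout reading is an
// inference from the fixtures, not a spec statement.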
codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err := daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -955,8 +1024,8 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") chunk3 := &Chunk{Blocks: []*Block{block3}} - originalBatch = &Batch{Chunks: []*Chunk{chunk3}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -964,8 +1033,8 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") chunk4 := &Chunk{Blocks: []*Block{block4}} - originalBatch = &Batch{Chunks: []*Chunk{chunk4}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -973,8 +1042,8 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") chunk5 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk5}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -982,8 +1051,8 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") chunk6 := &Chunk{Blocks: []*Block{block6}} - originalBatch = &Batch{Chunks: []*Chunk{chunk6}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -991,16 +1060,16 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") chunk7 := &Chunk{Blocks: []*Block{block7}} - originalBatch = &Batch{Chunks: []*Chunk{chunk7}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) // 45 chunks - originalBatch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, 
chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -1008,8 +1077,8 @@ func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} chunk9 := &Chunk{Blocks: []*Block{block5}} - originalBatch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} - daBatch, err = codecv4.NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) assert.NoError(t, err) verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -1036,8 +1105,8 @@ func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { chunkBytes1, err := daChunk1.Encode() assert.NoError(t, err) - originalBatch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} - batch, err := codecv4.NewDABatch(originalBatch) + batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + daBatch, err := codecv4.NewDABatch(batch) assert.NoError(t, err) daChunksRawTx1, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) @@ -1057,7 +1126,7 @@ func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { daChunksRawTx1[1].Blocks[1].(*daBlockV0).baseFee = nil assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx1[1].Blocks[1]) - blob := batch.Blob() + blob := daBatch.Blob() err = codecv4.DecodeTxsFromBlob(blob, daChunksRawTx1) assert.NoError(t, err) @@ -1511,7 +1580,7 @@ func TestCodecV4BatchCompressedDataCompatibilityCheck(t *testing.T) { {"Single Block 05", []string{"testdata/blockTrace_05.json"}, false}, {"Single Block 06", []string{"testdata/blockTrace_06.json"}, false}, {"Single Block 07", []string{"testdata/blockTrace_07.json"}, false}, - {"Multiple Blocks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}, true}, + {"Multiple Blocks And Chunks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}, true}, } for _, tc := range testCases { From 60b93622dfd7ce823cad878ed37ce7491f9ce31e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 14:52:11 +0800 Subject: [PATCH 125/126] fix golint --- encoding/codecv0_test.go | 14 +++++++------- encoding/codecv1_test.go | 10 +++++----- encoding/codecv2_test.go | 10 +++++----- encoding/codecv3_test.go | 10 +++++----- encoding/codecv4_test.go | 10 +++++----- 5 files changed, 27 insertions(+), 27 deletions(-) diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go index 830d243..32a29cf 100644 --- a/encoding/codecv0_test.go +++ b/encoding/codecv0_test.go @@ -194,8 +194,8 @@ func TestCodecV0BatchEncode(t *testing.T) { require.NoError(t, err) // empty batch - daBatchV0 := daBatchV0{version: CodecV0} - encoded := hex.EncodeToString(daBatchV0.Encode()) + emptyDABatchV0 := daBatchV0{version: CodecV0} + encoded := hex.EncodeToString(emptyDABatchV0.Encode()) assert.Equal(t, 
"0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") @@ -349,7 +349,7 @@ func TestCodecV0NewDABatchFromBytes(t *testing.T) { t.Run(tc.name, func(t *testing.T) { var batch *Batch var daBatch DABatch - var err error + var createErr1 error if tc.jsonFile == "" { // Empty daBatch @@ -358,16 +358,16 @@ func TestCodecV0NewDABatchFromBytes(t *testing.T) { block := readBlockFromJSON(t, tc.jsonFile) chunk := &Chunk{Blocks: []*Block{block}} batch = &Batch{Chunks: []*Chunk{chunk}} - daBatch, err = codecv0.NewDABatch(batch) - assert.NoError(t, err) + daBatch, createErr1 = codecv0.NewDABatch(batch) + assert.NoError(t, createErr1) } // Encode the DABatch encodedBytes := daBatch.Encode() // Decode the bytes back into a DABatch - decodedDABatch, err := codecv0.NewDABatchFromBytes(encodedBytes) - assert.NoError(t, err) + decodedDABatch, createErr2 := codecv0.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) // Compare the hashes of the original and decoded DABatch assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) diff --git a/encoding/codecv1_test.go b/encoding/codecv1_test.go index 701bb6c..a9a38ca 100644 --- a/encoding/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -403,7 +403,7 @@ func TestCodecV1NewDABatchFromBytes(t *testing.T) { t.Run(tc.name, func(t *testing.T) { var batch *Batch var daBatch DABatch - var err error + var createErr1 error if tc.jsonFile == "" { // Empty daBatch @@ -416,16 +416,16 @@ func TestCodecV1NewDABatchFromBytes(t *testing.T) { block := readBlockFromJSON(t, tc.jsonFile) chunk := &Chunk{Blocks: []*Block{block}} batch = &Batch{Chunks: []*Chunk{chunk}} - daBatch, err = codecv1.NewDABatch(batch) - assert.NoError(t, err) + daBatch, createErr1 = codecv1.NewDABatch(batch) + assert.NoError(t, createErr1) } // Encode the DABatch encodedBytes := daBatch.Encode() // Decode the bytes back into a DABatch - decodedDABatch, err := codecv1.NewDABatchFromBytes(encodedBytes) - assert.NoError(t, err) + decodedDABatch, createErr2 := codecv1.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) // Compare the hashes of the original and decoded DABatch assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) diff --git a/encoding/codecv2_test.go b/encoding/codecv2_test.go index df49db6..0e6c088 100644 --- a/encoding/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -403,7 +403,7 @@ func TestCodecV2NewDABatchFromBytes(t *testing.T) { t.Run(tc.name, func(t *testing.T) { var batch *Batch var daBatch DABatch - var err error + var createErr1 error if tc.jsonFile == "" { // Empty daBatch @@ -416,16 +416,16 @@ func TestCodecV2NewDABatchFromBytes(t *testing.T) { block := readBlockFromJSON(t, tc.jsonFile) chunk := &Chunk{Blocks: []*Block{block}} batch = &Batch{Chunks: []*Chunk{chunk}} - daBatch, err = codecv2.NewDABatch(batch) - assert.NoError(t, err) + daBatch, createErr1 = codecv2.NewDABatch(batch) + assert.NoError(t, createErr1) } // Encode the DABatch encodedBytes := daBatch.Encode() // Decode the bytes back into a DABatch - decodedDABatch, err := codecv2.NewDABatchFromBytes(encodedBytes) - assert.NoError(t, err) + decodedDABatch, createErr2 := codecv2.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) // Compare the hashes of the original and decoded DABatch assert.Equal(t, 
daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go index 135f048..03d4453 100644 --- a/encoding/codecv3_test.go +++ b/encoding/codecv3_test.go @@ -405,7 +405,7 @@ func TestCodecV3NewDABatchFromBytes(t *testing.T) { t.Run(tc.name, func(t *testing.T) { var batch *Batch var daBatch DABatch - var err error + var createErr1 error if tc.jsonFile == "" { // Empty daBatch @@ -418,16 +418,16 @@ func TestCodecV3NewDABatchFromBytes(t *testing.T) { block := readBlockFromJSON(t, tc.jsonFile) chunk := &Chunk{Blocks: []*Block{block}} batch = &Batch{Chunks: []*Chunk{chunk}} - daBatch, err = codecv3.NewDABatch(batch) - assert.NoError(t, err) + daBatch, createErr1 = codecv3.NewDABatch(batch) + assert.NoError(t, createErr1) } // Encode the DABatch encodedBytes := daBatch.Encode() // Decode the bytes back into a DABatch - decodedDABatch, err := codecv3.NewDABatchFromBytes(encodedBytes) - assert.NoError(t, err) + decodedDABatch, createErr2 := codecv3.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) // Compare the hashes of the original and decoded DABatch assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go index 950f411..c83601e 100644 --- a/encoding/codecv4_test.go +++ b/encoding/codecv4_test.go @@ -404,7 +404,7 @@ func TestCodecV4NewDABatchFromBytes(t *testing.T) { t.Run(tc.name, func(t *testing.T) { var batch *Batch var daBatch DABatch - var err error + var createErr1 error if tc.jsonFile == "" { // Empty daBatch @@ -417,16 +417,16 @@ func TestCodecV4NewDABatchFromBytes(t *testing.T) { block := readBlockFromJSON(t, tc.jsonFile) chunk := &Chunk{Blocks: []*Block{block}} batch = &Batch{Chunks: []*Chunk{chunk}} - daBatch, err = codecv4.NewDABatch(batch) - assert.NoError(t, err) + daBatch, createErr1 = codecv4.NewDABatch(batch) + assert.NoError(t, createErr1) } // Encode the DABatch encodedBytes := daBatch.Encode() // Decode the bytes back into a DABatch - decodedDABatch, err := codecv4.NewDABatchFromBytes(encodedBytes) - assert.NoError(t, err) + decodedDABatch, createErr2 := codecv4.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) // Compare the hashes of the original and decoded DABatch assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) From 0e831aae778a849ec15a261972abf634fca5e8b9 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 18 Oct 2024 15:00:49 +0800 Subject: [PATCH 126/126] tweaks --- encoding/bitmap.go | 6 +++--- encoding/codecv0.go | 4 ++-- encoding/codecv3.go | 4 ++-- encoding/codecv4.go | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index e2e2ab8..19f9a02 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -54,14 +54,14 @@ func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePo } } - bitmapBytes := make([]byte, len(skippedBitmap)*skippedL1MessageBitmapByteSize) + skippedL1MessageBitmap := make([]byte, len(skippedBitmap)*skippedL1MessageBitmapByteSize) for ii, num := range skippedBitmap { bytes := num.Bytes() padding := skippedL1MessageBitmapByteSize - len(bytes) - copy(bitmapBytes[skippedL1MessageBitmapByteSize*ii+padding:], bytes) + copy(skippedL1MessageBitmap[skippedL1MessageBitmapByteSize*ii+padding:], bytes) } - return bitmapBytes, nextIndex, nil + return skippedL1MessageBitmap, nextIndex, nil } // decodeBitmap decodes 
skipped L1 message bitmap of the batch from bytes to big.Int's. diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 7e693db..cbe4af3 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -179,7 +179,7 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err) } @@ -196,7 +196,7 @@ func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash batch.ParentBatchHash, // parentBatchHash - bitmapBytes, // skippedL1MessageBitmap + skippedL1MessageBitmap, // skippedL1MessageBitmap ) return daBatch, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 51e3d7c..3ea65f1 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -41,7 +41,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -69,7 +69,7 @@ func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { dataHash, // dataHash batch.ParentBatchHash, // parentBatchHash blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap + skippedL1MessageBitmap, // skippedL1MessageBitmap blob, // blob z, // z blobBytes, // blobBytes diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 6c98f89..4d51fe5 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -65,7 +65,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -98,7 +98,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { dataHash, // dataHash batch.ParentBatchHash, // parentBatchHash blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap + skippedL1MessageBitmap, // skippedL1MessageBitmap blob, // blob z, // z blobBytes, // blobBytes
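Worth spelling out why the "fix golint" patch renames the test-local daBatchV0 variable: the short declaration daBatchV0 := daBatchV0{version: CodecV0} compiles only because a new identifier's scope begins after the declaration, so the right-hand literal still names the type; from that point on the variable shadows the type for the rest of the block, which is presumably what the linter flagged. A minimal standalone sketch of the pitfall, where the one-field struct is a stand-in rather than the library's daBatchV0:

package main

import "fmt"

// daBatchV0 is a one-field stand-in for the real struct; it exists
// only to demonstrate the shadowing that the golint patch removes.
type daBatchV0 struct{ version uint8 }

func main() {
	// Compiles: the right-hand side is resolved before the new
	// variable's scope begins, so it still refers to the type.
	daBatchV0 := daBatchV0{version: 0}
	fmt.Println(daBatchV0.version)

	// From here on daBatchV0 is a variable, not a type, so a second
	// literal such as daBatchV0{version: 1} would no longer compile
	// in this scope -- hence the rename to emptyDABatchV0 in the test.
}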
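The renamed skippedL1MessageBitmap in constructSkippedBitmap is the byte form of the skip bitmap: each big.Int word is serialized big-endian and right-aligned into a fixed-width slot, and the padding arithmetic exists because big.Int.Bytes() drops leading zeros. The following self-contained sketch shows that layout and its inverse; it is not the library's code -- packBitmap and unpackBitmap are illustrative names, and the 32-byte slot width is an assumption matching the 256-bit words the bitmap is built from.

package main

import (
	"fmt"
	"math/big"
)

// Assumed slot width: one 256-bit bitmap word per 32 bytes, mirroring
// skippedL1MessageBitmapByteSize in the patch above.
const bitmapWordSize = 32

// packBitmap right-aligns each word's big-endian bytes in its slot,
// restoring the leading zeros that big.Int.Bytes() omits.
func packBitmap(words []*big.Int) []byte {
	out := make([]byte, len(words)*bitmapWordSize)
	for i, w := range words {
		b := w.Bytes()
		copy(out[i*bitmapWordSize+(bitmapWordSize-len(b)):], b)
	}
	return out
}

// unpackBitmap is the inverse: it slices the byte form back into words.
func unpackBitmap(data []byte) []*big.Int {
	words := make([]*big.Int, 0, len(data)/bitmapWordSize)
	for i := 0; i+bitmapWordSize <= len(data); i += bitmapWordSize {
		words = append(words, new(big.Int).SetBytes(data[i:i+bitmapWordSize]))
	}
	return words
}

func main() {
	// Mark L1 messages 0 and 3 of the first 256 as skipped.
	w := new(big.Int)
	w.SetBit(w, 0, 1)
	w.SetBit(w, 3, 1)
	packed := packBitmap([]*big.Int{w})
	fmt.Printf("%x\n", packed)                  // 31 zero bytes, then 09
	fmt.Println(unpackBitmap(packed)[0].Bit(3)) // 1
}

The round-trip property (unpackBitmap(packBitmap(words)) reproduces words) is what lets decodeBitmap in bitmap.go recover the skipped-message words from the committed batch bytes.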