diff --git a/encoding/bitmap.go b/encoding/bitmap.go index 5631983..19f9a02 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -7,8 +7,8 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" ) -// ConstructSkippedBitmap constructs skipped L1 message bitmap of the batch. -func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { +// constructSkippedBitmap constructs skipped L1 message bitmap of the batch. +func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int @@ -54,39 +54,29 @@ func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePo } } - bitmapBytes := make([]byte, len(skippedBitmap)*32) + skippedL1MessageBitmap := make([]byte, len(skippedBitmap)*skippedL1MessageBitmapByteSize) for ii, num := range skippedBitmap { bytes := num.Bytes() - padding := 32 - len(bytes) - copy(bitmapBytes[32*ii+padding:], bytes) + padding := skippedL1MessageBitmapByteSize - len(bytes) + copy(skippedL1MessageBitmap[skippedL1MessageBitmapByteSize*ii+padding:], bytes) } - return bitmapBytes, nextIndex, nil + return skippedL1MessageBitmap, nextIndex, nil } -// DecodeBitmap decodes skipped L1 message bitmap of the batch from bytes to big.Int's -func DecodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*big.Int, error) { +// decodeBitmap decodes skipped L1 message bitmap of the batch from bytes to big.Int's. +func decodeBitmap(skippedL1MessageBitmap []byte, totalL1MessagePopped int) ([]*big.Int, error) { length := len(skippedL1MessageBitmap) - if length%32 != 0 { - return nil, fmt.Errorf("skippedL1MessageBitmap length doesn't match, skippedL1MessageBitmap length should be equal 0 modulo 32, length of skippedL1MessageBitmap: %v", length) + if length%skippedL1MessageBitmapByteSize != 0 { + return nil, fmt.Errorf("skippedL1MessageBitmap length doesn't match, skippedL1MessageBitmap length should be equal 0 modulo %v, length of skippedL1MessageBitmap: %v", skippedL1MessageBitmapByteSize, length) } if length*8 < totalL1MessagePopped { return nil, fmt.Errorf("skippedL1MessageBitmap length is too small, skippedL1MessageBitmap length should be at least %v, length of skippedL1MessageBitmap: %v", (totalL1MessagePopped+7)/8, length) } var skippedBitmap []*big.Int - for index := 0; index < length/32; index++ { - bitmap := big.NewInt(0).SetBytes(skippedL1MessageBitmap[index*32 : index*32+32]) + for index := 0; index < length/skippedL1MessageBitmapByteSize; index++ { + bitmap := big.NewInt(0).SetBytes(skippedL1MessageBitmap[index*skippedL1MessageBitmapByteSize : index*skippedL1MessageBitmapByteSize+skippedL1MessageBitmapByteSize]) skippedBitmap = append(skippedBitmap, bitmap) } return skippedBitmap, nil } - -// IsL1MessageSkipped checks if index is skipped in bitmap -func IsL1MessageSkipped(skippedBitmap []*big.Int, index uint64) bool { - if index > uint64(len(skippedBitmap))*256 { - return false - } - quo := index / 256 - rem := index % 256 - return skippedBitmap[quo].Bit(int(rem)) != 0 -} diff --git a/encoding/bitmap_test.go b/encoding/bitmap_test.go new file mode 100644 index 0000000..d5abe30 --- /dev/null +++ b/encoding/bitmap_test.go @@ -0,0 +1,44 @@ +package encoding + +import ( + "encoding/hex" + "math/big" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDecodeBitmap(t *testing.T) { + bitmapHex := 
"0000000000000000000000000000000000000000000000000000001ffffffbff" + skippedL1MessageBitmap, err := hex.DecodeString(bitmapHex) + assert.NoError(t, err) + + decodedBitmap, err := decodeBitmap(skippedL1MessageBitmap, 42) + assert.NoError(t, err) + + isL1MessageSkipped := func(skippedBitmap []*big.Int, index uint64) bool { + if index >= uint64(len(skippedBitmap))*256 { + return false + } + quo := index / 256 + rem := index % 256 + return skippedBitmap[quo].Bit(int(rem)) == 1 + } + + assert.True(t, isL1MessageSkipped(decodedBitmap, 0)) + assert.True(t, isL1MessageSkipped(decodedBitmap, 9)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 10)) + assert.True(t, isL1MessageSkipped(decodedBitmap, 11)) + assert.True(t, isL1MessageSkipped(decodedBitmap, 36)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 37)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 38)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 39)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 40)) + assert.False(t, isL1MessageSkipped(decodedBitmap, 41)) + + _, err = decodeBitmap([]byte{0x00}, 8) + assert.Error(t, err) + + _, err = decodeBitmap([]byte{0x00, 0x00, 0x00, 0x00}, 33) + assert.Error(t, err) +} diff --git a/encoding/codecv0.go b/encoding/codecv0.go new file mode 100644 index 0000000..cbe4af3 --- /dev/null +++ b/encoding/codecv0.go @@ -0,0 +1,424 @@ +package encoding + +import ( + "encoding/binary" + "errors" + "fmt" + "math" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +type DACodecV0 struct{} + +// codecv0MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv0MaxNumChunks = 15 + +// Version returns the codec version. +func (d *DACodecV0) Version() CodecVersion { + return CodecV0 +} + +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (d *DACodecV0) MaxNumChunksPerBatch() int { + return codecv0MaxNumChunks +} + +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (d *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := newDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil +} + +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
+func (d *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > math.MaxUint8 { + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) + } + + blocks := make([]DABlock, 0, len(chunk.Blocks)) + txs := make([][]*types.TransactionData, 0, len(chunk.Blocks)) + + for _, block := range chunk.Blocks { + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + if len(blocks) != len(txs) { + return nil, fmt.Errorf("number of blocks (%d) does not match number of transactions (%d)", len(blocks), len(txs)) + } + + return &daChunkV0{ + blocks: blocks, + transactions: txs, + }, nil +} + +// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. +func (d *DACodecV0) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { + chunks := make([]*DAChunkRawTx, 0, len(chunkBytes)) + for _, chunk := range chunkBytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*blockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*blockContextByteSize) + } + + blocks := make([]DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*blockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + blockContextByteSize + blocks[i] = &daBlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + var transactions []types.Transactions + currentIndex := 1 + numBlocks*blockContextByteSize + for _, block := range blocks { + var blockTransactions types.Transactions + // ignore L1 msg transactions from the block, consider only L2 transactions + txNum := int(block.NumTransactions()) - int(block.NumL1Messages()) + if txNum < 0 { + return nil, fmt.Errorf("invalid transaction count: NumL1Messages (%d) exceeds NumTransactions (%d)", block.NumL1Messages(), block.NumTransactions()) + } + for i := 0; i < txNum; i++ { + if len(chunk) < currentIndex+txLenByteSize { + return nil, fmt.Errorf("chunk size doesn't match, next tx size is less then 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+txLenByteSize, i) + } + txLen := int(binary.BigEndian.Uint32(chunk[currentIndex : currentIndex+txLenByteSize])) + if len(chunk) < currentIndex+txLenByteSize+txLen { + return nil, fmt.Errorf("chunk size doesn't match with next tx length, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+txLenByteSize+txLen, i) + } + txData := chunk[currentIndex+txLenByteSize : currentIndex+txLenByteSize+txLen] + tx := &types.Transaction{} + err := tx.UnmarshalBinary(txData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal tx, pos of tx in chunk bytes: %d. 
tx num without l1 msgs: %d, err: %w", currentIndex, i, err) + } + blockTransactions = append(blockTransactions, tx) + currentIndex += txLenByteSize + txLen + } + transactions = append(transactions, blockTransactions) + } + + chunks = append(chunks, &DAChunkRawTx{ + Blocks: blocks, + Transactions: transactions, + }) + } + return chunks, nil +} + +// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks +func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + return nil +} + +// NewDABatch creates a DABatch from the provided Batch. +func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("batch must contain at least one chunk") + } + + // compute batch data hash + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, fmt.Errorf("failed to compute batch data hash, index: %d, err: %w", batch.Index, err) + } + + // skipped L1 messages bitmap + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err) + } + + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + + daBatch := newDABatchV0( + CodecV0, // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + skippedL1MessageBitmap, // skippedL1MessageBitmap + ) + + return daBatch, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. 
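+// The CodecV0 batch header is a fixed 89-byte prefix followed by the variable-length skipped L1
+// message bitmap. A sketch of the layout with literal offsets (the daBatch*Offset* constants used
+// below are assumed to resolve to these positions, matching the previous codecv0 package):
+//
+//	data[0:1]    version (CodecV0)
+//	data[1:9]    batchIndex             (big-endian uint64)
+//	data[9:17]   l1MessagePopped        (big-endian uint64)
+//	data[17:25]  totalL1MessagePopped   (big-endian uint64)
+//	data[25:57]  dataHash               (32 bytes)
+//	data[57:89]  parentBatchHash        (32 bytes)
+//	data[89:]    skippedL1MessageBitmap (a multiple of 32 bytes)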
+func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { + if len(data) < daBatchV0EncodedMinLength { + return nil, fmt.Errorf("insufficient data for DABatch, expected at least %d bytes but got %d", daBatchV0EncodedMinLength, len(data)) + } + + if CodecVersion(data[daBatchOffsetVersion]) != CodecV0 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV0, data[daBatchOffsetVersion]) + } + + return newDABatchV0( + CodecVersion(data[daBatchOffsetVersion]), // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV0OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV0OffsetParentBatchHash]), // dataHash + common.BytesToHash(data[daBatchV0OffsetParentBatchHash:daBatchV0OffsetSkippedL1MessageBitmap]), // parentBatchHash + data[daBatchV0OffsetSkippedL1MessageBitmap:], // skippedL1MessageBitmap + ), nil +} + +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. +func (d *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { + var size uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + continue + } + size += payloadLengthBytes + txPayloadLength, err := getTxPayloadLength(txData) + if err != nil { + return 0, err + } + size += txPayloadLength + } + size += blockContextByteSize + return size, nil +} + +// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. +func (d *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + + txPayloadLength, err := getTxPayloadLength(txData) + if err != nil { + return 0, err + } + total += calldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += calldataNonZeroByteGas * 4 // 4 bytes payload length + total += getKeccak256Gas(txPayloadLength) // l2 tx hash + } + + total += calldataNonZeroByteGas * blockContextByteSize + + // sload + total += coldSloadGas * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += warmAddressAccessGas * numL1Messages // numL1Messages times call to L1MessageQueue + total += warmAddressAccessGas * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // staticcall to proxy + total += warmAddressAccessGas * numL1Messages // read admin in proxy + total += warmAddressAccessGas * numL1Messages // read impl in proxy + total += warmAddressAccessGas * numL1Messages // access impl + total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // delegatecall to impl + + return total, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
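+// In CodecV0 a block contributes its 60-byte block context plus, for every L2 transaction, a 4-byte
+// length prefix and the RLP-encoded payload; L1 message transactions contribute nothing. A sketch of
+// the arithmetic behind this estimate (blockContextByteSize and payloadLengthBytes are assumed to be
+// 60 and 4, matching the previous codecv0 package constants; rlpLen is an illustrative helper):
+//
+//	var size uint64
+//	for _, block := range chunk.Blocks {
+//	    size += 60 // block context
+//	    for _, tx := range block.Transactions {
+//	        if tx.Type == types.L1MessageTxType {
+//	            continue // L1 messages are not part of the commit calldata
+//	        }
+//	        size += 4 + rlpLen(tx) // length prefix + RLP payload
+//	    }
+//	}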
+func (d *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, block := range c.Blocks { + blockL1CommitCalldataSize, err := d.EstimateBlockL1CommitCalldataSize(block) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += blockL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func (d *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + var totalTxNum uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalTxNum += uint64(len(block.Transactions)) + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += warmSloadGas * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += getKeccak256Gas(blockContextBytesForHashing*numBlocks + common.HashLength*totalTxNum) // chunk hash + return totalL1CommitGas, nil +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. +func (d *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + var totalL1CommitGas uint64 + + // Add extra gas costs + totalL1CommitGas += extraGasCost // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * coldSloadGas // 4 one-time cold sload for commitBatch + totalL1CommitGas += sstoreGas // 1 time sstore + totalL1CommitGas += baseTxGas // base gas for tx + totalL1CommitGas += calldataNonZeroByteGas // version in calldata + + // adjusting gas: + // add 1 time cold sload (2100 gas) for L1MessageQueue + // add 1 time cold address access (2600 gas) for L1MessageQueue + // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) + totalL1CommitGas += (coldSloadGas + coldAddressAccessGas - warmSloadGas - warmAddressAccessGas) + totalL1CommitGas += getKeccak256Gas(daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header hash, length is estimated as (constant part) + (1 skippedL1MessageBitmap) + totalL1CommitGas += calldataNonZeroByteGas * (daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header in calldata + + // adjust batch data hash gas cost + totalL1CommitGas += getKeccak256Gas(uint64(common.HashLength * len(b.Chunks))) + + totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore + + for _, chunk := range b.Chunks { + chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += chunkL1CommitGas + + totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) + totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk + + totalL1CommitGas += calldataNonZeroByteGas * (skippedL1MessageBitmapByteSize * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += getKeccak256Gas(daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize*(totalL1MessagePoppedInChunk+255)/256) + + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) + } + + return totalL1CommitGas, nil +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch 
approximately. +func (d *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (d *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (d *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return true, nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. +func (d *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return 0, 0, nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. +func (d *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return 0, 0, nil +} + +// JSONFromBytes for CodecV0 returns empty values. +func (c *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) { + // DACodecV0 doesn't need this, so just return empty values + return nil, nil +} + +// computeBatchDataHash computes the data hash of the batch. +// Note: The batch hash and batch data hash are two different hashes, +// the former is used for identifying a batch in the contracts, +// the latter is used in the public input to the provers. +func (d *DACodecV0) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + dataBytes := make([]byte, 0, len(chunks)*common.HashLength) + totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore + + for _, chunk := range chunks { + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + if err != nil { + return common.Hash{}, err + } + totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) + chunkHash, err := daChunk.Hash() + if err != nil { + return common.Hash{}, err + } + dataBytes = append(dataBytes, chunkHash.Bytes()...) + } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil +} diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0/codecv0.go deleted file mode 100644 index 2cc8e8e..0000000 --- a/encoding/codecv0/codecv0.go +++ /dev/null @@ -1,544 +0,0 @@ -package codecv0 - -import ( - "encoding/binary" - "encoding/hex" - "errors" - "fmt" - "math" - "math/big" - "strings" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - - "github.com/scroll-tech/da-codec/encoding" -) - -const BlockContextByteSize = 60 -const TxLenByteSize = 4 - -// DABlock represents a Data Availability Block. -type DABlock struct { - BlockNumber uint64 - Timestamp uint64 - BaseFee *big.Int - GasLimit uint64 - NumTransactions uint16 - NumL1Messages uint16 -} - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk struct { - Blocks []*DABlock - Transactions [][]*types.TransactionData -} - -// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. 
-type DAChunkRawTx struct { - Blocks []*DABlock - Transactions []types.Transactions -} - -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte -} - -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - if !block.Header.Number.IsUint64() { - return nil, errors.New("block number is not uint64") - } - - // note: numL1Messages includes skipped messages - numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) - if numL1Messages > math.MaxUint16 { - return nil, errors.New("number of L1 messages exceeds max uint16") - } - - // note: numTransactions includes skipped messages - numL2Transactions := block.NumL2Transactions() - numTransactions := numL1Messages + numL2Transactions - if numTransactions > math.MaxUint16 { - return nil, errors.New("number of transactions exceeds max uint16") - } - - daBlock := DABlock{ - BlockNumber: block.Header.Number.Uint64(), - Timestamp: block.Header.Time, - BaseFee: block.Header.BaseFee, - GasLimit: block.Header.GasLimit, - NumTransactions: uint16(numTransactions), - NumL1Messages: uint16(numL1Messages), - } - - return &daBlock, nil -} - -// Encode serializes the DABlock into a slice of bytes. -func (b *DABlock) Encode() []byte { - bytes := make([]byte, 60) - binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) - binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) - if b.BaseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) - } - binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) - return bytes -} - -// Decode populates the fields of a DABlock from a byte slice. -func (b *DABlock) Decode(bytes []byte) error { - if len(bytes) != 60 { - return errors.New("block encoding is not 60 bytes long") - } - - b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) - b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) - - return nil -} - -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - var blocks []*DABlock - var txs [][]*types.TransactionData - - if chunk == nil { - return nil, errors.New("chunk is nil") - } - - if len(chunk.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(chunk.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - for _, block := range chunk.Blocks { - b, err := NewDABlock(block, totalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - blocks = append(blocks, b) - totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) - txs = append(txs, block.Transactions) - } - - daChunk := DAChunk{ - Blocks: blocks, - Transactions: txs, - } - - return &daChunk, nil -} - -// Encode serializes the DAChunk into a slice of bytes. 
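The chunk wire format produced by Encode below is unchanged by this refactor (the new DACodecV0.DecodeDAChunksRawTx parses the same bytes). A sketch of the layout, assuming the 60-byte block context and 4-byte length prefix noted above:

	chunk[0]                   numBlocks (1 byte)
	chunk[1 : 1+60*numBlocks]  block contexts, 60 bytes each
	remainder, per L2 tx:      4-byte big-endian payload length, then the RLP-encoded transaction
	                           (L1 message transactions are omitted entirely)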
-func (c *DAChunk) Encode() ([]byte, error) { - if len(c.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(c.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) - - var l2TxDataBytes []byte - - for _, block := range c.Blocks { - chunkBytes = append(chunkBytes, block.Encode()...) - } - - for _, blockTxs := range c.Transactions { - for _, txData := range blockTxs { - if txData.Type == types.L1MessageTxType { - continue - } - - var txLen [4]byte - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return nil, err - } - binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) - l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) - l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) - } - } - - chunkBytes = append(chunkBytes, l2TxDataBytes...) - return chunkBytes, nil -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -func DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx - for _, chunk := range bytes { - if len(chunk) < 1 { - return nil, fmt.Errorf("invalid chunk, length is less than 1") - } - - numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) - } - - blocks := make([]*DABlock, numBlocks) - for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlock{} - err := blocks[i].Decode(chunk[startIdx:endIdx]) - if err != nil { - return nil, err - } - } - - var transactions []types.Transactions - currentIndex := 1 + numBlocks*BlockContextByteSize - for _, block := range blocks { - var blockTransactions types.Transactions - // ignore L1 msg transactions from the block, consider only L2 transactions - txNum := int(block.NumTransactions - block.NumL1Messages) - for i := 0; i < txNum; i++ { - if len(chunk) < currentIndex+TxLenByteSize { - return nil, fmt.Errorf("chunk size doesn't match, next tx size is less then 4, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+TxLenByteSize, i) - } - txLen := int(binary.BigEndian.Uint32(chunk[currentIndex : currentIndex+TxLenByteSize])) - if len(chunk) < currentIndex+TxLenByteSize+txLen { - return nil, fmt.Errorf("chunk size doesn't match with next tx length, byte length of chunk: %v, expected minimum length: %v, txNum without l1 msgs: %d", len(chunk), currentIndex+TxLenByteSize+txLen, i) - } - txData := chunk[currentIndex+TxLenByteSize : currentIndex+TxLenByteSize+txLen] - tx := &types.Transaction{} - err := tx.UnmarshalBinary(txData) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal tx, pos of tx in chunk bytes: %d. tx num without l1 msgs: %d, err: %w", currentIndex, i, err) - } - blockTransactions = append(blockTransactions, tx) - currentIndex += TxLenByteSize + txLen - } - transactions = append(transactions, blockTransactions) - } - - chunks = append(chunks, &DAChunkRawTx{ - Blocks: blocks, - Transactions: transactions, - }) - } - return chunks, nil -} - -// Hash computes the hash of the DAChunk data. 
-func (c *DAChunk) Hash() (common.Hash, error) { - chunkBytes, err := c.Encode() - if err != nil { - return common.Hash{}, err - } - - if len(chunkBytes) == 0 { - return common.Hash{}, errors.New("chunk data is empty and cannot be processed") - } - numBlocks := chunkBytes[0] - - // concatenate block contexts - var dataBytes []byte - for i := 0; i < int(numBlocks); i++ { - // only the first 58 bytes of each BlockContext are needed for the hashing process - dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) - } - - // concatenate l1 and l2 tx hashes - for _, blockTxs := range c.Transactions { - var l1TxHashes []byte - var l2TxHashes []byte - for _, txData := range blockTxs { - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) - } - if txData.Type == types.L1MessageTxType { - l1TxHashes = append(l1TxHashes, hashBytes...) - } else { - l2TxHashes = append(l2TxHashes, hashBytes...) - } - } - dataBytes = append(dataBytes, l1TxHashes...) - dataBytes = append(dataBytes, l2TxHashes...) - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { - // compute batch data hash - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore - - for _, chunk := range batch.Chunks { - // build data hash - daChunk, err := NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return nil, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - daChunkHash, err := daChunk.Hash() - if err != nil { - return nil, err - } - dataBytes = append(dataBytes, daChunkHash.Bytes()...) - } - - // compute data hash - dataHash := crypto.Keccak256Hash(dataBytes) - - // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - daBatch := DABatch{ - Version: uint8(encoding.CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - } - - return &daBatch, nil -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { - if len(data) < 89 { - return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) - } - - b := &DABatch{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[57:89]), - SkippedL1MessageBitmap: data[89:], - } - - return b, nil -} - -// Encode serializes the DABatch into bytes. 
-func (b *DABatch) Encode() []byte { - batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.ParentBatchHash[:]) - copy(batchBytes[89:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 - -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) -} - -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { - memorySizeWord := (memoryByteSize + 31) / 32 - memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) - return memoryCost -} - -// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { - var size uint64 - for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { - continue - } - size += 4 // 4 bytes payload length - txPayloadLength, err := getTxPayloadLength(txData) - if err != nil { - return 0, err - } - size += txPayloadLength - } - size += 60 // 60 bytes BlockContext - return size, nil -} - -// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { - var total uint64 - var numL1Messages uint64 - for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { - numL1Messages++ - continue - } - - txPayloadLength, err := getTxPayloadLength(txData) - if err != nil { - return 0, err - } - total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += GetKeccak256Gas(txPayloadLength) // l2 tx hash - } - - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 - - // sload - total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue - - // staticcall - total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue - total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - - return total, nil -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
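The two gas helpers above are carried over by the new codec as getKeccak256Gas and getMemoryExpansionCost, presumably with the same formulas. A worked example for hashing a 121-byte batch header (89-byte prefix plus one 32-byte bitmap word):

	words      = (121 + 31) / 32            = 4
	memoryCost = words*words/512 + 3*words  = 0 + 12 = 12   (integer division)
	keccakGas  = memoryCost + 30 + 6*words  = 12 + 30 + 24  = 66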
-func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { - var totalL1CommitCalldataSize uint64 - for _, block := range c.Blocks { - blockL1CommitCalldataSize, err := EstimateBlockL1CommitCalldataSize(block) - if err != nil { - return 0, err - } - totalL1CommitCalldataSize += blockL1CommitCalldataSize - } - return totalL1CommitCalldataSize, nil -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { - var totalTxNum uint64 - var totalL1CommitGas uint64 - for _, block := range c.Blocks { - totalTxNum += uint64(len(block.Transactions)) - blockL1CommitGas, err := EstimateBlockL1CommitGas(block) - if err != nil { - return 0, err - } - totalL1CommitGas += blockL1CommitGas - } - - numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk - - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash - return totalL1CommitGas, nil -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { - var totalL1CommitGas uint64 - - // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata - - // adjusting gas: - // add 1 time cold sload (2100 gas) for L1MessageQueue - // add 1 time cold address access (2600 gas) for L1MessageQueue - // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata - - // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) - - totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore - - for _, chunk := range b.Chunks { - chunkL1CommitGas, err := EstimateChunkL1CommitGas(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += chunkL1CommitGas - - totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) - totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - - totalL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) - } - - return totalL1CommitGas, nil -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
-func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { - var totalL1CommitCalldataSize uint64 - for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) - if err != nil { - return 0, err - } - totalL1CommitCalldataSize += chunkL1CommitCalldataSize - } - return totalL1CommitCalldataSize, nil -} - -func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return 0, err - } - return uint64(len(rlpTxData)), nil -} diff --git a/encoding/codecv0/codecv0_test.go b/encoding/codecv0/codecv0_test.go deleted file mode 100644 index 0a5b514..0000000 --- a/encoding/codecv0/codecv0_test.go +++ /dev/null @@ -1,643 +0,0 @@ -package codecv0 - -import ( - "encoding/hex" - "encoding/json" - "math/big" - "os" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/log" - - "github.com/scroll-tech/da-codec/encoding" -) - -func TestCodecV0(t *testing.T) { - glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat())) - glogger.Verbosity(log.LvlInfo) - log.Root().SetHandler(glogger) - - parentDABatch, err := NewDABatch(&encoding.Batch{ - Index: 0, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: common.Hash{}, - Chunks: nil, - }) - assert.NoError(t, err) - parentBatchHash := parentDABatch.Hash() - - block1 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block5 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block6 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - - blockL1CommitCalldataSize, err := EstimateBlockL1CommitCalldataSize(block1) - assert.NoError(t, err) - assert.Equal(t, uint64(298), blockL1CommitCalldataSize) - blockL1CommitGas, err := EstimateBlockL1CommitGas(block1) - assert.NoError(t, err) - assert.Equal(t, uint64(4900), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block2) - assert.NoError(t, err) - assert.Equal(t, uint64(5745), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block2) - assert.NoError(t, err) - assert.Equal(t, uint64(93613), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block3) - assert.NoError(t, err) - assert.Equal(t, uint64(96), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block3) - assert.NoError(t, err) - assert.Equal(t, uint64(4187), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block4) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block4) - assert.NoError(t, err) - assert.Equal(t, uint64(14020), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block5) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block5) - assert.NoError(t, err) - assert.Equal(t, uint64(8796), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block6) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = 
EstimateBlockL1CommitGas(block6) - assert.NoError(t, err) - assert.Equal(t, uint64(6184), blockL1CommitGas) - - // Test case: when the batch and chunk contains one block. - chunk := &encoding.Chunk{ - Blocks: []*encoding.Block{block1}, - } - chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(298), chunkL1CommitCalldataSize) - chunkL1CommitGas, err := EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(6042), chunkL1CommitGas) - - daChunk, err := NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err := daChunk.Encode() - assert.NoError(t, err) - chunkHexString := hex.EncodeToString(chunkBytes) - assert.Equal(t, 299, len(chunkBytes)) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", chunkHexString) - daChunkHash, err := daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xde642c68122634b33fa1e6e4243b17be3bfd0dc6f996f204ef6d7522516bd840"), daChunkHash) - - batch := &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err := EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(298), batchL1CommitCalldataSize) - batchL1CommitGas, err := EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(162591), batchL1CommitGas) - - daBatch, err := NewDABatch(batch) - assert.NoError(t, err) - batchBytes := daBatch.Encode() - batchHexString := hex.EncodeToString(batchBytes) - assert.Equal(t, 89, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000000000000000000008fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3eb0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0", batchHexString) - assert.Equal(t, 0, len(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(0), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(0), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa906c7d2b6b68ea5fec3ff9d60d41858676e0d365e5d5ef07b2ce20fcf24ecd7"), daBatch.Hash()) - - decodedDABatch, err := NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes := decodedDABatch.Encode() - decodedBatchHexString := hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: when the batch and chunk contains two block. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block1, block2}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(6043), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(100742), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 6044, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x014916a83eccdb0d01e814b4d4ab90eb9049ba9a3cb0994919b86ad873bcd028"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(6043), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(257897), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 89, len(batchBytes)) - assert.Equal(t, "0000000000000000010000000000000000000000000000000074dd561a36921590926bee01fd0d53747c5f3e48e48a2d5538b9ab0e1511cfd7b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0", batchHexString) - assert.Equal(t, 0, len(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(0), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(0), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xb02e39b740756824d20b2cac322ac365121411ced9d6e34de98a0b247c6e23e6"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: when the chunk contains one block with 1 L1MsgTx. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block3}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(96), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(5329), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - chunkHexString = hex.EncodeToString(chunkBytes) - assert.Equal(t, 97, len(chunkBytes)) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", chunkHexString) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x9e643c8a9203df542e39d9bfdcb07c99575b3c3d557791329fef9d83cc4147d0"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(96), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(161889), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1cab0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab000000000000000000000000000000000000000000000000000000000000003ff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap := "00000000000000000000000000000000000000000000000000000000000003ff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(11), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(11), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa18f07cb56ab4f2db5914d9b5699c5932bea4b5c73e71c8cec79151c11e9e986"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: batch contains multiple chunks, chunk contains multiple blocks. 
- chunk1 := &encoding.Chunk{ - Blocks: []*encoding.Block{block1, block2, block3}, - } - chunk1L1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk1) - assert.NoError(t, err) - assert.Equal(t, uint64(6139), chunk1L1CommitCalldataSize) - chunk1L1CommitGas, err := EstimateChunkL1CommitGas(chunk1) - assert.NoError(t, err) - assert.Equal(t, uint64(106025), chunk1L1CommitGas) - - daChunk1, err := NewDAChunk(chunk1, 0) - assert.NoError(t, err) - chunkBytes1, err := daChunk1.Encode() - assert.NoError(t, err) - assert.Equal(t, 6140, len(chunkBytes1)) - - chunk2 := &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunk2L1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunk2L1CommitCalldataSize) - chunk2L1CommitGas, err := EstimateChunkL1CommitGas(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunk2L1CommitGas) - - daChunk2, err := NewDAChunk(chunk2, 0) - assert.NoError(t, err) - chunkBytes2, err := daChunk2.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes2)) - - daChunksRawTx, err := DecodeDAChunksRawTx([][]byte{chunkBytes1, chunkBytes2}) - assert.NoError(t, err) - // assert number of chunks - assert.Equal(t, 2, len(daChunksRawTx)) - - // assert block in first chunk - assert.Equal(t, 3, len(daChunksRawTx[0].Blocks)) - assert.Equal(t, daChunk1.Blocks[0], daChunksRawTx[0].Blocks[0]) - assert.Equal(t, daChunk1.Blocks[1], daChunksRawTx[0].Blocks[1]) - daChunksRawTx[0].Blocks[2].BaseFee = nil - assert.Equal(t, daChunk1.Blocks[2], daChunksRawTx[0].Blocks[2]) - - // assert block in second chunk - assert.Equal(t, 1, len(daChunksRawTx[1].Blocks)) - daChunksRawTx[1].Blocks[0].BaseFee = nil - assert.Equal(t, daChunk2.Blocks[0], daChunksRawTx[1].Blocks[0]) - - // assert transactions in first chunk - assert.Equal(t, 3, len(daChunksRawTx[0].Transactions)) - // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs - assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) - assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) - assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[2])) - - assert.EqualValues(t, daChunk1.Transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) - assert.EqualValues(t, daChunk1.Transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) - - // assert transactions in second chunk - assert.Equal(t, 1, len(daChunksRawTx[1].Transactions)) - // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs - assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[0])) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk1, chunk2}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(6199), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(279054), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, 
"000000000000000001000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fcb0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000001ffffffbff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000001ffffffbff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(42), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xf7bd6afe02764e4e6df23a374d753182b57fa77be71aaf1cd8365e15a51872d1"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - decodedBitmap, err := encoding.DecodeBitmap(decodedDABatch.SkippedL1MessageBitmap, int(decodedDABatch.L1MessagePopped)) - assert.NoError(t, err) - assert.True(t, encoding.IsL1MessageSkipped(decodedBitmap, 0)) - assert.True(t, encoding.IsL1MessageSkipped(decodedBitmap, 9)) - assert.False(t, encoding.IsL1MessageSkipped(decodedBitmap, 10)) - assert.True(t, encoding.IsL1MessageSkipped(decodedBitmap, 11)) - assert.True(t, encoding.IsL1MessageSkipped(decodedBitmap, 36)) - assert.False(t, encoding.IsL1MessageSkipped(decodedBitmap, 37)) - assert.False(t, encoding.IsL1MessageSkipped(decodedBitmap, 38)) - assert.False(t, encoding.IsL1MessageSkipped(decodedBitmap, 39)) - assert.False(t, encoding.IsL1MessageSkipped(decodedBitmap, 40)) - assert.False(t, encoding.IsL1MessageSkipped(decodedBitmap, 41)) - - // Test case: many consecutive L1 Msgs in 1 bitmap, no leading skipped msgs. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 37, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(171730), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "0000000000000000010000000000000005000000000000002ac62fb58ec2d5393e00960f1cc23cab883b685296efa03d13ea2dd4c6de79cc55b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000000000000000", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000000000000000" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(5), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x841f4657b7eb723cae35377cf2963b51191edad6a3b182d4c8524cb928d2a413"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many consecutive L1 Msgs in 1 bitmap, with leading skipped msgs. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(171810), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000001fffffffff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000001fffffffff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(42), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa28766a3617cf244cc397fc4ce4c23022ec80f152b9f618807ac7e7c11486612"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many sparse L1 Msgs in 1 bitmap. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block5}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(9947), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(166504), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4db0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab000000000000000000000000000000000000000000000000000000000000001dd", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "00000000000000000000000000000000000000000000000000000000000001dd" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(10), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(10), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x2fee2073639eb9795007f7e765b3318f92658822de40b2134d34a478a0e9058a"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many L1 Msgs in each of 2 bitmaps. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block6}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(7326), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(164388), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 153, len(batchBytes)) - assert.Equal(t, "00000000000000000100000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", batchHexString) - assert.Equal(t, 64, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(257), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(257), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x84206bc6d0076a233fc7120a0bec4e03bf2512207437768828384dddb335ba2e"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) -} - -func TestErrorPaths(t *testing.T) { - // Test case: when the chunk is nil. - _, err := NewDAChunk(nil, 100) - assert.Error(t, err) - assert.Contains(t, err.Error(), "chunk is nil") - - // Test case: when the chunk contains no blocks. - chunk := &encoding.Chunk{ - Blocks: []*encoding.Block{}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of blocks is 0") - - // Test case: when the chunk contains more than 255 blocks. - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{}, - } - for i := 0; i < 256; i++ { - chunk.Blocks = append(chunk.Blocks, &encoding.Block{}) - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of blocks exceeds 1 byte") - - // Test case: Header.Number is not a uint64. 
- block := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block.Header.Number = new(big.Int).Lsh(block.Header.Number, 64) - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "block number is not uint64") - - // Test case: number of transactions exceeds max uint16. - block = readBlockFromJSON(t, "../testdata/blockTrace_02.json") - for i := 0; i < 65537; i++ { - block.Transactions = append(block.Transactions, block.Transactions[0]) - } - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of transactions exceeds max uint16") - - // Test case: decode transaction with hex string without 0x prefix error. - block = readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block.Transactions = block.Transactions[:1] - block.Transactions[0].Data = "not-a-hex" - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.Error(t, err) - assert.Contains(t, err.Error(), "hex string without 0x prefix") - _, err = EstimateChunkL1CommitGas(chunk) - assert.Error(t, err) - assert.Contains(t, err.Error(), "hex string without 0x prefix") - - // Test case: number of L1 messages exceeds max uint16. - block = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - for i := 0; i < 65535; i++ { - tx := &block.Transactions[i] - txCopy := *tx - txCopy.Nonce = uint64(i + 1) - block.Transactions = append(block.Transactions, txCopy) - } - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of L1 messages exceeds max uint16") -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv0_test.go b/encoding/codecv0_test.go new file mode 100644 index 0000000..32a29cf --- /dev/null +++ b/encoding/codecv0_test.go @@ -0,0 +1,736 @@ +package encoding + +import ( + "encoding/hex" + "testing" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCodecV0BlockEncode(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + block := &daBlockV0{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv0.NewDABlock(block2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv0.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 
:= readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv0.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv0.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv0.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv0.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV0ChunkEncode(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV0 := &daChunkV0{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV0.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, 
"0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e5000100000000163102f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d260806040523480156200001157600080fd5b50604051620014b2380380620014b2833981810160405260a08110156200003757600080fd5b815160208301516040808501805191519395929483019291846401000000008211156200006357600080fd5b9083019060208201858111156200007957600080fd5b82516401000000008111828201881017156200009457600080fd5b82525081516020918201929091019080838360005b83811015620000c3578181015183820152602001620000a9565b50505050905090810190601f168015620000f15780820380516001836020036101000a031916815260200191505b50604052602001805160405193929190846401000000008211156200011557600080fd5b9083019060208201858111156200012b57600080fd5b82516401000000008111828201881017156200014657600080fd5b82525081516020918201929091019080838360005b83811015620001755781810151838201526020016200015b565b50505050905090810190601f168015620001a35780820380516001836020036101000a031916815260200191505b5060405260209081015185519093508592508491620001c8916003918501906200026b565b508051620001de9060049060208401906200026b565b50506005805461ff001960ff1990911660121716905550600680546001600160a01b038088166001600160a01b0319928316179092556007805492871692909116919091179055620002308162000255565b50506005805462010000600160b01b0319163362010000021790555062000307915050565b6005805460ff191660ff92909216919091179055565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f10620002ae57805160ff1916838001178555620002de565b82800160010185558215620002de579182015b82811115620002de578251825591602001919060010190620002c1565b50620002ec929150620002f0565b5090565b5b80821115620002ec5760008155600101620002f1565b61119b80620003176000396000f3fe608060405234801561001057600080fd5b506004361061010b5760003560e01c80635c975abb116100a257806395d89b411161007157806395d89b41146103015780639dc29fac14610309578063a457c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a08231146102a55780638456cb59146102cb5780638e50817a146102d35761010b565b8063313ce567116100de578063313ce5671461021d578063395093511461023b5780633f4ba83a1461026757806340c10f19146102715761010b565b806306fdde0314610110578063095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e7575b600080fd5b6101186103bb565b6040805160208082528351818301528351919283929083019185019080838360005b8381101561015257818101518382015260200161013a565b50505050905090810190601f16801561017f5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101b9600480360360408110156101a357600080fd5b506001600160a01b038135169060200135610451565b604080519115158252519081900360200190f35b6101d561046e565b60408051918252519081900360200190f35b6101b9600480360360608110156101fd57600080fd5b506001600160a01b03813581169160208101359091169060400135610474565b6102256104fb565b6040805160ff9092168252519081900360200190f35b6101b96004803603604081101561025157600080fd5b506001600160a01b038135169060200135610504565b61026f610552565b005b61026f6004803603604081101561028757600080fd5b506001600160a01b0381351690602001356105a9565b6101b9610654565b6101d5600480360360208110156102bb57600080fd5b50356001600160a01b0316610662565b61026f61067d565b61026f600480360360408110156102e957600080fd5b506001600160a01b03813581169160200135166106d2565b610118610757565b61026f6004803603604081101561031f57600080fd5b506001600160a01b0381351690602001356107b8565b6101b96004803603604081101561034b57600080fd5b506001600160a01b03813516906020013561085f565b6101b96004803603604081101561037757600080fd5b506001600160
a01b0381351690602001356108c7565b6101d5600480360360408110156103a357600080fd5b506001600160a01b03813581169160200135166108db565b60038054604080516020601f60026000196101006001881615020190951694909404938401819004810282018101909252828152606093909290918301828280156104475780601f1061041c57610100808354040283529160200191610447565b820191906000526020600020905b81548152906001019060200180831161042a57829003601f168201915b5050505050905090565b600061046561045e610906565b848461090a565b50600192915050565b60025490565b60006104818484846109f6565b6104f18461048d610906565b6104ec85604051806060016040528060288152602001611085602891396001600160a01b038a166000908152600160205260408120906104cb610906565b6001600160a01b031681526020810191909152604001600020549190610b51565b61090a565b5060019392505050565b60055460ff1690565b6000610465610511610906565b846104ec8560016000610522610906565b6001600160a01b03908116825260208083019390935260409182016000908120918c168152925290205490610be8565b6007546001600160a01b0316331461059f576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c49565b565b600554610100900460ff16156105f9576040805162461bcd60e51b815260206004820152601060248201526f14185d5cd8589b194e881c185d5cd95960821b604482015290519081900360640190fd5b6006546001600160a01b03163314610646576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6106508282610ced565b5050565b600554610100900460ff1690565b6001600160a01b031660009081526020819052604090205490565b6007546001600160a01b031633146106ca576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610ddd565b6005546201000090046001600160a01b03163314610726576040805162461bcd60e51b815260206004820152600c60248201526b6f6e6c7920466163746f727960a01b604482015290519081900360640190fd5b600780546001600160a01b039283166001600160a01b03199182161790915560068054939092169216919091179055565b60048054604080516020601f60026000196101006001881615020190951694909404938401819004810282018101909252828152606093909290918301828280156104475780601f1061041c57610100808354040283529160200191610447565b600554610100900460ff1615610808576040805162461bcd60e51b815260206004820152601060248201526f14185d5cd8589b194e881c185d5cd95960821b604482015290519081900360640190fd5b6006546001600160a01b03163314610855576040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b604482015290519081900360640190fd5b6106508282610e65565b600061046561086c610906565b846104ec856040518060600160405280602581526020016111176025913960016000610896610906565b6001600160a01b03908116825260208083019390935260409182016000908120918d16815292529020549190610b51565b60006104656108d4610906565b84846109f6565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205490565b3390565b6001600160a01b03831661094f5760405162461bcd60e51b81526004018080602001828103825260248152602001806110f36024913960400191505060405180910390fd5b6001600160a01b0382166109945760405162461bcd60e51b815260040180806020018281038252602281526020018061103d6022913960400191505060405180910390fd5b6001600160a01b03808416600081815260016020908152604080832094871680845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a3505050565b6001600160a01b038316610a3b5760405162461bcd60e51b81526004018080602001828103825260258152602001806110ce6025913960400191505060405180910390fd5b6001600160a01b038216610a805760405162461bcd60e51b8152600401808060200182810382526023815260200180610ff8602
3913960400191505060405180910390fd5b610a8b838383610f61565b610ac88160405180606001604052806026815260200161105f602691396001600160a01b0386166000908152602081905260409020549190610b51565b6001600160a01b038085166000908152602081905260408082209390935590841681522054610af79082610be8565b6001600160a01b038084166000818152602081815260409182902094909455805185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef92918290030190a3505050565b60008184841115610be05760405162461bcd60e51b81526004018080602001828103825283818151815260200191508051906020019080838360005b83811015610ba5578181015183820152602001610b8d565b50505050905090810190601f168015610bd25780820380516001836020036101000a031916815260200191505b509250505060405180910390fd5b505050900390565b600082820183811015610c42576040805162461bcd60e51b815260206004820152601b60248201527f536166654d6174683a206164646974696f6e206f766572666c6f770000000000604482015290519081900360640190fd5b9392505050565b600554610100900460ff16610c9c576040805162461bcd60e51b815260206004820152601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b6005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b039092168252519081900360200190a1565b6001600160a01b038216610d48576040805162461bcd60e51b815260206004820152601f60248201527f45524332303a206d696e7420746f20746865207a65726f206164647265737300604482015290519081900360640190fd5b610d5460008383610f61565b600254610d619082610be8565b6002556001600160a01b038216600090815260208190526040902054610d879082610be8565b6001600160a01b0383166000818152602081815260408083209490945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b600554610100900460ff1615610e2d576040805162461bcd60e51b815260206004820152601060248201526f14185d5cd8589b194e881c185d5cd95960821b604482015290519081900360640190fd5b6005805461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b038216610eaa5760405162461bcd60e51b81526004018080602001828103825260218152602001806110ad6021913960400191505060405180910390fd5b610eb682600083610f61565b610ef38160405180606001604052806022815260200161101b602291396001600160a01b0385166000908152602081905260409020549190610b51565b6001600160a01b038316600090815260208190526040902055600254610f199082610fb5565b6002556040805182815290516000916001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c838383610fb0565b610f74610654565b15610fb05760405162461bcd60e51b815260040180806020018281038252602a81526020018061113c602a913960400191505060405180910390fd5b505050565b6000610c4283836040518060400160405280601e81526020017f536166654d6174683a207375627472616374696f6e206f766572666c6f770000815250610b5156fe45524332303a207472616e7366657220746f20746865207a65726f206164647265737345524332303a206275726e20616d6f756e7420657863656564732062616c616e636545524332303a20617070726f766520746f20746865207a65726f206164647265737345524332303a207472616e7366657220616d6f756e7420657863656564732062616c616e636545524332303a207472616e7366657220616d6f756e74206578636565647320616c6c6f77616e636545524332303a206275726e2066726f6d20746865207a65726f206164647265737345524332303a207472616e736665722066726f6d20746865207a65726f206164647265737345524332303a20617070726f76652066726f6d20746865207a65726f206164647265737345524332303a2064656372656173656420616c6c6f77616e63652062656c6f77207a65726f45524332305061757361626c653a20746f6b656e207472616e73666572207768696c652070
6175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b6300000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000045745544800000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda52095d44b8a9af7", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV0ChunkHash(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV0{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + 
hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xde642c68122634b33fa1e6e4243b17be3bfd0dc6f996f204ef6d7522516bd840", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xde29f4371cc396b2e7c536cdc7a7c20ac5c728cbb8af3247074c746ff452632b", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x9e643c8a9203df542e39d9bfdcb07c99575b3c3d557791329fef9d83cc4147d0", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv0.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} + +func TestCodecV0BatchEncode(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + // empty batch + emptyDABatchV0 := daBatchV0{version: CodecV0} + encoded := hex.EncodeToString(emptyDABatchV0.Encode()) + assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000008fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3e0000000000000000000000000000000000000000000000000000000000000000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "0000000000000000000000000000000000000000000000000019d1fad630fcc61bd49949fa01e58d198f67a58f1c4aea43f32714ceaa9e0e760000000000000000000000000000000000000000000000000000000000000000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = 
hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "00000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d52080000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000002a000000000000002a908c20b6255fd8cd8fb3a7995e9980007ebedcfe359cee2d8e899aefe319836e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "000000000000000000000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) +} + +func TestCodecV0BatchHash(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + // empty batch + daBatchV1 := &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV0, + }, + } + assert.Equal(t, common.HexToHash("0x7f74e58579672e582998264e7e8191c51b6b8981afd0f9bf1a2ffc3abb39e678"), daBatchV1.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(batch) 
+ assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x4605465b7470c8565b123330d7186805caf9a7f2656d8e9e744b62e14ca22c3d"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x922e004553d563bde6560a827c6449200bfd84f92917dfa14d740f26e52c59bc"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xfbb081f25d6d06aefd76f062eee50885faf5bb050c8f31d533fc8560e655b690"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x99f9648e4d090f1222280bec95a3f1e39c6cbcd4bff21eb2ae94b1536bb23acc"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe0950d500d47df4e9c443978682bcccfc8d50983f99ec9232067333a7d32a9d2"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x745a74773cdc7cd0b86b50305f6373c7efeaf051b38a71ea561333708e8a90d9"), daBatch.Hash()) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x85b5c152c5c0b25731bfab6f4d309e94a42ddf0f4c9235189e5cd19c5c008522"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc5e787fa6a83374135c3b95bd8325bcc0440cd5eb2d71bb31ddca67dd2d44f64"), daBatch.Hash()) +} + +func TestCodecV0NewDABatchFromBytes(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + testCases := []struct { + name string + jsonFile string + }{ + {"Empty Batch", ""}, + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var batch *Batch + var daBatch DABatch + var createErr1 error + + if tc.jsonFile == "" { + // Empty daBatch + daBatch = &daBatchV0{version: CodecV0} + } else { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + batch = &Batch{Chunks: []*Chunk{chunk}} + daBatch, createErr1 = codecv0.NewDABatch(batch) + assert.NoError(t, createErr1) + } + + // Encode the DABatch + encodedBytes := daBatch.Encode() + + // Decode the bytes back into a DABatch + decodedDABatch, 
createErr2 := codecv0.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) + + // Compare the hashes of the original and decoded DABatch + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) + }) + } + + // Test with multiple blocks and chunks in a batch + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + chunk2 := &Chunk{Blocks: []*Block{block4, block5}} + batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}} + daBatch, err := codecv0.NewDABatch(batch) + assert.NoError(t, err) + + encodedBytes := daBatch.Encode() + decodedDABatch, err := codecv0.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, err) + + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash()) +} + +func TestCodecV0BatchDataHash(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x8fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3e"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x19d1fad630fcc61bd49949fa01e58d198f67a58f1c4aea43f32714ceaa9e0e76"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x908c20b6255fd8cd8fb3a7995e9980007ebedcfe359cee2d8e899aefe319836e"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: 
[]*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fc"), daBatch.DataHash()) +} + +func TestCodecV0CalldataSizeEstimation(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(298), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(298), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5745), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5745), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(96), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(96), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6043), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv0.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(96), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv0.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6139), batch5CalldataSize) +} + +func TestCodecV0CommitGasEstimation(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + assert.NoError(t, err) + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv0.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(5082), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv0.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(161631), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv0.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(93786), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv0.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(250908), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := 
codecv0.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(4369), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv0.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(160929), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv0.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(98822), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv0.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(4369), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv0.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(260958), batch5Gas) +} + +func TestCodecV0BatchL1MessagePopped(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV0).totalL1MessagePopped) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(11), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(11), daBatch.(*daBatchV0).totalL1MessagePopped) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(5), daBatch.(*daBatchV0).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(10), daBatch.(*daBatchV0).l1MessagePopped) // skip 7, include 3 + assert.Equal(t, uint64(10), daBatch.(*daBatchV0).totalL1MessagePopped) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(257), daBatch.(*daBatchV0).l1MessagePopped) // skip 255, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 1 + daBatch, err = 
codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(256), daBatch.(*daBatchV0).l1MessagePopped) // skip 254, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV0).totalL1MessagePopped) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 10 + daBatch, err = codecv0.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(32), daBatch.(*daBatchV0).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV0).totalL1MessagePopped) +} + +func TestCodecV0DecodeDAChunksRawTx(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv0.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv0.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() + assert.NoError(t, err) + + daChunksRawTx, err := codecv0.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // assert blocks in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV0).blocks[0], daChunksRawTx[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV0).blocks[1], daChunksRawTx[0].Blocks[1]) + + // assert blocks in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) + daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV0).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0]) + daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV0).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) + + // assert transactions in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions)) + // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + + assert.EqualValues(t, daChunk0.(*daChunkV0).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV0).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + + // assert transactions in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) + // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) +} + +func TestDACodecV0SimpleMethods(t *testing.T) { + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + t.Run("Version", 
func(t *testing.T) { + version := codecv0.Version() + assert.Equal(t, CodecV0, version) + }) + + t.Run("CheckChunkCompressedDataCompatibility", func(t *testing.T) { + chunk := &Chunk{} + compatible, err := codecv0.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.True(t, compatible) + }) + + t.Run("CheckBatchCompressedDataCompatibility", func(t *testing.T) { + batch := &Batch{} + compatible, err := codecv0.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.True(t, compatible) + }) + + t.Run("EstimateChunkL1CommitBatchSizeAndBlobSize", func(t *testing.T) { + chunk := &Chunk{} + batchSize, blobSize, err := codecv0.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk) + assert.NoError(t, err) + assert.Equal(t, uint64(0), batchSize) + assert.Equal(t, uint64(0), blobSize) + }) + + t.Run("EstimateBatchL1CommitBatchSizeAndBlobSize", func(t *testing.T) { + batch := &Batch{} + batchSize, blobSize, err := codecv0.EstimateBatchL1CommitBatchSizeAndBlobSize(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), batchSize) + assert.Equal(t, uint64(0), blobSize) + }) + + t.Run("JSONFromBytes", func(t *testing.T) { + data := []byte("test data") + json, err := codecv0.JSONFromBytes(data) + assert.NoError(t, err) + assert.Nil(t, json) + }) + + t.Run("DecodeTxsFromBlob", func(t *testing.T) { + blob := &kzg4844.Blob{} + chunks := []*DAChunkRawTx{} + err := codecv0.DecodeTxsFromBlob(blob, chunks) + assert.NoError(t, err) + }) +} diff --git a/encoding/codecv0_types.go b/encoding/codecv0_types.go new file mode 100644 index 0000000..0e6c958 --- /dev/null +++ b/encoding/codecv0_types.go @@ -0,0 +1,285 @@ +package encoding + +import ( + "encoding/binary" + "errors" + "fmt" + "math" + "math/big" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +const ( + numberOffset = 0 + timestampOffset = numberOffset + 8 + baseFeeOffset = timestampOffset + 8 + gasLimitOffset = baseFeeOffset + 32 + numTransactionsOffset = gasLimitOffset + 8 + numL1MessagesOffset = numTransactionsOffset + 2 +) + +// daBlockV0 represents a Data Availability Block. +type daBlockV0 struct { + number uint64 + timestamp uint64 + baseFee *big.Int + gasLimit uint64 + numTransactions uint16 + numL1Messages uint16 +} + +// newDABlockV0 is a constructor function for daBlockV0 that initializes the internal fields. +func newDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV0 { + return &daBlockV0{ + number: number, + timestamp: timestamp, + baseFee: baseFee, + gasLimit: gasLimit, + numTransactions: numTransactions, + numL1Messages: numL1Messages, + } +} + +// Encode serializes the DABlock into a slice of bytes. +func (b *daBlockV0) Encode() []byte { + bytes := make([]byte, blockContextByteSize) + binary.BigEndian.PutUint64(bytes[numberOffset:timestampOffset], b.number) + binary.BigEndian.PutUint64(bytes[timestampOffset:baseFeeOffset], b.timestamp) + if b.baseFee != nil { + b.baseFee.FillBytes(bytes[baseFeeOffset:gasLimitOffset]) + } + binary.BigEndian.PutUint64(bytes[gasLimitOffset:numTransactionsOffset], b.gasLimit) + binary.BigEndian.PutUint16(bytes[numTransactionsOffset:numL1MessagesOffset], b.numTransactions) + binary.BigEndian.PutUint16(bytes[numL1MessagesOffset:], b.numL1Messages) + return bytes +} + +// Decode populates the fields of a DABlock from a byte slice. 
+func (b *daBlockV0) Decode(bytes []byte) error { + if len(bytes) != blockContextByteSize { + return errors.New("block encoding is not blockContextByteSize bytes long") + } + + b.number = binary.BigEndian.Uint64(bytes[numberOffset:timestampOffset]) + b.timestamp = binary.BigEndian.Uint64(bytes[timestampOffset:baseFeeOffset]) + b.baseFee = new(big.Int).SetBytes(bytes[baseFeeOffset:gasLimitOffset]) + b.gasLimit = binary.BigEndian.Uint64(bytes[gasLimitOffset:numTransactionsOffset]) + b.numTransactions = binary.BigEndian.Uint16(bytes[numTransactionsOffset:numL1MessagesOffset]) + b.numL1Messages = binary.BigEndian.Uint16(bytes[numL1MessagesOffset:]) + + return nil +} + +// Number returns the block number. +func (b *daBlockV0) Number() uint64 { + return b.number +} + +// Timestamp returns the block timestamp. +func (b *daBlockV0) Timestamp() uint64 { + return b.timestamp +} + +// BaseFee returns the block base fee. +func (b *daBlockV0) BaseFee() *big.Int { + return b.baseFee +} + +// GasLimit returns the block gas limit. +func (b *daBlockV0) GasLimit() uint64 { + return b.gasLimit +} + +// NumTransactions returns the number of transactions in the block. +func (b *daBlockV0) NumTransactions() uint16 { + return b.numTransactions +} + +// NumL1Messages returns the number of L1 messages in the block. +func (b *daBlockV0) NumL1Messages() uint16 { + return b.numL1Messages +} + +// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. +type DAChunkRawTx struct { + Blocks []DABlock + Transactions []types.Transactions +} + +// daChunkV0 groups consecutive DABlocks with their transactions. +type daChunkV0 struct { + blocks []DABlock + transactions [][]*types.TransactionData +} + +// Encode serializes the DAChunk into a slice of bytes. +func (c *daChunkV0) Encode() ([]byte, error) { + if len(c.blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(c.blocks) > math.MaxUint8 { + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(c.blocks), math.MaxUint8) + } + + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.blocks))) + + var l2TxDataBytes []byte + + for _, block := range c.blocks { + chunkBytes = append(chunkBytes, block.Encode()...) + } + + for _, blockTxs := range c.transactions { + for _, txData := range blockTxs { + if txData.Type == types.L1MessageTxType { + continue + } + + var txLen [4]byte + rlpTxData, err := convertTxDataToRLPEncoding(txData) + if err != nil { + return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + } + binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) + l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) + l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) + } + } + + chunkBytes = append(chunkBytes, l2TxDataBytes...) + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. 
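To make the v0 chunk wire format explicit: Encode above writes a 1-byte block count, one 60-byte context per block, and then each non-L1 transaction as a 4-byte big-endian length followed by its RLP bytes. A hedged sketch of walking that layout (hypothetical helper, standard library only, assuming the 60-byte context size):

func splitChunkV0L2Txs(chunk []byte) ([][]byte, error) {
	if len(chunk) < 1 {
		return nil, errors.New("empty chunk")
	}
	numBlocks := int(chunk[0])
	offset := 1 + numBlocks*60 // skip the count byte and the block contexts
	if len(chunk) < offset {
		return nil, errors.New("chunk shorter than its block contexts")
	}
	var rlpTxs [][]byte
	for offset < len(chunk) {
		if offset+4 > len(chunk) {
			return nil, errors.New("truncated tx length prefix")
		}
		txLen := int(binary.BigEndian.Uint32(chunk[offset : offset+4]))
		offset += 4
		if offset+txLen > len(chunk) {
			return nil, errors.New("truncated tx payload")
		}
		rlpTxs = append(rlpTxs, chunk[offset:offset+txLen])
		offset += txLen
	}
	return rlpTxs, nil
}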
+func (c *daChunkV0) Hash() (common.Hash, error) { + chunkBytes, err := c.Encode() + if err != nil { + return common.Hash{}, fmt.Errorf("failed to encode DAChunk: %w", err) + } + + if len(chunkBytes) == 0 { + return common.Hash{}, errors.New("chunk data is empty and cannot be processed") + } + numBlocks := chunkBytes[0] + + // concatenate block contexts + var dataBytes []byte + for i := 0; i < int(numBlocks); i++ { + start := 1 + blockContextByteSize*i + end := start + blockContextBytesForHashing + if end > len(chunkBytes) { + return common.Hash{}, fmt.Errorf("unexpected end index: %d, chunkBytes length: %d", end, len(chunkBytes)) + } + dataBytes = append(dataBytes, chunkBytes[start:end]...) + } + + // concatenate l1 and l2 tx hashes + for _, blockTxs := range c.transactions { + var l1TxHashes []byte + var l2TxHashes []byte + for _, txData := range blockTxs { + hashBytes := common.FromHex(txData.TxHash) + if len(hashBytes) != common.HashLength { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) + } + if txData.Type == types.L1MessageTxType { + l1TxHashes = append(l1TxHashes, hashBytes...) + } else { + l2TxHashes = append(l2TxHashes, hashBytes...) + } + } + dataBytes = append(dataBytes, l1TxHashes...) + dataBytes = append(dataBytes, l2TxHashes...) + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// BlockRange returns the block range of the DAChunk. +func (c *daChunkV0) BlockRange() (uint64, uint64, error) { + if len(c.blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil +} + +// daBatchV0 contains metadata about a batch of DAChunks. +type daBatchV0 struct { + version CodecVersion + batchIndex uint64 + l1MessagePopped uint64 + totalL1MessagePopped uint64 + dataHash common.Hash + parentBatchHash common.Hash + skippedL1MessageBitmap []byte +} + +// newDABatchV0 is a constructor for daBatchV0. +func newDABatchV0(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *daBatchV0 { + return &daBatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + } +} + +// Encode serializes the DABatchV0 into bytes. +func (b *daBatchV0) Encode() []byte { + batchBytes := make([]byte, daBatchV0EncodedMinLength+len(b.skippedL1MessageBitmap)) + batchBytes[daBatchOffsetVersion] = byte(b.version) + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV0OffsetL1MessagePopped], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetL1MessagePopped:daBatchV0OffsetTotalL1MessagePopped], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[daBatchV0OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) + copy(batchBytes[daBatchOffsetDataHash:daBatchV0OffsetParentBatchHash], b.dataHash[:]) + copy(batchBytes[daBatchV0OffsetParentBatchHash:daBatchV0OffsetSkippedL1MessageBitmap], b.parentBatchHash[:]) + copy(batchBytes[daBatchV0OffsetSkippedL1MessageBitmap:], b.skippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *daBatchV0) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// Blob returns the blob of the batch. 
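The v0 batch header serialized by Encode above is a fixed prefix (1+8+8+8+32+32 bytes, assuming daBatchV0EncodedMinLength is 89) followed by the variable-length skipped-message bitmap, and the batch hash is keccak256 over the whole encoding. A sketch of splitting that prefix (hypothetical helper and struct names):

type batchHeaderV0 struct {
	Version              byte
	BatchIndex           uint64
	L1MessagePopped      uint64
	TotalL1MessagePopped uint64
	DataHash             [32]byte
	ParentBatchHash      [32]byte
	SkippedBitmap        []byte
}

func splitBatchHeaderV0(data []byte) (*batchHeaderV0, error) {
	if len(data) < 89 {
		return nil, fmt.Errorf("need at least 89 bytes, got %d", len(data))
	}
	h := &batchHeaderV0{
		Version:              data[0],
		BatchIndex:           binary.BigEndian.Uint64(data[1:9]),
		L1MessagePopped:      binary.BigEndian.Uint64(data[9:17]),
		TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]),
		SkippedBitmap:        data[89:],
	}
	copy(h.DataHash[:], data[25:57])
	copy(h.ParentBatchHash[:], data[57:89])
	return h, nil
}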
+func (b *daBatchV0) Blob() *kzg4844.Blob { + return nil +} + +// BlobBytes returns the blob bytes of the batch. +func (b *daBatchV0) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *daBatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + +// Version returns the version of the DABatch. +func (b *daBatchV0) Version() CodecVersion { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *daBatchV0) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + +// DataHash returns the data hash of the DABatch. +func (b *daBatchV0) DataHash() common.Hash { + return b.dataHash +} diff --git a/encoding/codecv1.go b/encoding/codecv1.go new file mode 100644 index 0000000..b5d1120 --- /dev/null +++ b/encoding/codecv1.go @@ -0,0 +1,458 @@ +package encoding + +import ( + "crypto/sha256" + "encoding/binary" + "errors" + "fmt" + "math" + "math/big" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +type DACodecV1 struct { + DACodecV0 +} + +// Version returns the codec version. +func (d *DACodecV1) Version() CodecVersion { + return CodecV1 +} + +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (d *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > math.MaxUint8 { + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) + } + + blocks := make([]DABlock, 0, len(chunk.Blocks)) + txs := make([][]*types.TransactionData, 0, len(chunk.Blocks)) + + for _, block := range chunk.Blocks { + b, err := d.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := newDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil +} + +// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. 
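DACodecV1 above embeds DACodecV0, so any method it does not override (for example NewDABlock or MaxNumChunksPerBatch) is inherited from the v0 implementation. A minimal, self-contained illustration of that pattern with made-up type names:

type codecBase struct{}

func (codecBase) Version() int              { return 0 }
func (codecBase) MaxNumChunksPerBatch() int { return 15 }

type codecNext struct{ codecBase } // embeds the base codec

func (codecNext) Version() int { return 1 } // overridden

// codecNext{}.Version() == 1, while codecNext{}.MaxNumChunksPerBatch() == 15 (inherited).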
+// Beginning with codecv1, tx data is posted to blobs rather than in the chunk bytes in calldata. +func (d *DACodecV1) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { + chunks := make([]*DAChunkRawTx, 0, len(chunkBytes)) + for _, chunk := range chunkBytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*blockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*blockContextByteSize) + } + + blocks := make([]DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*blockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + blockContextByteSize + blocks[i] = &daBlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, &DAChunkRawTx{ + Blocks: blocks, + Transactions: nil, // Transactions is left empty at this stage because txs have moved to blobs; it is populated later by DecodeTxsFromBlob. + }) + } + return chunks, nil +} + +// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks +func (d *DACodecV1) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + batchBytes := bytesFromBlobCanonical(blob) + return decodeTxsFromBytes(batchBytes[:], chunks, d.MaxNumChunksPerBatch()) +} + +// NewDABatch creates a DABatch from the provided Batch. +func (d *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("batch must contain at least one chunk") + } + + // batch data hash + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, fmt.Errorf("failed to compute batch data hash, index: %d, err: %w", batch.Index, err) + } + + // skipped L1 messages bitmap + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err) + } + + // blob payload + blob, blobVersionedHash, z, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch()) + if err != nil { + return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err) + } + + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + + daBatch := newDABatchV1( + CodecV1, // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + blobVersionedHash, // blobVersionedHash + batch.ParentBatchHash, // parentBatchHash + skippedL1MessageBitmap, // skippedL1MessageBitmap + blob, // blob + z, // z + ) + + return daBatch, nil +} + +// constructBlobPayload constructs the 4844 blob payload. 
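The blob payload built by constructBlobPayload below starts with a metadata section: a 2-byte chunk count and one 4-byte size per possible chunk (15 for codecv1, so tx data begins at byte 62), after which each chunk's L2 transactions are concatenated in order. A sketch of recovering the chunk boundaries from that layout (hypothetical helper, standard library only):

func chunkSpansInBlobPayload(payload []byte, maxNumChunks int) ([][2]int, error) {
	metadataLength := 2 + 4*maxNumChunks // 62 bytes when maxNumChunks is 15
	if len(payload) < metadataLength {
		return nil, errors.New("payload shorter than metadata section")
	}
	numChunks := int(binary.BigEndian.Uint16(payload[0:2]))
	if numChunks > maxNumChunks {
		return nil, errors.New("chunk count exceeds maximum")
	}
	spans := make([][2]int, 0, numChunks)
	offset := metadataLength
	for i := 0; i < numChunks; i++ {
		size := int(binary.BigEndian.Uint32(payload[2+4*i : 2+4*i+4]))
		spans = append(spans, [2]int{offset, offset + size}) // [start, end) of chunk i's tx data
		offset += size
	}
	return spans, nil
}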
+func (d *DACodecV1) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + maxNumChunksPerBatch*4 + + // the raw (un-padded) blob payload + blobBytes := make([]byte, metadataLength) + + // challenge digest preimage + // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*common.HashLength) + + // the chunk data hash used for calculating the challenge preimage + var chunkDataHash common.Hash + + // blob metadata: num_chunks + binary.BigEndian.PutUint16(blobBytes[0:], uint16(len(chunks))) + + // encode blob metadata and L2 transactions, + // and simultaneously also build challenge preimage + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(blobBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into blob payload + rlpTxData, err := convertTxDataToRLPEncoding(tx) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + } + blobBytes = append(blobBytes, rlpTxData...) + } + } + + // blob metadata: chunki_size + chunkSize := len(blobBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize)) + + // challenge: compute chunk data hash + chunkDataHash = crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) + } + + // if we have fewer than maxNumChunksPerBatch chunks, the rest + // of the blob metadata is correctly initialized to 0, + // but we need to add padding to the challenge preimage + for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { + // use the last chunk's data hash as padding + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) + } + + // challenge: compute metadata hash + hash := crypto.Keccak256Hash(blobBytes[0:metadataLength]) + copy(challengePreimage[0:], hash[:]) + + // convert raw data to BLSFieldElements + blob, err := makeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+maxNumChunksPerBatch)*common.HashLength:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + if len(pointBytes) > kzgPointByteSize { + return nil, common.Hash{}, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. 
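constructBlobPayload above derives the KZG challenge point by hashing a preimage made of the metadata hash, the per-chunk data hashes, and the blob versioned hash, then reducing the digest modulo the BLS scalar field. A standalone sketch of that final reduction step (it assumes the package's blsModulus is the BLS12-381 scalar field order and kzgPointByteSize is 32):

func challengePointFromDigest(challengeDigest [32]byte) [32]byte {
	// BLS12-381 scalar field order (the value blsModulus is assumed to hold)
	blsModulus, _ := new(big.Int).SetString(
		"52435875175126190479447740508185965837690552500527637822603658699938581184513", 10)

	// z = keccak256(challengePreimage) mod BLS_MODULUS, left-padded to 32 bytes
	z := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus)
	zBytes := z.Bytes()

	var point [32]byte
	copy(point[32-len(zBytes):], zBytes)
	return point
}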
+// Note: This function only populates the batch header, it leaves the blob-related fields empty. +func (d *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { + if len(data) < daBatchV1EncodedMinLength { + return nil, fmt.Errorf("insufficient data for DABatch, expected at least %d bytes but got %d", daBatchV1EncodedMinLength, len(data)) + } + + if CodecVersion(data[daBatchOffsetVersion]) != CodecV1 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV1, data[daBatchOffsetVersion]) + } + + return newDABatchV1( + CodecVersion(data[daBatchOffsetVersion]), // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV1OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV1OffsetBlobVersionedHash:daBatchV1OffsetParentBatchHash]), // blobVersionedHash + common.BytesToHash(data[daBatchV1OffsetParentBatchHash:daBatchV1OffsetSkippedL1MessageBitmap]), // parentBatchHash + data[daBatchV1OffsetSkippedL1MessageBitmap:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ), nil +} + +func (d *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { + var dataSize uint64 + for _, block := range c.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + rlpTxData, err := convertTxDataToRLPEncoding(tx) + if err != nil { + return 0, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + } + dataSize += uint64(len(rlpTxData)) + } + } + return dataSize, nil +} + +// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. +func (d *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += calldataNonZeroByteGas * blockContextByteSize + + // sload + total += coldSloadGas * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += warmAddressAccessGas * numL1Messages // numL1Messages times call to L1MessageQueue + total += warmAddressAccessGas * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // staticcall to proxy + total += warmAddressAccessGas * numL1Messages // read admin in proxy + total += warmAddressAccessGas * numL1Messages // read impl in proxy + total += warmAddressAccessGas * numL1Messages // access impl + total += getMemoryExpansionCost(functionSignatureBytes+defaultParameterBytes) * numL1Messages // delegatecall to impl + + return total, nil +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
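The per-message terms in EstimateBlockL1CommitGas above lean on two EVM cost formulas. The old exported helpers (removed further down in this diff) spell them out, and the unexported getMemoryExpansionCost and getKeccak256Gas are presumably unchanged; a standalone restatement:

// memoryExpansionCost follows the EVM memory pricing: words^2/512 + 3*words.
func memoryExpansionCost(memoryByteSize uint64) uint64 {
	words := (memoryByteSize + 31) / 32
	return words*words/512 + 3*words
}

// keccak256Gas is 30 static gas plus 6 gas per 32-byte word hashed, plus memory expansion.
func keccak256Gas(size uint64) uint64 {
	return memoryExpansionCost(size) + 30 + 6*((size+31)/32)
}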
+func (d *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + transactions := uint64(len(block.Transactions)) + l2Transactions := block.NumL2Transactions() + if transactions < l2Transactions { + return 0, fmt.Errorf("number of L2 transactions (%d) exceeds total transactions (%d)", l2Transactions, transactions) + } + totalNonSkippedL1Messages += transactions - l2Transactions + blockL1CommitGas, err := d.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += warmSloadGas * numBlocks // numBlocks times warm sload + totalL1CommitGas += calldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += getKeccak256Gas(58*numBlocks + common.HashLength*totalNonSkippedL1Messages) // chunk hash + return totalL1CommitGas, nil +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. +func (d *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + var totalL1CommitGas uint64 + + // Add extra gas costs + totalL1CommitGas += extraGasCost // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * coldSloadGas // 4 one-time cold sload for commitBatch + totalL1CommitGas += sstoreGas // 1 time sstore + totalL1CommitGas += baseTxGas // base gas for tx + totalL1CommitGas += calldataNonZeroByteGas // version in calldata + + // adjusting gas: + // add 1 time cold sload (2100 gas) for L1MessageQueue + // add 1 time cold address access (2600 gas) for L1MessageQueue + // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) + totalL1CommitGas += (coldSloadGas + coldAddressAccessGas - warmSloadGas - warmAddressAccessGas) + totalL1CommitGas += getKeccak256Gas(daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header hash, length is estimated as (constant part) + (1 skippedL1MessageBitmap) + totalL1CommitGas += calldataNonZeroByteGas * (daBatchV0EncodedMinLength + skippedL1MessageBitmapByteSize) // parent batch header in calldata + + // adjust batch data hash gas cost + totalL1CommitGas += getKeccak256Gas(uint64(common.HashLength * len(b.Chunks))) + + totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore + + for _, chunk := range b.Chunks { + chunkL1CommitGas, err := d.EstimateChunkL1CommitGas(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += chunkL1CommitGas + + totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) + totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk + + totalL1CommitGas += calldataNonZeroByteGas * (skippedL1MessageBitmapByteSize * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += getKeccak256Gas(daBatchV3OffsetParentBatchHash + skippedL1MessageBitmapByteSize*(totalL1MessagePoppedInChunk+255)/256) + + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += getMemoryExpansionCost(chunkL1CommitCalldataSize) + } + + return totalL1CommitGas, nil +} + +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. 
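EstimateBatchL1CommitGas above charges calldata and hashing for the skipped-message bitmap in 256-bit words, one word per 256 L1 messages popped, rounded up. A one-line sketch of that sizing (assuming skippedL1MessageBitmapByteSize is 32):

// bitmapBytesForChunk returns how many bitmap bytes a chunk contributes:
// one 32-byte word per 256 L1 messages popped, rounded up.
func bitmapBytesForChunk(l1MessagesPoppedInChunk uint64) uint64 {
	return 32 * ((l1MessagesPoppedInChunk + 255) / 256)
}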
+func (d *DACodecV1) EstimateBlockL1CommitCalldataSize(_ *Block) (uint64, error) { + return blockContextByteSize, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +func (d *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(blockContextByteSize * len(c.Blocks)), nil +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func (d *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := d.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. +func (d *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + metadataSize := uint64(2 + 4*d.MaxNumChunksPerBatch()) + batchDataSize, err := d.chunkL1CommitBlobDataSize(c) + if err != nil { + return 0, 0, err + } + blobSize := calculatePaddedBlobSize(metadataSize + batchDataSize) + return metadataSize + batchDataSize, blobSize, nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. +func (d *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + metadataSize := uint64(2 + 4*d.MaxNumChunksPerBatch()) + var batchDataSize uint64 + for _, c := range b.Chunks { + chunkDataSize, err := d.chunkL1CommitBlobDataSize(c) + if err != nil { + return 0, 0, err + } + batchDataSize += chunkDataSize + } + blobSize := calculatePaddedBlobSize(metadataSize + batchDataSize) + return metadataSize + batchDataSize, blobSize, nil +} + +// computeBatchDataHash computes the data hash of the batch. +// Note: The batch hash and batch data hash are two different hashes, +// the former is used for identifying a batch in the contracts, +// the latter is used in the public input to the provers. +func (d *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + dataBytes := make([]byte, 0, len(chunks)*common.HashLength) + totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore + + for _, chunk := range chunks { + daChunk, err := d.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + if err != nil { + return common.Hash{}, err + } + totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) + chunkHash, err := daChunk.Hash() + if err != nil { + return common.Hash{}, err + } + dataBytes = append(dataBytes, chunkHash.Bytes()...) 
+ } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil +} diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go deleted file mode 100644 index 25c6798..0000000 --- a/encoding/codecv1/codecv1.go +++ /dev/null @@ -1,648 +0,0 @@ -package codecv1 - -import ( - "crypto/sha256" - "encoding/binary" - "encoding/hex" - "errors" - "fmt" - "math/big" - "strings" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = 15 - -const BlockContextByteSize = codecv0.BlockContextByteSize - -// DABlock represents a Data Availability Block. -type DABlock = codecv0.DABlock - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk codecv0.DAChunk - -// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. -type DAChunkRawTx = codecv0.DAChunkRawTx - -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point -} - -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv0.NewDABlock(block, totalL1MessagePoppedBefore) -} - -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - if len(chunk.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(chunk.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var blocks []*DABlock - var txs [][]*types.TransactionData - - for _, block := range chunk.Blocks { - b, err := NewDABlock(block, totalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - blocks = append(blocks, b) - totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) - txs = append(txs, block.Transactions) - } - - daChunk := DAChunk{ - Blocks: blocks, - Transactions: txs, - } - - return &daChunk, nil -} - -// Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunk) Encode() []byte { - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) - - for _, block := range c.Blocks { - blockBytes := block.Encode() - chunkBytes = append(chunkBytes, blockBytes...) - } - - return chunkBytes -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. 
-// Beginning from codecv1 tx data posted to blobs, not to chunk bytes in calldata -func DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - var chunks []*DAChunkRawTx - for _, chunk := range bytes { - if len(chunk) < 1 { - return nil, fmt.Errorf("invalid chunk, length is less than 1") - } - - numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) - } - - blocks := make([]*DABlock, numBlocks) - for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlock{} - err := blocks[i].Decode(chunk[startIdx:endIdx]) - if err != nil { - return nil, err - } - } - - var transactions []types.Transactions - - chunks = append(chunks, &DAChunkRawTx{ - Blocks: blocks, - Transactions: transactions, // Transactions field is still empty in the phase of DecodeDAChunksRawTx, because txs moved to bobs and filled in DecodeTxsFromBlob method. - }) - } - return chunks, nil -} - -// Hash computes the hash of the DAChunk data. -func (c *DAChunk) Hash() (common.Hash, error) { - var dataBytes []byte - - // concatenate block contexts - for _, block := range c.Blocks { - encodedBlock := block.Encode() - // only the first 58 bytes are used in the hashing process - dataBytes = append(dataBytes, encodedBlock[:58]...) - } - - // concatenate l1 tx hashes - for _, blockTxs := range c.Transactions { - for _, txData := range blockTxs { - if txData.Type != types.L1MessageTxType { - continue - } - - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, err - } - if len(hashBytes) != 32 { - return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) - } - dataBytes = append(dataBytes, hashBytes...) - } - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { - // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { - return nil, errors.New("too many chunks in batch") - } - - if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") - } - - // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // blob payload - blob, blobVersionedHash, z, err := constructBlobPayload(batch.Chunks, false /* no mock */) - if err != nil { - return nil, err - } - - daBatch := DABatch{ - Version: uint8(encoding.CodecV1), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - blob: blob, - z: z, - } - - return &daBatch, nil -} - -// ComputeBatchDataHash computes the data hash of the batch. 
-// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore - - for _, chunk := range chunks { - daChunk, err := NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return common.Hash{}, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - chunkHash, err := daChunk.Hash() - if err != nil { - return common.Hash{}, err - } - dataBytes = append(dataBytes, chunkHash.Bytes()...) - } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} - -// constructBlobPayload constructs the 4844 blob payload. -func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // the raw (un-padded) blob payload - blobBytes := make([]byte, metadataLength) - - // challenge digest preimage - // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) - - // the chunk data hash used for calculating the challenge preimage - var chunkDataHash common.Hash - - // blob metadata: num_chunks - binary.BigEndian.PutUint16(blobBytes[0:], uint16(len(chunks))) - - // encode blob metadata and L2 transactions, - // and simultaneously also build challenge preimage - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(blobBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) - if err != nil { - return nil, common.Hash{}, nil, err - } - blobBytes = append(blobBytes, rlpTxData...) 
- } - } - - // blob metadata: chunki_size - if chunkSize := len(blobBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(blobBytes[2+4*chunkID:], uint32(chunkSize)) - } - - // challenge: compute chunk data hash - chunkDataHash = crypto.Keccak256Hash(blobBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // if we have fewer than MaxNumChunks chunks, the rest - // of the blob metadata is correctly initialized to 0, - // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { - // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // challenge: compute metadata hash - hash := crypto.Keccak256Hash(blobBytes[0:metadataLength]) - copy(challengePreimage[0:], hash[:]) - - // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) - if err != nil { - return nil, common.Hash{}, nil, err - } - - // compute blob versioned hash - c, err := kzg4844.BlobToCommitment(blob) - if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") - } - blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) - - // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) - - // compute z = challenge_digest % BLS_MODULUS - challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) - pointBytes := pointBigInt.Bytes() - - // the challenge point z - var z kzg4844.Point - start := 32 - len(pointBytes) - copy(z[start:], pointBytes) - - return blob, blobVersionedHash, &z, nil -} - -// DecodeTxsFromBytes decodes txs from blob bytes and writes to chunks -func DecodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks int) error { - numChunks := int(binary.BigEndian.Uint16(blobBytes[0:2])) - if numChunks != len(chunks) { - return fmt.Errorf("blob chunk number is not same as calldata, blob num chunks: %d, calldata num chunks: %d", numChunks, len(chunks)) - } - index := 2 + maxNumChunks*4 - for chunkID, chunk := range chunks { - var transactions []types.Transactions - chunkSize := int(binary.BigEndian.Uint32(blobBytes[2+4*chunkID : 2+4*chunkID+4])) - - chunkBytes := blobBytes[index : index+chunkSize] - curIndex := 0 - for _, block := range chunk.Blocks { - var blockTransactions types.Transactions - txNum := int(block.NumTransactions - block.NumL1Messages) - for i := 0; i < txNum; i++ { - tx, nextIndex, err := GetNextTx(chunkBytes, curIndex) - if err != nil { - return fmt.Errorf("couldn't decode next tx from blob bytes: %w, index: %d", err, index+curIndex+4) - } - curIndex = nextIndex - blockTransactions = append(blockTransactions, tx) - } - transactions = append(transactions, blockTransactions) - } - chunk.Transactions = transactions - index += chunkSize - } - return nil -} - -// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - batchBytes := encoding.BytesFromBlobCanonical(blob) - return DecodeTxsFromBytes(batchBytes[:], chunks, MaxNumChunks) -} - -var errSmallLength error = fmt.Errorf("length of blob bytes is too small") - -// GetNextTx parses blob bytes to find length of payload of next Tx and decode it -func GetNextTx(bytes []byte, index int) (*types.Transaction, int, error) { - var nextIndex int - 
length := len(bytes) - if length < index+1 { - return nil, 0, errSmallLength - } - var txBytes []byte - if bytes[index] <= 0x7f { - // the first byte is transaction type, rlp encoding begins from next byte - txBytes = append(txBytes, bytes[index]) - index++ - } - if length < index+1 { - return nil, 0, errSmallLength - } - if bytes[index] >= 0xc0 && bytes[index] <= 0xf7 { - // length of payload is simply bytes[index] - 0xc0 - payloadLen := int(bytes[index] - 0xc0) - if length < index+1+payloadLen { - return nil, 0, errSmallLength - } - txBytes = append(txBytes, bytes[index:index+1+payloadLen]...) - nextIndex = index + 1 + payloadLen - } else if bytes[index] > 0xf7 { - // the length of payload is encoded in next bytes[index] - 0xf7 bytes - // length of bytes representation of length of payload - lenPayloadLen := int(bytes[index] - 0xf7) - if length < index+1+lenPayloadLen { - return nil, 0, errSmallLength - } - lenBytes := bytes[index+1 : index+1+lenPayloadLen] - for len(lenBytes) < 8 { - lenBytes = append([]byte{0x0}, lenBytes...) - } - payloadLen := binary.BigEndian.Uint64(lenBytes) - - if length < index+1+lenPayloadLen+int(payloadLen) { - return nil, 0, errSmallLength - } - txBytes = append(txBytes, bytes[index:index+1+lenPayloadLen+int(payloadLen)]...) - nextIndex = index + 1 + lenPayloadLen + int(payloadLen) - } else { - return nil, 0, fmt.Errorf("incorrect format of rlp encoding") - } - tx := &types.Transaction{} - err := tx.UnmarshalBinary(txBytes) - if err != nil { - return nil, 0, fmt.Errorf("failed to unmarshal tx, err: %w", err) - } - return tx, nextIndex, nil -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { - if len(data) < 121 { - return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) - } - - b := &DABatch{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], - } - - return b, nil -} - -// Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. 
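GetNextTx above (presumably kept in spirit by the new unexported decoder) walks raw RLP framing by hand: a leading byte at or below 0x7f marks a typed transaction, 0xc0-0xf7 opens a short list whose payload length is the byte minus 0xc0, and anything above 0xf7 opens a long list whose payload length occupies the next (byte - 0xf7) bytes. A self-contained sketch of just the list-header rule (hypothetical helper):

// rlpListLength returns the total length (header plus payload) of the RLP list
// starting at buf[index], or an error if the buffer is too short or not a list.
func rlpListLength(buf []byte, index int) (int, error) {
	if index >= len(buf) {
		return 0, errors.New("buffer too short")
	}
	b := buf[index]
	switch {
	case b >= 0xc0 && b <= 0xf7:
		// short list: payload length is b - 0xc0
		return 1 + int(b-0xc0), nil
	case b > 0xf7:
		// long list: the next b-0xf7 bytes encode the payload length big-endian
		lenOfLen := int(b - 0xf7)
		if index+1+lenOfLen > len(buf) {
			return 0, errors.New("buffer too short for length bytes")
		}
		payloadLen := 0
		for _, x := range buf[index+1 : index+1+lenOfLen] {
			payloadLen = payloadLen<<8 | int(x)
		}
		return 1 + lenOfLen + payloadLen, nil
	default:
		return 0, errors.New("not an RLP list header")
	}
}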
-func (b *DABatch) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return encoding.BlobDataProofFromValues(*b.z, y, commitment, proof), nil -} - -// Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { - return b.blob -} - -// EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { - metadataSize := uint64(2 + 4*MaxNumChunks) // over-estimate: adding metadata length - chunkDataSize, err := chunkL1CommitBlobDataSize(c) - if err != nil { - return 0, err - } - return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil -} - -// EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. -func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { - metadataSize := uint64(2 + 4*MaxNumChunks) - var batchDataSize uint64 - for _, c := range b.Chunks { - chunkDataSize, err := chunkL1CommitBlobDataSize(c) - if err != nil { - return 0, err - } - batchDataSize += chunkDataSize - } - return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil -} - -func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { - var dataSize uint64 - for _, block := range c.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return 0, err - } - dataSize += uint64(len(rlpTxData)) - } - } - return dataSize, nil -} - -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 - -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) -} - -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { - memorySizeWord := (memoryByteSize + 31) / 32 - memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) - return memoryCost -} - -// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
-func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { - var total uint64 - var numL1Messages uint64 - for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { - numL1Messages++ - continue - } - } - - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 - - // sload - total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue - - // staticcall - total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue - total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - - return total -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return uint64(60 * len(c.Blocks)) -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - var totalNonSkippedL1Messages uint64 - var totalL1CommitGas uint64 - for _, block := range c.Blocks { - totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas := EstimateBlockL1CommitGas(block) - totalL1CommitGas += blockL1CommitGas - } - - numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - return totalL1CommitGas -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - var totalL1CommitGas uint64 - - // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata - - // adjusting gas: - // add 1 time cold sload (2100 gas) for L1MessageQueue - // add 1 time cold address access (2600 gas) for L1MessageQueue - // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) - totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata - - // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) - - totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore - - for _, chunk := range b.Chunks { - chunkL1CommitGas := EstimateChunkL1CommitGas(chunk) - totalL1CommitGas += chunkL1CommitGas - - totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) - totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - - totalL1CommitCalldataSize := EstimateChunkL1CommitCalldataSize(chunk) - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) - } - - return totalL1CommitGas -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
-func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - var totalL1CommitCalldataSize uint64 - for _, chunk := range b.Chunks { - totalL1CommitCalldataSize += EstimateChunkL1CommitCalldataSize(chunk) - } - return totalL1CommitCalldataSize -} diff --git a/encoding/codecv1/codecv1_test.go b/encoding/codecv1_test.go similarity index 59% rename from encoding/codecv1/codecv1_test.go rename to encoding/codecv1_test.go index 6522c59..a9a38ca 100644 --- a/encoding/codecv1/codecv1_test.go +++ b/encoding/codecv1_test.go @@ -1,70 +1,76 @@ -package codecv1 +package encoding import ( "encoding/hex" - "encoding/json" - "os" + "fmt" + "math" "strings" "testing" - "github.com/stretchr/testify/assert" - + "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV1BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + + daBlockV0 := &daBlockV0{} + encoded := hex.EncodeToString(daBlockV0.Encode()) assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv1.NewDABlock(block2, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv1.NewDABlock(block3, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv1.NewDABlock(block4, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv1.NewDABlock(block5, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - trace6 := 
readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv1.NewDABlock(block6, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv1.NewDABlock(block7, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + // sanity check: v0 and v1 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) + for _, block := range []*Block{block2, block3, block4, block5, block6, block7} { + blockv0, err := codecv0.NewDABlock(block, 0) assert.NoError(t, err) encodedv0 := hex.EncodeToString(blockv0.Encode()) - blockv1, err := NewDABlock(trace, 0) + blockv1, err := codecv1.NewDABlock(block, 0) assert.NoError(t, err) encodedv1 := hex.EncodeToString(blockv1.Encode()) @@ -73,877 +79,1009 @@ func TestCodecV1BlockEncode(t *testing.T) { } func TestCodecV1ChunkEncode(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encoded = 
hex.EncodeToString(encodedBytes) assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } 
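TestCodecV1ChunkHash below asserts that L1 message hashes affect the chunk hash while L2 transactions and numL1Messages do not, which matches the hashing rule carried over from the old package: only the first 58 bytes of each block context and the L1 tx hashes enter the preimage. A hedged sketch of that preimage (hypothetical helper; assumes the new daChunkV1.Hash keeps the old rule):

func chunkV1HashPreimage(blockContexts [][]byte, l1TxHashes [][32]byte) []byte {
	var preimage []byte
	for _, ctx := range blockContexts {
		preimage = append(preimage, ctx[:58]...) // numL1Messages (last 2 bytes) excluded
	}
	for _, h := range l1TxHashes {
		preimage = append(preimage, h[:]...)
	}
	return preimage // chunk hash = keccak256(preimage)
}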
func TestCodecV1ChunkHash(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} hash, err := chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) hash, err = chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) hash, err = chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + hash, err = chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) _, err = chunk.Hash() assert.Error(t, err) - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = 
NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv1.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } func TestCodecV1BatchEncode(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV1)} - encoded := hex.EncodeToString(batch.Encode()) + daBatchV1 := &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV1, + }, + } + encoded := hex.EncodeToString(daBatchV1.Encode()) assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc75530000000000000000000000000000000000000000000000000000000000000000", encoded) - trace3 := readBlockFromJSON(t, 
"../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "01000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f94110000000000000000000000000000000000000000000000000000000000000000", encoded) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b401a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := 
&encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "01000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d520801a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a60000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8014ae5927a983081a8bcdbcce19e926c9e4c56e2dc89c91c32c034b875b8a1ca00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "010000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e13476701b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) } func TestCodecV1BatchHash(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV1)} - assert.Equal(t, "0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564", batch.Hash().Hex()) + daBatchV1 := &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV1, + }, + } + assert.Equal(t, common.HexToHash("0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564"), daBatchV1.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd557b02638c0385d5124f7fc188a025b33f8819b7f78c000751404997148ab8b"), daBatch.Hash()) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := 
NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xd557b02638c0385d5124f7fc188a025b33f8819b7f78c000751404997148ab8b", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xf13c7e249d00941c59fe4cd970241bbd6753eede8e043c438165674031792b3b"), daBatch.Hash()) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xf13c7e249d00941c59fe4cd970241bbd6753eede8e043c438165674031792b3b", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xb64208f07fab641f7ebf831686d05ad667da0c7bfabcbd9c878cc22cbc8032fd"), daBatch.Hash()) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xb64208f07fab641f7ebf831686d05ad667da0c7bfabcbd9c878cc22cbc8032fd", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x4f7426d164e885574a661838406083f5292b0a1bc6dc20c51129eed0723b8a27"), daBatch.Hash()) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x4f7426d164e885574a661838406083f5292b0a1bc6dc20c51129eed0723b8a27", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xfce89ec2aed85cebeb20eea722e3ae4ec622bff49218dbe249a2d358e2e85451"), daBatch.Hash()) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xfce89ec2aed85cebeb20eea722e3ae4ec622bff49218dbe249a2d358e2e85451", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x8fc063179b709bab338674278bb7b70dce2879a4e11ea857b3a202fb3313559f"), daBatch.Hash()) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = 
codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x8fc063179b709bab338674278bb7b70dce2879a4e11ea857b3a202fb3313559f", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xf1c94cdf45967bc60bfccd599edd8cb07fd0201f41ab068637834f86140f62bf"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0"), daBatch.Hash()) +} + +func TestCodecV1NewDABatchFromBytes(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + testCases := []struct { + name string + jsonFile string + }{ + {"Empty Batch", ""}, + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var batch *Batch + var daBatch DABatch + var createErr1 error + + if tc.jsonFile == "" { + // Empty daBatch + daBatch = &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV1, + }, + } + } else { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + batch = &Batch{Chunks: []*Chunk{chunk}} + daBatch, createErr1 = codecv1.NewDABatch(batch) + assert.NoError(t, createErr1) + } + + // Encode the DABatch + encodedBytes := daBatch.Encode() + + // Decode the bytes back into a DABatch + decodedDABatch, createErr2 := codecv1.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) + + // Compare the hashes of the original and decoded DABatch + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) + }) + } + + // Test with multiple blocks and chunks in a batch + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + chunk2 := &Chunk{Blocks: []*Block{block4, block5}} + batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}} + daBatch, err := codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xf1c94cdf45967bc60bfccd599edd8cb07fd0201f41ab068637834f86140f62bf", batch.Hash().Hex()) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + encodedBytes := daBatch.Encode() + decodedDABatch, err := codecv1.NewDABatchFromBytes(encodedBytes) assert.NoError(t, err) - assert.Equal(t, "0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0", batch.Hash().Hex()) + + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash()) } func TestCodecV1BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) } -func TestCodecV1BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) +func TestCodecV1CalldataSizeEstimation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk2) assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, - // metadata - "00"+"0001"+"000000e6"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00"+"00"+"000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+ - // tx payload - "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1", encoded) - assert.Equal(t, "0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) - trace3 := 
readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk3) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001000016310000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d26080604000523480156200001157600080fd5b50604051620014b2380380620014b283390081810160405260a08110156200003757600080fd5b8151602083015160408000850180519151939592948301929184640100000000821115620000635760000080fd5b9083019060208201858111156200007957600080fd5b8251640100000000008111828201881017156200009457600080fd5b8252508151602091820100929091019080838360005b83811015620000c357818101518382015260200100620000a9565b50505050905090810190601f168015620000f1578082038051006001836020036101000a031916815260200191505b5060405260200180516000405193929190846401000000008211156200011557600080fd5b908301906000208201858111156200012b57600080fd5b8251640100000000811182820188001017156200014657600080fd5b8252508151602091820192909101908083830060005b83811015620001755781810151838201526020016200015b565b5050005050905090810190601f168015620001a3578082038051600183602003610100000a031916815260200191505b506040526020908101518551909350859250008491620001c8916003918501906200026b565b508051620001de906004906000208401906200026b565b50506005805461ff001960ff199091166012171690005550600680546001600160a01b038088166001600160a01b031992831617900092556007805492871692909116919091179055620002308162000255565b5000506005805462010000600160b01b031916336201000002179055506200030700915050565b6005805460ff191660ff92909216919091179055565b82805460000181600116156101000203166002900490600052602060002090601f01602000900481019282601f10620002ae57805160ff1916838001178555620002de56005b82800160010185558215620002de579182015b82811115620002de57825100825591602001919060010190620002c1565b50620002ec929150620002f056005b5090565b5b80821115620002ec5760008155600101620002f1565b61119b0080620003176000396000f3fe608060405234801561001057600080fd5b50600004361061010b5760003560e01c80635c975abb116100a257806395d89b41110061007157806395d89b41146103015780639dc29fac14610309578063a457c200d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b00565b80635c975abb1461029d57806370a08231146102a55780638456cb5914006102cb5780638e50817a146102d35761010b565b8063313ce567116100de57008063313ce5671461021d578063395093511461023b5780633f4ba83a146102006757806340c10f19146102715761010b565b806306fdde031461011057806300095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e757005b600080fd5b6101186103bb565b604080516020808252835181830152835100919283929083019185019080838360005b838110156101525781810151838200015260200161013a565b50505050905090810190601f16801561017f578082000380516001836020036101000a031916815260200191505b50925050506040005180910390f35b6101b9600480360360408110156101a357600080fd5b50600001600160a01b038135169060200135610451565b60408051911515825251900081900360200190f35b6101d561046e565b6040805191825251908190036020000190f35b6101b9600480360360608110156101fd57600080fd5b50600160010060a01b03813581169160208101359091169060400135610474565b610225610004fb565b6040805160ff9092168252519081900360200190f35b6101b960040080360360408110156
1025157600080fd5b506001600160a01b03813516906000200135610504565b61026f610552565b005b61026f600480360360408110150061028757600080fd5b506001600160a01b0381351690602001356105a9565b006101b9610654565b6101d5600480360360208110156102bb57600080fd5b5000356001600160a01b0316610662565b61026f61067d565b61026f60048036030060408110156102e957600080fd5b506001600160a01b0381358116916020010035166106d2565b610118610757565b61026f6004803603604081101561031f0057600080fd5b506001600160a01b0381351690602001356107b8565b6101b9006004803603604081101561034b57600080fd5b506001600160a01b0381351600906020013561085f565b6101b96004803603604081101561037757600080fd005b506001600160a01b0381351690602001356108c7565b6101d560048036030060408110156103a357600080fd5b506001600160a01b0381358116916020010035166108db565b60038054604080516020601f600260001961010060018816001502019095169490940493840181900481028201810190925282815260609300909290918301828280156104475780601f1061041c5761010080835404028300529160200191610447565b820191906000526020600020905b8154815290600001019060200180831161042a57829003601f168201915b505050505090509000565b600061046561045e610906565b848461090a565b50600192915050565b0060025490565b60006104818484846109f6565b6104f18461048d610906565b006104ec8560405180606001604052806028815260200161108560289139600100600160a01b038a166000908152600160205260408120906104cb610906565b006001600160a01b031681526020810191909152604001600020549190610b5100565b61090a565b5060019392505050565b60055460ff1690565b600061046500610511610906565b846104ec8560016000610522610906565b6001600160a0001b03908116825260208083019390935260409182016000908120918c16815200925290205490610be8565b6007546001600160a01b0316331461059f57604000805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c0049565b565b600554610100900460ff16156105f9576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610646576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610ced565b5050565b600554610100900460ff001690565b6001600160a01b031660009081526020819052604090205490565b006007546001600160a01b031633146106ca576040805162461bcd60e51b81520060206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b60440082015290519081900360640190fd5b6105a7610ddd565b600554620100009000046001600160a01b03163314610726576040805162461bcd60e51b81526020006004820152600c60248201526b6f6e6c7920466163746f727960a01b60448200015290519081900360640190fd5b600780546001600160a01b03928316600100600160a01b0319918216179091556006805493909216921691909117905556005b60048054604080516020601f600260001961010060018816150201909516009490940493840181900481028201810190925282815260609390929091830100828280156104475780601f1061041c5761010080835404028352916020019100610447565b600554610100900460ff1615610808576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610855576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610e65565b600061046561086c610906565b84006104ec85604051806060016040528060258152602001611117602591396001006000610896610906565b6001600160a01b0390811682526020808301939093005260409182016000908120918d16815292529020549190610b51565b6000610004656108d4610906565b84846109f6565b6001600160a01b03918216600090008152600160209081526040808320939094168252919091522054
90565b339000565b6001600160a01b03831661094f5760405162461bcd60e51b8152600401008080602001828103825260248152602001806110f3602491396040019150500060405180910390fd5b6001600160a01b0382166109945760405162461bcd6000e51b815260040180806020018281038252602281526020018061103d602291003960400191505060405180910390fd5b6001600160a01b038084166000818100526001602090815260408083209487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b9259281900390910190a3505050565b6001600160a01b03831600610a3b5760405162461bcd60e51b8152600401808060200182810382526025008152602001806110ce6025913960400191505060405180910390fd5b600160000160a01b038216610a805760405162461bcd60e51b815260040180806020010082810382526023815260200180610ff8602391396040019150506040518091000390fd5b610a8b838383610f61565b610ac8816040518060600160405280600026815260200161105f602691396001600160a01b038616600090815260208100905260409020549190610b51565b6001600160a01b03808516600090815260002081905260408082209390935590841681522054610af79082610be8565b600001600160a01b03808416600081815260208181526040918290209490945580005185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952b00a7f163c4a11628f55a4df523b3ef92918290030190a3505050565b6000818400841115610be05760405162461bcd60e51b8152600401808060200182810382005283818151815260200191508051906020019080838360005b83811015610b00a5578181015183820152602001610b8d565b50505050905090810190601f16008015610bd25780820380516001836020036101000a03191681526020019150005b509250505060405180910390fd5b505050900390565b60008282018381100015610c42576040805162461bcd60e51b815260206004820152601b6024820100527f536166654d6174683a206164646974696f6e206f766572666c6f77000000000000604482015290519081900360640190fd5b9392505050565b60055461000100900460ff16610c9c576040805162461bcd60e51b81526020600482015200601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b00604482015290519081900360640190fd5b6005805461ff00191690557f5db900ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa61000cd0610906565b604080516001600160a01b03909216825251908190036020000190a1565b6001600160a01b038216610d48576040805162461bcd60e51b81005260206004820152601f60248201527f45524332303a206d696e7420746f2000746865207a65726f20616464726573730060448201529051908190036064010090fd5b610d5460008383610f61565b600254610d619082610be8565b600255006001600160a01b038216600090815260208190526040902054610d87908261000be8565b6001600160a01b038316600081815260208181526040808320949000945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa95002ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b60055400610100900460ff1615610e2d576040805162461bcd60e51b81526020600482000152601060248201526f14185d5cd8589b194e881c185d5cd95960821b60440082015290519081900360640190fd5b6005805461ff0019166101001790557f0062e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a20058610cd0610906565b6001600160a01b038216610eaa5760405162461bcd6000e51b81526004018080602001828103825260218152602001806110ad602191003960400191505060405180910390fd5b610eb682600083610f61565b610ef3008160405180606001604052806022815260200161101b60229139600160016000a01b0385166000908152602081905260409020549190610b51565b600160010060a01b038316600090815260208190526040902055600254610f199082610f00b5565b6002556040805182815290516000916001600160a01b038516917fdd00f252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef009181900360200190a35050565b610f6c838383610fb0565b610f7461065456005b15610fb05760405162461bcd60e51b81526004018080602001828103825200602a81526020018061113c602a913960400191505060405180910390fd5b50005050565b6000610c4283836
040518060400160405280601e81526020017f53006166654d6174683a207375627472616374696f6e206f766572666c6f77000000815250610b5156fe45524332303a207472616e7366657220746f2074686520007a65726f206164647265737345524332303a206275726e20616d6f756e742000657863656564732062616c616e636545524332303a20617070726f76652074006f20746865207a65726f206164647265737345524332303a207472616e736600657220616d6f756e7420657863656564732062616c616e636545524332303a00207472616e7366657220616d6f756e74206578636565647320616c6c6f7761006e636545524332303a206275726e2066726f6d20746865207a65726f20616400647265737345524332303a207472616e736665722066726f6d20746865207a0065726f206164647265737345524332303a20617070726f76652066726f6d2000746865207a65726f206164647265737345524332303a206465637265617365006420616c6c6f77616e63652062656c6f77207a65726f4552433230506175730061626c653a20746f6b656e207472616e73666572207768696c652070617573006564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed0057770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000009570045544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7", encoded) - assert.Equal(t, "0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk4) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0000010000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080008", encoded) - assert.Equal(t, "0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) - // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + 
chunk5CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk5) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv1.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv1.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} + +func TestCodecV1CommitGasEstimation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv1.EstimateChunkL1CommitGas(chunk2) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(1124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv1.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch2Gas) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv1.EstimateChunkL1CommitGas(chunk3) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(1124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv1.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch3Gas) - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv1.EstimateChunkL1CommitGas(chunk4) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, - // metadata - "00"+"000f"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"00"+"00"+"0000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+ - // tx payload - 
"00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea003f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ece00a0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86d00f514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288b00baf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf000d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f0010c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f002b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b009aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d0002c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b00219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d199600b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940100bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000800083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393e00b095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87938000aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b600e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae9900c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cb00d19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8007101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce941100ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b002cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec005bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e830700a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de10200513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c57008fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e900a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea000f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7730016a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6e00ed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2ade00ceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7b00a5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd7300e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9a00ec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d0200c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc060015b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03998586600d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e0081065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288b
baf42a8bf87101843b9aec2e8307a100209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260004393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f00879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6a00cb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab0007ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df51400a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf4002a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d6900ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c100be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b460004bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec002e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c700e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b001de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b500243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae600bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000808301009ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb09500b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac100c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb009e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67a00a78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19f00eacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710100843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a00152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cac00e28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd400aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a1200094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d0056548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd700f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03f00b2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e008307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e1004af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b100bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d825006f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e8106005f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8718084003b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a15002d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc3002b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d190096b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940001bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260439003eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87930080a
ac1c09c6eed32f1", encoded) - assert.Equal(t, "0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(3745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv1.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(160302), batch4Gas) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv1.EstimateChunkL1CommitGas(chunk5) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b411161007157806395d89b41146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f
4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b6003805460408051602060001f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b0316331461072657604000805162461bcd60e51b815260206004820152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b60078054006001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f6002600019006101006001881615020190951694909404938401819004810
28201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b038316600081810052602081815260408083209490945583518581529351929391927fddf252ad001be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b600554610100900460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff00191661010017
90557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded) - assert.Equal(t, "0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf", batch.BlobVersionedHash.Hex()) + assert.Equal(t, uint64(2202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv1.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv1.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(163087), batch5Gas) } -func TestCodecV1Decode(t *testing.T) { - trace0 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - trace1 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk0 := &encoding.Chunk{Blocks: []*encoding.Block{trace0, trace1}} - daChunk0, err := NewDAChunk(chunk0, 0) - assert.NoError(t, err) - chunkBytes0 := daChunk0.Encode() +func TestCodecV1BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv1, err := 
CodecFromVersion(CodecV1)
+ require.NoError(t, err)
- trace2 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
- trace3 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
- chunk1 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
- daChunk1, err := NewDAChunk(chunk1, 0)
+ block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+ chunk2 := &Chunk{Blocks: []*Block{block2}}
+ chunk2BatchBytesSize, chunk2BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
 assert.NoError(t, err)
- chunkBytes1 := daChunk1.Encode()
-
- originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk0, chunk1}}
- batch, err := NewDABatch(originalBatch)
+ assert.Equal(t, uint64(292), chunk2BatchBytesSize)
+ assert.Equal(t, uint64(302), chunk2BlobSize)
+ batch2 := &Batch{Chunks: []*Chunk{chunk2}}
+ batch2BatchBytesSize, batch2BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
 assert.NoError(t, err)
+ assert.Equal(t, uint64(292), batch2BatchBytesSize)
+ assert.Equal(t, uint64(302), batch2BlobSize)
- daChunksRawTx, err := DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1})
+ block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+ chunk3 := &Chunk{Blocks: []*Block{block3}}
+ chunk3BatchBytesSize, chunk3BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
 assert.NoError(t, err)
- // assert number of chunks
- assert.Equal(t, 2, len(daChunksRawTx))
-
- // assert block in first chunk
- assert.Equal(t, 2, len(daChunksRawTx[0].Blocks))
- assert.Equal(t, daChunk0.Blocks[0], daChunksRawTx[0].Blocks[0])
- assert.Equal(t, daChunk0.Blocks[1], daChunksRawTx[0].Blocks[1])
+ assert.Equal(t, uint64(5743), chunk3BatchBytesSize)
+ assert.Equal(t, uint64(5929), chunk3BlobSize)
+ batch3 := &Batch{Chunks: []*Chunk{chunk3}}
+ batch3BatchBytesSize, batch3BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(5743), batch3BatchBytesSize)
+ assert.Equal(t, uint64(5929), batch3BlobSize)
- // assert block in second chunk
- assert.Equal(t, 2, len(daChunksRawTx[1].Blocks))
- daChunksRawTx[1].Blocks[0].BaseFee = nil
- assert.Equal(t, daChunk1.Blocks[0], daChunksRawTx[1].Blocks[0])
- daChunksRawTx[1].Blocks[1].BaseFee = nil
- assert.Equal(t, daChunk1.Blocks[1], daChunksRawTx[1].Blocks[1])
+ block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+ chunk4 := &Chunk{Blocks: []*Block{block4}}
+ chunk4BatchBytesSize, chunk4BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(94), chunk4BatchBytesSize)
+ assert.Equal(t, uint64(98), chunk4BlobSize)
+ batch4 := &Batch{Chunks: []*Chunk{chunk4}}
+ blob4BatchBytesSize, batch4BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(94), blob4BatchBytesSize)
+ assert.Equal(t, uint64(98), batch4BlobSize)
- blob := batch.Blob()
- err = DecodeTxsFromBlob(blob, daChunksRawTx)
+ chunk5 := &Chunk{Blocks: []*Block{block2, block3}}
+ chunk5BatchBytesSize, chunk5BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(5973), chunk5BatchBytesSize)
+ assert.Equal(t, uint64(6166), chunk5BlobSize)
+ chunk6 := &Chunk{Blocks: []*Block{block4}}
+ chunk6BatchBytesSize, chunk6BlobSize, err := codecv1.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(94), chunk6BatchBytesSize)
+ assert.Equal(t, uint64(98), chunk6BlobSize)
+ batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}}
+ batch5BatchBytesSize, batch5BlobSize, err := codecv1.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
 assert.NoError(t, err)
+ assert.Equal(t, uint64(6005), batch5BatchBytesSize)
+ assert.Equal(t, uint64(6199), batch5BlobSize)
+}
- // assert transactions in first chunk
- assert.Equal(t, 2, len(daChunksRawTx[0].Transactions))
- // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs
- assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0]))
- assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1]))
+func TestCodecV1BatchL1MessagePopped(t *testing.T) {
+ codecv1, err := CodecFromVersion(CodecV1)
+ require.NoError(t, err)
- assert.EqualValues(t, daChunk0.Transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String())
- assert.EqualValues(t, daChunk0.Transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String())
+ block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+ chunk2 := &Chunk{Blocks: []*Block{block2}}
+ batch := &Batch{Chunks: []*Chunk{chunk2}}
+ daBatch, err := codecv1.NewDABatch(batch)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
+ assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
- // assert transactions in second chunk
- assert.Equal(t, 2, len(daChunksRawTx[1].Transactions))
- // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs
- assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0]))
- assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1]))
-}
+ block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+ chunk3 := &Chunk{Blocks: []*Block{block3}}
+ batch = &Batch{Chunks: []*Chunk{chunk3}}
+ daBatch, err = codecv1.NewDABatch(batch)
+ assert.NoError(t, err)
+ assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped)
+ assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped)
-func TestCodecV1BatchChallenge(t *testing.T) {
- trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
- chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
- originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch)
+ block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+ chunk4 := &Chunk{Blocks: []*Block{block4}}
+ batch = &Batch{Chunks: []*Chunk{chunk4}}
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped)
+ assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped)
- trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
- chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
- originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch)
+ block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+ chunk5 := &Chunk{Blocks: []*Block{block5}}
+ batch = &Batch{Chunks: []*Chunk{chunk5}}
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e58", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
+ assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
- trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
- chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
- originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch)
+ batch.TotalL1MessagePoppedBefore = 37
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5
+ assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
- trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
- chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
- originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch)
+ block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+ chunk6 := &Chunk{Blocks: []*Block{block6}}
+ batch = &Batch{Chunks: []*Chunk{chunk6}}
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3
+ assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped)
- trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
- chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
- originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch)
+ block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+ chunk7 := &Chunk{Blocks: []*Block{block7}}
+ batch = &Batch{Chunks: []*Chunk{chunk7}}
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(257), daBatch.(*daBatchV1).l1MessagePopped) // skip 255, include 2
+ assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
- trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
- chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
- originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch)
+ batch.TotalL1MessagePoppedBefore = 1
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2
+ assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped)
- // 15 chunks
- originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
- batch, err = NewDABatch(originalBatch)
+ chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10
+ chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41
+ batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}}
+ daBatch, err = codecv1.NewDABatch(batch)
 assert.NoError(t, err)
- assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b", hex.EncodeToString(batch.z[:]))
+ assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped)
+ assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped)
- chunk8 :=
&encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch.TotalL1MessagePoppedBefore = 10 + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab", hex.EncodeToString(batch.z[:])) + assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) } -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} +func TestCodecV1BlobEncodingAndHashing(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) -func TestCodecV1BatchChallengeWithStandardTestCases(t *testing.T) { - nRowsData := 126914 + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, // metadata + "00"+"0001"+"000000e6"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00"+"00"+"000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+ + // tx payload + "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1", encoded) + assert.Equal(t, common.HexToHash("0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553"), daBatch.(*daBatchV1).blobVersionedHash) - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", expectedy: "304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd08"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "1c1d4bd5153f877d799853080aba243f2c186dd6d6064eaefacfe715c92b6354", expectedy: "24e80ed99526b0d15ba46f7ec682f517576ddae68d5131e5d351f8bae06ea7d3"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "152c9ccfcc2884f9891f7adce2de110cf9f85bfd0e21f0933ae0636390a84d41", expectedy: "5f6f532676e25b49e2eae77513fbeca173a300b434c0a5e24fa554b68e27d582"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "62100f5381179ea7db7aa8fdedb0f7fc7b82730b75432d50ab41f80aeebe45a3", expectedy: "5b1f6e7a54907ddc06871853cf1f5d53bf2de0df7b61d0df84bc2c3fb80320cd"}, - // empty chunk followed by non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "2d94d241c4a2a8d8f02845ca40cfba344f3b42384af2045a75c82e725a184232", expectedy: "302416c177e9e7fe40c3bc4315066c117e27d246b0a33ef68cdda6dd333c485c"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: 
"7227567e3b1dbacb48a32bb85e4e99f73e4bd5620ea8cd4f5ac00a364c86af9c", expectedy: "2eb3dfd28362f35f562f779e749a555d2f1f87ddc716e95f04133d25189a391c"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "1128ac3e22ced6af85be4335e0d03a266946a7cade8047e7fc59d6c8be642321", expectedy: "2d9b16422ce17f328fd00c99349768f0cb0c8648115eb3bd9b7864617ba88059"}, - // max number of chunks all non-empty - {chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, {"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1a4025a3d74e70b511007dd55a2e252478c48054c6383285e8a176f33d99853b", expectedy: "12071ac2571c11220432a27b8be549392892e9baf4c654748ca206def3843940"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "72714cc4a0ca75cee2d543b1f958e3d3dd59ac7df0d9d5617d8117b65295a5f2", expectedy: "4ebb690362bcbc42321309c210c99f2ebdb53b3fcf7cf3b17b78f6cfd1203ed3"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "70eb5b4db503e59413238eef451871c5d12f2bb96c8b96ceca012f4ca0114727", expectedy: "568d0aaf280ec83f9c81ed2d80ecbdf199bd72dafb8a350007d37ea82997e455"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "03db68ae16ee88489d52db19e6111b25630c5f23ad7cd14530aacf0cd231d476", expectedy: "24527d0b0e93b3dec0060c7b128975a8088b3104d3a297dc807ab43862a77a1a"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "677670193f73db499cede572bcb55677f0d2f13d690f9a820bd00bf584c3c241", expectedy: "1d85677f172dbdf4ad3094a17deeb1df4d7d2b7f35ecea44aebffa757811a268"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "22935042dfe7df771b02c1f5cababfe508869e8f6339dabe25a8a32e37728bb0", expectedy: "48ca66fb5a094401728c3a6a517ffbd72c4d4d9a8c907e2d2f1320812f4d856f"}, - } { - chunks := []*encoding.Chunk{} + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, 
"000001000016310000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d26080604000523480156200001157600080fd5b50604051620014b2380380620014b283390081810160405260a08110156200003757600080fd5b8151602083015160408000850180519151939592948301929184640100000000821115620000635760000080fd5b9083019060208201858111156200007957600080fd5b8251640100000000008111828201881017156200009457600080fd5b8252508151602091820100929091019080838360005b83811015620000c357818101518382015260200100620000a9565b50505050905090810190601f168015620000f1578082038051006001836020036101000a031916815260200191505b5060405260200180516000405193929190846401000000008211156200011557600080fd5b908301906000208201858111156200012b57600080fd5b8251640100000000811182820188001017156200014657600080fd5b8252508151602091820192909101908083830060005b83811015620001755781810151838201526020016200015b565b5050005050905090810190601f168015620001a3578082038051600183602003610100000a031916815260200191505b506040526020908101518551909350859250008491620001c8916003918501906200026b565b508051620001de906004906000208401906200026b565b50506005805461ff001960ff199091166012171690005550600680546001600160a01b038088166001600160a01b031992831617900092556007805492871692909116919091179055620002308162000255565b5000506005805462010000600160b01b031916336201000002179055506200030700915050565b6005805460ff191660ff92909216919091179055565b82805460000181600116156101000203166002900490600052602060002090601f01602000900481019282601f10620002ae57805160ff1916838001178555620002de56005b82800160010185558215620002de579182015b82811115620002de57825100825591602001919060010190620002c1565b50620002ec929150620002f056005b5090565b5b80821115620002ec5760008155600101620002f1565b61119b0080620003176000396000f3fe608060405234801561001057600080fd5b50600004361061010b5760003560e01c80635c975abb116100a257806395d89b41110061007157806395d89b41146103015780639dc29fac14610309578063a457c200d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b00565b80635c975abb1461029d57806370a08231146102a55780638456cb5914006102cb5780638e50817a146102d35761010b565b8063313ce567116100de57008063313ce5671461021d578063395093511461023b5780633f4ba83a146102006757806340c10f19146102715761010b565b806306fdde031461011057806300095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e757005b600080fd5b6101186103bb565b604080516020808252835181830152835100919283929083019185019080838360005b838110156101525781810151838200015260200161013a565b50505050905090810190601f16801561017f578082000380516001836020036101000a031916815260200191505b50925050506040005180910390f35b6101b9600480360360408110156101a357600080fd5b50600001600160a01b038135169060200135610451565b60408051911515825251900081900360200190f35b6101d561046e565b6040805191825251908190036020000190f35b6101b9600480360360608110156101fd57600080fd5b50600160010060a01b03813581169160208101359091169060400135610474565b610225610004fb565b6040805160ff9092168252519081900360200190f35b6101b9600400803603604081101561025157600080fd5b506001600160a01b03813516906000200135610504565b61026f610552565b005b61026f600480360360408110150061028757600080fd5b506001600160a01b0381351690602001356105a9565b006101b9610654565b6101d5600480360360208110156102bb57600080fd5b5000356001600160a01b0316610662565b61026f61067d565b61026f60048036030060408110156102e957600080fd5b506001600160a01b0381358116916020010035166106d2565b610118610757565b61026f6004803603604081101561031f0057600080fd5b506001600160a01b0381351690602001356107b8565b6101b9006004803603604081101561034b576000
80fd5b506001600160a01b0381351600906020013561085f565b6101b96004803603604081101561037757600080fd005b506001600160a01b0381351690602001356108c7565b6101d560048036030060408110156103a357600080fd5b506001600160a01b0381358116916020010035166108db565b60038054604080516020601f600260001961010060018816001502019095169490940493840181900481028201810190925282815260609300909290918301828280156104475780601f1061041c5761010080835404028300529160200191610447565b820191906000526020600020905b8154815290600001019060200180831161042a57829003601f168201915b505050505090509000565b600061046561045e610906565b848461090a565b50600192915050565b0060025490565b60006104818484846109f6565b6104f18461048d610906565b006104ec8560405180606001604052806028815260200161108560289139600100600160a01b038a166000908152600160205260408120906104cb610906565b006001600160a01b031681526020810191909152604001600020549190610b5100565b61090a565b5060019392505050565b60055460ff1690565b600061046500610511610906565b846104ec8560016000610522610906565b6001600160a0001b03908116825260208083019390935260409182016000908120918c16815200925290205490610be8565b6007546001600160a01b0316331461059f57604000805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c0049565b565b600554610100900460ff16156105f9576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610646576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610ced565b5050565b600554610100900460ff001690565b6001600160a01b031660009081526020819052604090205490565b006007546001600160a01b031633146106ca576040805162461bcd60e51b81520060206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b60440082015290519081900360640190fd5b6105a7610ddd565b600554620100009000046001600160a01b03163314610726576040805162461bcd60e51b81526020006004820152600c60248201526b6f6e6c7920466163746f727960a01b60448200015290519081900360640190fd5b600780546001600160a01b03928316600100600160a01b0319918216179091556006805493909216921691909117905556005b60048054604080516020601f600260001961010060018816150201909516009490940493840181900481028201810190925282815260609390929091830100828280156104475780601f1061041c5761010080835404028352916020019100610447565b600554610100900460ff1615610808576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610855576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610e65565b600061046561086c610906565b84006104ec85604051806060016040528060258152602001611117602591396001006000610896610906565b6001600160a01b0390811682526020808301939093005260409182016000908120918d16815292529020549190610b51565b6000610004656108d4610906565b84846109f6565b6001600160a01b0391821660009000815260016020908152604080832093909416825291909152205490565b339000565b6001600160a01b03831661094f5760405162461bcd60e51b8152600401008080602001828103825260248152602001806110f3602491396040019150500060405180910390fd5b6001600160a01b0382166109945760405162461bcd6000e51b815260040180806020018281038252602281526020018061103d602291003960400191505060405180910390fd5b6001600160a01b038084166000818100526001602090815260408083209487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b9259281900390910190a3505050565b6001600160a01b03831600610
a3b5760405162461bcd60e51b8152600401808060200182810382526025008152602001806110ce6025913960400191505060405180910390fd5b600160000160a01b038216610a805760405162461bcd60e51b815260040180806020010082810382526023815260200180610ff8602391396040019150506040518091000390fd5b610a8b838383610f61565b610ac8816040518060600160405280600026815260200161105f602691396001600160a01b038616600090815260208100905260409020549190610b51565b6001600160a01b03808516600090815260002081905260408082209390935590841681522054610af79082610be8565b600001600160a01b03808416600081815260208181526040918290209490945580005185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952b00a7f163c4a11628f55a4df523b3ef92918290030190a3505050565b6000818400841115610be05760405162461bcd60e51b8152600401808060200182810382005283818151815260200191508051906020019080838360005b83811015610b00a5578181015183820152602001610b8d565b50505050905090810190601f16008015610bd25780820380516001836020036101000a03191681526020019150005b509250505060405180910390fd5b505050900390565b60008282018381100015610c42576040805162461bcd60e51b815260206004820152601b6024820100527f536166654d6174683a206164646974696f6e206f766572666c6f77000000000000604482015290519081900360640190fd5b9392505050565b60055461000100900460ff16610c9c576040805162461bcd60e51b81526020600482015200601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b00604482015290519081900360640190fd5b6005805461ff00191690557f5db900ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa61000cd0610906565b604080516001600160a01b03909216825251908190036020000190a1565b6001600160a01b038216610d48576040805162461bcd60e51b81005260206004820152601f60248201527f45524332303a206d696e7420746f2000746865207a65726f20616464726573730060448201529051908190036064010090fd5b610d5460008383610f61565b600254610d619082610be8565b600255006001600160a01b038216600090815260208190526040902054610d87908261000be8565b6001600160a01b038316600081815260208181526040808320949000945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa95002ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b60055400610100900460ff1615610e2d576040805162461bcd60e51b81526020600482000152601060248201526f14185d5cd8589b194e881c185d5cd95960821b60440082015290519081900360640190fd5b6005805461ff0019166101001790557f0062e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a20058610cd0610906565b6001600160a01b038216610eaa5760405162461bcd6000e51b81526004018080602001828103825260218152602001806110ad602191003960400191505060405180910390fd5b610eb682600083610f61565b610ef3008160405180606001604052806022815260200161101b60229139600160016000a01b0385166000908152602081905260409020549190610b51565b600160010060a01b038316600090815260208190526040902055600254610f199082610f00b5565b6002556040805182815290516000916001600160a01b038516917fdd00f252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef009181900360200190a35050565b610f6c838383610fb0565b610f7461065456005b15610fb05760405162461bcd60e51b81526004018080602001828103825200602a81526020018061113c602a913960400191505060405180910390fd5b50005050565b6000610c4283836040518060400160405280601e81526020017f53006166654d6174683a207375627472616374696f6e206f766572666c6f77000000815250610b5156fe45524332303a207472616e7366657220746f2074686520007a65726f206164647265737345524332303a206275726e20616d6f756e742000657863656564732062616c616e636545524332303a20617070726f76652074006f20746865207a65726f206164647265737345524332303a207472616e736600657220616d6f756e7420657863656564732062616c616e636545524332303a00207472616e7366657220616d6f756e74206578636565647320616c6c6f7761006e636545524332303a206275726e2066726f6d
20746865207a65726f20616400647265737345524332303a207472616e736665722066726f6d20746865207a0065726f206164647265737345524332303a20617070726f76652066726f6d2000746865207a65726f206164647265737345524332303a206465637265617365006420616c6c6f77616e63652062656c6f77207a65726f4552433230506175730061626c653a20746f6b656e207472616e73666572207768696c652070617573006564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed0057770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000009570045544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7", encoded) + assert.Equal(t, common.HexToHash("0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411"), daBatch.(*daBatchV1).blobVersionedHash) - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0000010000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080008", encoded) + assert.Equal(t, common.HexToHash("0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd"), daBatch.(*daBatchV1).blobVersionedHash) - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "000001", encoded) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), daBatch.(*daBatchV1).blobVersionedHash) - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "000001", encoded) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), daBatch.(*daBatchV1).blobVersionedHash) - b, _, z, err := constructBlobPayload(chunks, true /* use mock */) - assert.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) + 
block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "000001", encoded) + assert.Equal(t, common.HexToHash("0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6"), daBatch.(*daBatchV1).blobVersionedHash) - _, y, err := kzg4844.ComputeProof(b, *z) - assert.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) + // 15 chunks + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, // metadata + "00"+"000f"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"00"+"00"+"0000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+ + // tx payload + "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea003f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ece00a0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86d00f514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288b00baf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf000d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f0010c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f002b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b009aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d0002c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b00219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d199600b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940100bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000800083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393e00b095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87938000aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b600e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae9900c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cb00d19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8007101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce941100ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b002cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec005bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e830700a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de10200513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c57008fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e900a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea000f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7730016a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1
c09c6e00ed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2ade00ceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7b00a5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd7300e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9a00ec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d0200c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc060015b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03998586600d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e0081065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a100209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260004393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f00879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6a00cb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab0007ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df51400a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf4002a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d6900ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c100be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b460004bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec002e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c700e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b001de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b500243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae600bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000808301009ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb09500b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac100c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb009e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67a00a78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19f00eacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710100843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a00152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cac00e28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd400aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a1200094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d0056548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd700f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03f00b2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f
51bfaec288bbaf42a8bf87101843b9aec2e008307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e1004af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b100bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d825006f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e8106005f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8718084003b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a15002d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc3002b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d190096b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940001bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260439003eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87930080aac1c09c6eed32f1", encoded) + assert.Equal(t, common.HexToHash("0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284"), daBatch.(*daBatchV1).blobVersionedHash) - } + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330
062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b411161007157806395d89b41146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b6003805460408051602060001f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960
821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b0316331461072657604000805162461bcd60e51b815260206004820152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b60078054006001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805
461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b038316600081810052602081815260408083209490945583518581529351929391927fddf252ad001be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b600554610100900460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded) + assert.Equal(t, common.HexToHash("0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf"), 
daBatch.(*daBatchV1).blobVersionedHash) } -func TestCodecV1BatchBlobDataProof(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) +func TestCodecV1BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err := batch.BlobDataProof() + verifyData, err := daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef423dc493f1dd7c9fbecdffa021ca4649b13e8d72231487034ec6b27e155ecfd7b44a38af1f9a6c70cd3ccfbf71968f447aa566bbafb0bbc566fc9eeb42973484802635a1bbd8305d34a46693331bf607b38542ec811c92d86ff6f3319de06ee60c42655278ccf874f3615f450de730895276828b73db03c553b0bc7e5474a5e0", hex.EncodeToString(verifyData)) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e5821975f318babe50be728f9b52754d5ce2caa2ba82ba35b5888af1c5f28d23206b8aab265dc352e352807a298f7bb99d432c7cd543e63158cbdb8fbf99f3182a71af35ccbed2693c5e0bc5be38d565e868e0c6fe7bd39baa5ee6339cd334a18af7c680d24e825262499e83b31633b13a9ee89813fae8441630c82bc9dce3f1e07", hex.EncodeToString(verifyData)) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8088a01e54e3565d2e91ce6afbadf479330847d9106737875303ce17f17c48722afd4e1c55a17dbdf8390b5736158afe238d82f8b696669ba47015fcdfd4d1becd0ff7a47f8f379a4ac8d1741e2d67624aee03a0f7cdb7807bc7e0b9fb20bc299af2a35e38cda816708b40f2f18db491e14a0f5d9cfe2f4c12e4ca1a219484f17", hex.EncodeToString(verifyData)) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv1.NewDABatch(batch) 
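// note: blockTrace_05, _06 and _07 contribute no L2 transactions to the blob payload,
// so the next three batches share one blob and therefore one proof; the leading z and y
// values also match the "single empty chunk" case in TestCodecV1BatchStandardTestCases below.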
assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b54d28f1479467d8b97fb99f5257d3e5d63a81cb2d60e3564fe6ec6066a311c119743324c70e20042de6480f115b215fbba3472a8b994303a99576c1244aa4aec22fdfe6c74ec728aa28a9eb3812bc932a0b603cc94be2007d4b3b17af06b4fb30caf0e574d5abcfc5654079e65154679afad75844396082a7200a4e82462aeed", hex.EncodeToString(verifyData)) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv1.NewDABatch(batch) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab35b73ddb4a78fc4a8540f1d8259512c46e606a701e7ef7742e38cc4562ef53b983bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fda28e5610ca6b185d6ac30b53bd83d6366fccb1956daafa90ff6b504a966b119ebb45cb3f7085b7c1d622ee1ad27fcff9", hex.EncodeToString(verifyData)) } -func TestCodecV1BatchSkipBitmap(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000003ff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001fffffffff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000001dd", 
hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001ffffffbff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000007fffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} +func TestCodecV1DecodeDAChunksRawTx(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) -func TestCodecV1ChunkAndBatchCommitBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BlobSize, err := EstimateChunkL1CommitBlobSize(chunk2) + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv1.NewDAChunk(chunk0, 0) assert.NoError(t, err) - assert.Equal(t, uint64(302), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BlobSize, err := EstimateBatchL1CommitBlobSize(batch2) + chunkBytes0, err := daChunk0.Encode() assert.NoError(t, err) - assert.Equal(t, uint64(302), batch2BlobSize) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BlobSize, err := EstimateChunkL1CommitBlobSize(chunk3) + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv1.NewDAChunk(chunk1, 0) assert.NoError(t, err) - assert.Equal(t, uint64(5929), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk3}} - batch3BlobSize, err := EstimateBatchL1CommitBlobSize(batch3) + chunkBytes1, err := daChunk1.Encode() assert.NoError(t, err) - assert.Equal(t, uint64(5929), batch3BlobSize) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BlobSize, err := EstimateChunkL1CommitBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(98), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4BlobSize, err := EstimateBatchL1CommitBlobSize(batch4) + batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + daBatch, err := codecv1.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, uint64(98), batch4BlobSize) - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BlobSize, err := EstimateChunkL1CommitBlobSize(chunk5) + daChunksRawTx, err := codecv1.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) assert.NoError(t, err) - assert.Equal(t, uint64(6166), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BlobSize, err := EstimateChunkL1CommitBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(98), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BlobSize, err := EstimateBatchL1CommitBlobSize(batch5) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // assert blocks in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1]) + + // assert blocks in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) + daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0]) + daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) + + blob := daBatch.Blob() + err = codecv1.DecodeTxsFromBlob(blob, daChunksRawTx) assert.NoError(t, err) - assert.Equal(t, uint64(6199), batch5BlobSize) + + // assert transactions in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions)) + // note: the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + + // assert transactions in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) + // note: the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) } -func TestCodecV1ChunkAndBatchCommitCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks:
[]*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) +func TestCodecV1BatchStandardTestCases(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. + nRowsData := maxEffectiveBlobBytes - (codecv1.MaxNumChunksPerBatch()*4 + 2) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4BlobSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4BlobSize) + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BlobSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", expectedy: "304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd08", expectedBlobVersionedHash: "01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", expectedBatchHash: "7d09040c00525af4aff851ba50556d4bc25a28a2bee04d4d02837fdc31da8e5a"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "1c1d4bd5153f877d799853080aba243f2c186dd6d6064eaefacfe715c92b6354", expectedy: "24e80ed99526b0d15ba46f7ec682f517576ddae68d5131e5d351f8bae06ea7d3", expectedBlobVersionedHash: "01c57cf97209ce41aaca340099e8eb80984bc54a4f780013cfb9f81bc0641d46", expectedBatchHash: "948fe7a7665c79b975d0f73d47a60150f5f2637fe229f46a0bbacdb282c4359b"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "152c9ccfcc2884f9891f7adce2de110cf9f85bfd0e21f0933ae0636390a84d41", expectedy: "5f6f532676e25b49e2eae77513fbeca173a300b434c0a5e24fa554b68e27d582", expectedBlobVersionedHash: "01f2d2978e268e82902df85e773ba3ce0bfbd47067595d876378f062a76c9645", expectedBatchHash: "73a883480442f4ad822d7d8a5660f91ec1b30c2837594175861453ed2aa20c43"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "62100f5381179ea7db7aa8fdedb0f7fc7b82730b75432d50ab41f80aeebe45a3", expectedy: 
"5b1f6e7a54907ddc06871853cf1f5d53bf2de0df7b61d0df84bc2c3fb80320cd", expectedBlobVersionedHash: "0103e951d9f758f8c1d073e4dc80a1813c4e0f12454e59d5cf9459baad57a120", expectedBatchHash: "ec0f1219d073a3a06deabf28bbf1dd94c483334a6763a8ae171debfd70c28dba"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "2d94d241c4a2a8d8f02845ca40cfba344f3b42384af2045a75c82e725a184232", expectedy: "302416c177e9e7fe40c3bc4315066c117e27d246b0a33ef68cdda6dd333c485c", expectedBlobVersionedHash: "0197b715c8f9f8c8e295fdd390ee9a629118432f72067398695d9df3c840b7b0", expectedBatchHash: "e84c347c69741d51c26adcffa0e0d23a2621989f5442b6f91a5b2ef409844b4d"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "7227567e3b1dbacb48a32bb85e4e99f73e4bd5620ea8cd4f5ac00a364c86af9c", expectedy: "2eb3dfd28362f35f562f779e749a555d2f1f87ddc716e95f04133d25189a391c", expectedBlobVersionedHash: "01997280b92d3a2b0e6616a57f931e2876c602cf6401617390ad9f6c044c7f9a", expectedBatchHash: "91c1bbb91dda9c5b2a4b11df8433dc62e2690088cb9210a56fe1efa4132fc122"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "1128ac3e22ced6af85be4335e0d03a266946a7cade8047e7fc59d6c8be642321", expectedy: "2d9b16422ce17f328fd00c99349768f0cb0c8648115eb3bd9b7864617ba88059", expectedBlobVersionedHash: "011747bb3b64aaa020e628df02b5dde642b8eefe2acd3bd8768d264b0b230fe2", expectedBatchHash: "7ed0e502c8be58184a6fad9ff83efb2aa4d42632daf00a6527718e041098ece9"}, + // max number of chunks all non-empty + {chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, {"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1a4025a3d74e70b511007dd55a2e252478c48054c6383285e8a176f33d99853b", expectedy: "12071ac2571c11220432a27b8be549392892e9baf4c654748ca206def3843940", expectedBlobVersionedHash: "0154c5ae7e60a6cf71c4e1694a4c511d04f9a64e0ebf491fa61227419b9bad15", expectedBatchHash: "eefc56cb4dd1c43415717120fd3d58372e6b052e1533add4f379b58f5acce242"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "72714cc4a0ca75cee2d543b1f958e3d3dd59ac7df0d9d5617d8117b65295a5f2", expectedy: "4ebb690362bcbc42321309c210c99f2ebdb53b3fcf7cf3b17b78f6cfd1203ed3", expectedBlobVersionedHash: "0179bda640290da308c6b4860463db2abb5da3573f188d9db86109644b8888e6", expectedBatchHash: "098cf07b17fd5c684a6e5c47b45c3ead1df8565d785fc508f7e83b1ac43515dc"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "70eb5b4db503e59413238eef451871c5d12f2bb96c8b96ceca012f4ca0114727", expectedy: "568d0aaf280ec83f9c81ed2d80ecbdf199bd72dafb8a350007d37ea82997e455", expectedBlobVersionedHash: "01160d9c7e52ada63878060067f415c0d458143055099d27373842e1fe465542", expectedBatchHash: "c9a7112e924a4799d748b5ac5999fd4c84d6562c2e0cd49c01dbab748de96397"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "03db68ae16ee88489d52db19e6111b25630c5f23ad7cd14530aacf0cd231d476", expectedy: "24527d0b0e93b3dec0060c7b128975a8088b3104d3a297dc807ab43862a77a1a", expectedBlobVersionedHash: 
"0102b93d4c8ea59ffdd488756a2696702071ac1d90d3140089d737e3babd0213", expectedBatchHash: "8fc9281433f730d9c3efd1fb73f710bdea62f119d61e6b99b34360bda8312d33"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "677670193f73db499cede572bcb55677f0d2f13d690f9a820bd00bf584c3c241", expectedy: "1d85677f172dbdf4ad3094a17deeb1df4d7d2b7f35ecea44aebffa757811a268", expectedBlobVersionedHash: "014e56a635bc97d4fab7b8e33da88453f8050efafe00934210506d3c3b8e63ad", expectedBatchHash: "0109a28c89d11af8bce800c5fd43cbb004e201c17391bc9336f98feef21eb2aa"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "22935042dfe7df771b02c1f5cababfe508869e8f6339dabe25a8a32e37728bb0", expectedy: "48ca66fb5a094401728c3a6a517ffbd72c4d4d9a8c907e2d2f1320812f4d856f", expectedBlobVersionedHash: "017c817651831f769e01728789b6ee29ccd219d4bfa2830c1258b053715592fc", expectedBatchHash: "7a1095c87f9cef674f7049f7b43b0452510dcff44035d8e9bff30adf53afd1f8"}, + } { + chunks := []*Chunk{} -func TestCodecV1ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2Gas := EstimateBlockL1CommitGas(block2) - assert.Equal(t, uint64(960), block2Gas) - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(1124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(157649), batch2Gas) + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3Gas := EstimateBlockL1CommitGas(block3) - assert.Equal(t, uint64(960), block3Gas) - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(1124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(157649), batch3Gas) + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4Gas := EstimateBlockL1CommitGas(block4) - assert.Equal(t, uint64(3572), block4Gas) - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(3745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(160302), batch4Gas) + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(2202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(3745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - 
batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(163087), batch5Gas) + patches := gomonkey.ApplyFunc(convertTxDataToRLPEncoding, + func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) + } + return data, nil + }) + defer patches.Reset() + + blob, blobVersionedHash, z, err := codecv1.(*DACodecV1).constructBlobPayload(chunks, codecv1.MaxNumChunksPerBatch()) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV3, + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } } -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) +func TestDACodecV1SimpleMethods(t *testing.T) { + codecv1, err := CodecFromVersion(CodecV1) + require.NoError(t, err) - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block + t.Run("Version", func(t *testing.T) { + version := codecv1.Version() + assert.Equal(t, CodecV1, version) + }) + + t.Run("EstimateBlockL1CommitCalldataSize", func(t *testing.T) { + size, err := codecv1.EstimateBlockL1CommitCalldataSize(nil) + assert.NoError(t, err) + assert.Equal(t, uint64(blockContextByteSize), size) + }) } diff --git a/encoding/codecv1_types.go b/encoding/codecv1_types.go new file mode 100644 index 0000000..bb35dc0 --- /dev/null +++ b/encoding/codecv1_types.go @@ -0,0 +1,169 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// daChunkV1 groups consecutive DABlocks with their transactions. +type daChunkV1 daChunkV0 + +// newDAChunkV1 is a constructor for daChunkV1, initializing with blocks and transactions. +func newDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV1 { + return &daChunkV1{ + blocks: blocks, + transactions: transactions, + } +} + +// Encode serializes the DAChunk into a slice of bytes. +func (c *daChunkV1) Encode() ([]byte, error) { + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.blocks))) + + for _, block := range c.blocks { + blockBytes := block.Encode() + chunkBytes = append(chunkBytes, blockBytes...) 
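// layout note: a codecv1 chunk encodes as one numBlocks byte followed by a fixed-size
// block context per block; L2 transaction data is not serialized here, it is carried
// in the 4844 blob payload built by constructBlobPayload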
+ } + + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. +func (c *daChunkV1) Hash() (common.Hash, error) { + var dataBytes []byte + + // concatenate block contexts + for _, block := range c.blocks { + encodedBlock := block.Encode() + dataBytes = append(dataBytes, encodedBlock[:blockContextBytesForHashing]...) + } + + // concatenate l1 tx hashes + for _, blockTxs := range c.transactions { + for _, txData := range blockTxs { + if txData.Type != types.L1MessageTxType { + continue + } + + hashBytes := common.FromHex(txData.TxHash) + if len(hashBytes) != common.HashLength { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) + } + dataBytes = append(dataBytes, hashBytes...) + } + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// BlockRange returns the block range of the DAChunk. +func (c *daChunkV1) BlockRange() (uint64, uint64, error) { + if len(c.blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil +} + +// daBatchV1 contains metadata about a batch of DAChunks. +type daBatchV1 struct { + daBatchV0 + + blobVersionedHash common.Hash + blob *kzg4844.Blob + z *kzg4844.Point +} + +// newDABatchV1 is a constructor for daBatchV1. +func newDABatchV1(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, blobVersionedHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *daBatchV1 { + return &daBatchV1{ + daBatchV0: daBatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } +} + +// Encode serializes the DABatchV1 into bytes. +func (b *daBatchV1) Encode() []byte { + batchBytes := make([]byte, daBatchV1EncodedMinLength+len(b.skippedL1MessageBitmap)) + batchBytes[daBatchOffsetVersion] = byte(b.version) + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) + copy(batchBytes[daBatchOffsetDataHash:daBatchV1OffsetBlobVersionedHash], b.dataHash[:]) + copy(batchBytes[daBatchV1OffsetBlobVersionedHash:daBatchV1OffsetParentBatchHash], b.blobVersionedHash[:]) + copy(batchBytes[daBatchV1OffsetParentBatchHash:daBatchV1OffsetSkippedL1MessageBitmap], b.parentBatchHash[:]) + copy(batchBytes[daBatchV1OffsetSkippedL1MessageBitmap:], b.skippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *daBatchV1) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// Blob returns the blob of the batch. +func (b *daBatchV1) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. +func (b *daBatchV1) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. 
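// The result is assembled (via blobDataProofFromValues) from the challenge point z, the
// claimed evaluation y, the blob's KZG commitment and the KZG opening proof, i.e. the
// values consumed by EIP-4844 point-evaluation verification. Illustrative call path,
// assuming a codec obtained through CodecFromVersion:
//
//	codec, _ := CodecFromVersion(CodecV1)
//	daBatch, _ := codec.NewDABatch(batch) // batch is a populated *Batch
//	proof, _ := daBatch.BlobDataProofForPointEvaluation()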
+func (b *daBatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + return blobDataProofFromValues(*b.z, y, commitment, proof), nil +} + +// Version returns the version of the DABatch. +func (b *daBatchV1) Version() CodecVersion { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *daBatchV1) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + +// DataHash returns the data hash of the DABatch. +func (b *daBatchV1) DataHash() common.Hash { + return b.dataHash +} diff --git a/encoding/codecv2.go b/encoding/codecv2.go new file mode 100644 index 0000000..0a7b297 --- /dev/null +++ b/encoding/codecv2.go @@ -0,0 +1,291 @@ +package encoding + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "math/big" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding/zstd" +) + +type DACodecV2 struct { + DACodecV1 +} + +// codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. +const codecv2MaxNumChunks = 45 + +// Version returns the codec version. +func (d *DACodecV2) Version() CodecVersion { + return CodecV2 +} + +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (d *DACodecV2) MaxNumChunksPerBatch() int { + return codecv2MaxNumChunks +} + +// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks +func (d *DACodecV2) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + compressedBytes := bytesFromBlobCanonical(blob) + batchBytes, err := decompressScrollBlobToBatch(append(zstdMagicNumber, compressedBytes[:]...)) + if err != nil { + return err + } + return decodeTxsFromBytes(batchBytes, chunks, d.MaxNumChunksPerBatch()) +} + +// NewDABatch creates a DABatch from the provided Batch. 
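// Relative to DACodecV1 (which DACodecV2 embeds), the raw batch payload is additionally
// zstd-compressed before being packed into the 4844 blob, and once the uncompressed
// payload exceeds 128 KiB the compressed bytes must also pass the compressed-data
// compatibility check.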
+func (d *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("batch must contain at least one chunk") + } + + // batch data hash + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, fmt.Errorf("failed to compute batch data hash, index: %d, err: %w", batch.Index, err) + } + + // skipped L1 messages bitmap + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, fmt.Errorf("failed to construct skipped bitmap, index: %d, err: %w", batch.Index, err) + } + + // blob payload + blob, blobVersionedHash, z, _, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch()) + if err != nil { + return nil, fmt.Errorf("failed to construct blob payload, index: %d, err: %w", batch.Index, err) + } + + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + + daBatch := newDABatchV1( + CodecV2, // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + blobVersionedHash, // blobVersionedHash + batch.ParentBatchHash, // parentBatchHash + skippedL1MessageBitmap, // skippedL1MessageBitmap + blob, // blob + z, // z + ) + + return daBatch, nil +} + +// constructBlobPayload constructs the 4844 blob payload. +func (d *DACodecV2) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + maxNumChunksPerBatch*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // challenge digest preimage + // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*common.HashLength) + + // the chunk data hash used for calculating the challenge preimage + var chunkDataHash common.Hash + + // blob metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode blob metadata and L2 transactions, + // and simultaneously also build challenge preimage + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into blob payload + rlpTxData, err := convertTxDataToRLPEncoding(tx) + if err != nil { + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + } + batchBytes = append(batchBytes, rlpTxData...) 
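// note: only L2 transactions are written into the blob; L1 message txs are skipped
// above because they are already available on L1, and the batch header accounts for
// them through l1MessagePopped and the skippedL1MessageBitmap instead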
+ } + } + + // blob metadata: chunki_size + chunkSize := len(batchBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + + // challenge: compute chunk data hash + chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) + } + + // if we have fewer than maxNumChunksPerBatch chunks, the rest + // of the blob metadata is correctly initialized to 0, + // but we need to add padding to the challenge preimage + for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { + // use the last chunk's data hash as padding + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) + } + + // challenge: compute metadata hash + hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) + copy(challengePreimage[0:], hash[:]) + + // blobBytes represents the compressed blob payload (batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. + if len(batchBytes) > minCompressedDataCheckSize { + // Check compressed data compatibility. + if err = checkCompressedDataCompatibility(blobBytes); err != nil { + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, err + } + } + + if len(blobBytes) > maxEffectiveBlobBytes { + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := makeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+maxNumChunksPerBatch)*common.HashLength:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + if len(pointBytes) > kzgPointByteSize { + return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, blobBytes, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields empty. 
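// The fixed-size header, matching the explicit offsets of the removed codecv2 package
// below (121 bytes minimum), is: version (1) | batchIndex (8) | l1MessagePopped (8) |
// totalL1MessagePopped (8) | dataHash (32) | blobVersionedHash (32) |
// parentBatchHash (32) | skippedL1MessageBitmap (variable length).
// Illustrative round trip, assuming the DABatch interface exposes Encode and Hash as
// implemented by daBatchV1 above:
//
//	full, _ := codec.NewDABatch(batch)                        // blob and z populated
//	headerOnly, _ := codec.NewDABatchFromBytes(full.Encode()) // header fields only
//	// full.Hash() == headerOnly.Hash(), since the hash covers only the encoded header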
+func (d *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { + if len(data) < daBatchV1EncodedMinLength { + return nil, fmt.Errorf("insufficient data for DABatch, expected at least %d bytes but got %d", daBatchV1EncodedMinLength, len(data)) + } + + if CodecVersion(data[daBatchOffsetVersion]) != CodecV2 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV2, data[daBatchOffsetVersion]) + } + + return newDABatchV1( + CodecVersion(data[daBatchOffsetVersion]), // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV1OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV1OffsetL1MessagePopped:daBatchV1OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV1OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV1OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV1OffsetBlobVersionedHash:daBatchV1OffsetParentBatchHash]), // blobVersionedHash + common.BytesToHash(data[daBatchV1OffsetParentBatchHash:daBatchV1OffsetSkippedL1MessageBitmap]), // parentBatchHash + data[daBatchV1OffsetSkippedL1MessageBitmap:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ), nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. +func (d *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob([]*Chunk{c}, d) + if err != nil { + return 0, 0, fmt.Errorf("failed to construct batch payload in blob: %w", err) + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, fmt.Errorf("failed to compress scroll batch bytes: %w", err) + } + return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. +func (d *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob(b.Chunks, d) + if err != nil { + return 0, 0, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + return uint64(len(batchBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// checkCompressedDataCompatibility checks the compressed data compatibility for a batch's chunks. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility. +func (d *DACodecV2) checkCompressedDataCompatibility(chunks []*Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(chunks, d) + if err != nil { + return false, fmt.Errorf("failed to construct batch payload in blob: %w", err) + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return false, fmt.Errorf("failed to compress scroll batch bytes: %w", err) + } + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. 
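// (minCompressedDataCheckSize is presumably 131072 bytes, i.e. 128 KiB, the same
// hard-coded threshold used by the removed codecv2 package below)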
+ if len(batchBytes) <= minCompressedDataCheckSize { + return true, nil + } + if err = checkCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("Compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. +func (d *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return d.checkCompressedDataCompatibility([]*Chunk{c}) +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. +func (d *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return d.checkCompressedDataCompatibility(b.Chunks) +} diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go deleted file mode 100644 index dd00dc9..0000000 --- a/encoding/codecv2/codecv2.go +++ /dev/null @@ -1,403 +0,0 @@ -package codecv2 - -import ( - "crypto/sha256" - "encoding/binary" - "encoding/hex" - "errors" - "fmt" - "math/big" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/scroll-tech/go-ethereum/log" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv1" - "github.com/scroll-tech/da-codec/encoding/zstd" -) - -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = 45 - -const BlockContextByteSize = codecv1.BlockContextByteSize - -// DABlock represents a Data Availability Block. -type DABlock = codecv1.DABlock - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv1.DAChunk - -// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. -type DAChunkRawTx = codecv1.DAChunkRawTx - -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point -} - -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv1.NewDABlock(block, totalL1MessagePoppedBefore) -} - -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv1.NewDAChunk(chunk, totalL1MessagePoppedBefore) -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -func DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - return codecv1.DecodeDAChunksRawTx(bytes) -} - -// NewDABatch creates a DABatch from the provided encoding.Batch. 
-func NewDABatch(batch *encoding.Batch) (*DABatch, error) { - // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { - return nil, errors.New("too many chunks in batch") - } - - if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") - } - - // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // blob payload - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) - if err != nil { - return nil, err - } - - daBatch := DABatch{ - Version: uint8(encoding.CodecV2), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - blob: blob, - z: z, - } - - return &daBatch, nil -} - -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv1.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // challenge digest preimage - // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) - - // the chunk data hash used for calculating the challenge preimage - var chunkDataHash common.Hash - - // blob metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode blob metadata and L2 transactions, - // and simultaneously also build challenge preimage - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - batchBytes = append(batchBytes, rlpTxData...) 
- } - } - - // blob metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - - // challenge: compute chunk data hash - chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // if we have fewer than MaxNumChunks chunks, the rest - // of the blob metadata is correctly initialized to 0, - // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { - // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // challenge: compute metadata hash - hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) - copy(challengePreimage[0:], hash[:]) - - // blobBytes represents the compressed blob payload (batchBytes) - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if !useMockTxData && len(batchBytes) > 131072 { - // Check compressed data compatibility. - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, err - } - } - - if len(blobBytes) > 126976 { - log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") - } - - // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - - // compute blob versioned hash - c, err := kzg4844.BlobToCommitment(blob) - if err != nil { - return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") - } - blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) - - // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) - - // compute z = challenge_digest % BLS_MODULUS - challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) - pointBytes := pointBigInt.Bytes() - - // the challenge point z - var z kzg4844.Point - start := 32 - len(pointBytes) - copy(z[start:], pointBytes) - - return blob, blobVersionedHash, &z, blobBytes, nil -} - -// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - compressedBytes := encoding.BytesFromBlobCanonical(blob) - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - - batchBytes, err := encoding.DecompressScrollBlobToBatch(append(magics, compressedBytes[:]...)) - if err != nil { - return err - } - return codecv1.DecodeTxsFromBytes(batchBytes, chunks, MaxNumChunks) -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields empty. 
-func NewDABatchFromBytes(data []byte) (*DABatch, error) { - if len(data) < 121 { - return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) - } - - b := &DABatch{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], - } - - return b, nil -} - -// Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return encoding.BlobDataProofFromValues(*b.z, y, commitment, proof), nil -} - -// Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { - return b.blob -} - -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) - if err != nil { - return 0, 0, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil -} - -// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) - if err != nil { - return 0, 0, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. 
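The serialization above is symmetric: Encode writes a fixed 121-byte header followed by the variable-length bitmap, and NewDABatchFromBytes reads the same offsets back, which is also why it cannot recover the blob or the challenge point. Restating the layout in one place, with offsets copied from the code above:

// DABatch (CodecV2) wire format: 121-byte header, then the skipped-L1-message bitmap.
//
//   [0]        version                (1 byte)
//   [1:9]      batchIndex             (uint64, big-endian)
//   [9:17]     l1MessagePopped        (uint64, big-endian)
//   [17:25]    totalL1MessagePopped   (uint64, big-endian)
//   [25:57]    dataHash               (32 bytes)
//   [57:89]    blobVersionedHash      (32 bytes)
//   [89:121]   parentBatchHash        (32 bytes)
//   [121:]     skippedL1MessageBitmap (32 bytes per 256 L1 messages)
//
// Hash() is simply keccak256 over these encoded bytes.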
-func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if len(batchBytes) <= 131072 { - return true, nil - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - // Only apply this check when the uncompressed batch data has exceeded 128 KiB. - if len(batchBytes) <= 131072 { - return true, nil - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv1.EstimateChunkL1CommitCalldataSize(c) -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv1.EstimateBatchL1CommitCalldataSize(b) -} - -// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { - return codecv1.EstimateBlockL1CommitGas(b) -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv1.EstimateChunkL1CommitGas(c) -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
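Both compatibility checks above share the same gate: compress the payload with zstd, skip the check entirely while the uncompressed payload is at or below 128 KiB, and report an incompatibility as a negative result rather than an error. A condensed sketch of that shared logic, assuming the same zstd and encoding helpers imported by the file above (the helper name is illustrative):

// checkCompressedCompatibility condenses the shared body of
// CheckChunkCompressedDataCompatibility and CheckBatchCompressedDataCompatibility.
func checkCompressedCompatibility(batchBytes []byte) (bool, error) {
	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
	if err != nil {
		return false, err
	}
	// The compatibility check is only applied once the uncompressed payload
	// exceeds 128 KiB; smaller payloads are accepted unconditionally.
	if len(batchBytes) <= 131072 {
		return true, nil
	}
	if err := encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
		return false, nil // incompatible payload, reported as false rather than an error
	}
	return true, nil
}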
-func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv1.EstimateBatchL1CommitGas(b) -} diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2_test.go similarity index 51% rename from encoding/codecv2/codecv2_test.go rename to encoding/codecv2_test.go index 69713d5..0e6c088 100644 --- a/encoding/codecv2/codecv2_test.go +++ b/encoding/codecv2_test.go @@ -1,73 +1,76 @@ -package codecv2 +package encoding import ( + "crypto/rand" "encoding/hex" - "encoding/json" - "os" + "math" "strings" "testing" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - + "github.com/agiledragon/gomonkey/v2" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" - "github.com/scroll-tech/da-codec/encoding/zstd" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCodecV2BlockEncode(t *testing.T) { - block := &DABlock{} + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block := &daBlockV0{} encoded := hex.EncodeToString(block.Encode()) assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv2.NewDABlock(block2, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv2.NewDABlock(block3, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv2.NewDABlock(block4, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv2.NewDABlock(block5, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) + block6 := readBlockFromJSON(t, 
"testdata/blockTrace_06.json") + daBlock, err = codecv2.NewDABlock(block6, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv2.NewDABlock(block7, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) + encoded = hex.EncodeToString(daBlock.Encode()) assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + // sanity check: v0 and v2 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { + for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { blockv0, err := codecv0.NewDABlock(trace, 0) assert.NoError(t, err) encodedv0 := hex.EncodeToString(blockv0.Encode()) - blockv2, err := NewDABlock(trace, 0) + blockv2, err := codecv2.NewDABlock(trace, 0) assert.NoError(t, err) encodedv2 := hex.EncodeToString(blockv2.Encode()) @@ -76,591 +79,898 @@ func TestCodecV2BlockEncode(t *testing.T) { } func TestCodecV2ChunkEncode(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - 
trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) + encoded = hex.EncodeToString(encodedBytes) assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) } func TestCodecV2ChunkHash(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + // chunk with a single empty block - block := DABlock{} - chunk := 
&DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} hash, err := chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) hash, err = chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) hash, err = chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + hash, err = chunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) _, err = chunk.Hash() assert.Error(t, err) - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) 
assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv2.NewDAChunk(originalChunk, 0) assert.NoError(t, err) - hash, err = chunk.Hash() + hash, err = daChunk.Hash() assert.NoError(t, err) assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) } func TestCodecV2BatchEncode(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV2)} - encoded := hex.EncodeToString(batch.Encode()) + daBatchV1 := &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV2, + }, + } + encoded := hex.EncodeToString(daBatchV1.Encode()) assert.Equal(t, "02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd70000000000000000000000000000000000000000000000000000000000000000", encoded) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := 
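The chunk-hash assertions above pin down what goes into the hash: the block contexts without their trailing numL1Messages field, plus the hashes of L1 message transactions, while L2 transaction hashes are ignored. A rough sketch of that composition follows; it is not the package's exact implementation, and it assumes the L1 tx hashes have already been collected in block order using the common and crypto imports of this test file.

// chunkHashSketch: keccak256 over the truncated block contexts followed by
// the L1 message tx hashes only, mirroring the assertions above.
func chunkHashSketch(blockContexts [][]byte, l1TxHashes []common.Hash) common.Hash {
	var preimage []byte
	for _, ctx := range blockContexts {
		preimage = append(preimage, ctx[:58]...) // drop the 2-byte numL1Messages field
	}
	for _, h := range l1TxHashes {
		preimage = append(preimage, h[:]...)
	}
	return crypto.Keccak256Hash(preimage)
}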
readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "02000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad40000000000000000000000000000000000000000000000000000000000000000", encoded) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc53394137000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 
:= &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "02000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc5339413700000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa002900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) + encoded = hex.EncodeToString(daBatch.Encode()) assert.Equal(t, "020000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb3363200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) } func TestCodecV2BatchHash(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV2)} - assert.Equal(t, "0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592", batch.Hash().Hex()) + daBatchV1 := &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV2, + }, + } + assert.Equal(t, common.HexToHash("0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592"), daBatchV1.Hash()) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x57553c35f981626b4d1a73c816aa8d8fad83c460fc049c5792581763f7e21b13", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x57553c35f981626b4d1a73c816aa8d8fad83c460fc049c5792581763f7e21b13"), daBatch.Hash()) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x0f8e5b5205c5d809bf09047f37b558f4eb388c9c4eb23291cd97810d06654409", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x0f8e5b5205c5d809bf09047f37b558f4eb388c9c4eb23291cd97810d06654409"), daBatch.Hash()) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xc59155dc0ae7d7d3fc29f0a9c6042f14dc58e3a1f9c0417f52bac2c4a8b33014", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xc59155dc0ae7d7d3fc29f0a9c6042f14dc58e3a1f9c0417f52bac2c4a8b33014"), daBatch.Hash()) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x417509641fb0c0d1c07d80e64aab13934f828cb4f09608722bf8126a68c04617", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x417509641fb0c0d1c07d80e64aab13934f828cb4f09608722bf8126a68c04617"), daBatch.Hash()) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xe9c82b48e2a54c9206f57897cb870536bd22066d2af3d03aafe8a6a39add7635", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xe9c82b48e2a54c9206f57897cb870536bd22066d2af3d03aafe8a6a39add7635"), daBatch.Hash()) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x5e3d20c5b3f56cc5a28e7431241b3ce3d484b12cfb0b3228f378b196beeb3a53", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x5e3d20c5b3f56cc5a28e7431241b3ce3d484b12cfb0b3228f378b196beeb3a53"), daBatch.Hash()) - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = 
codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x19b99491401625d92e16f7df6705219cc55e48e4b08db7bc4020e6934076f5f7", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0x19b99491401625d92e16f7df6705219cc55e48e4b08db7bc4020e6934076f5f7"), daBatch.Hash()) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xc5daf2ea5a3107c13b2994fb547336a7dca25cd352c051b6d9b9759d77e95fd2", batch.Hash().Hex()) + assert.Equal(t, common.HexToHash("0xc5daf2ea5a3107c13b2994fb547336a7dca25cd352c051b6d9b9759d77e95fd2"), daBatch.Hash()) +} + +func TestCodecV2NewDABatchFromBytes(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + testCases := []struct { + name string + jsonFile string + }{ + {"Empty Batch", ""}, + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var batch *Batch + var daBatch DABatch + var createErr1 error + + if tc.jsonFile == "" { + // Empty daBatch + daBatch = &daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV2, + }, + } + } else { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + batch = &Batch{Chunks: []*Chunk{chunk}} + daBatch, createErr1 = codecv2.NewDABatch(batch) + assert.NoError(t, createErr1) + } + + // Encode the DABatch + encodedBytes := daBatch.Encode() + + // Decode the bytes back into a DABatch + decodedDABatch, createErr2 := codecv2.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) + + // Compare the hashes of the original and decoded DABatch + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) + }) + } + + // Test with multiple blocks and chunks in a batch + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + chunk2 := &Chunk{Blocks: []*Block{block4, block5}} + batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}} + daBatch, err := codecv2.NewDABatch(batch) + assert.NoError(t, err) + + encodedBytes := daBatch.Encode() + decodedDABatch, err := codecv2.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, err) + + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash()) } func TestCodecV2BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: 
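Stripped of the table-driven scaffolding, the new round-trip test reduces to the pattern below. Because only the header and bitmap are serialized, the decoded batch hashes identically to the original but carries no blob or challenge point, so it cannot produce a blob proof. This is a sketch against the public codec API used in these tests; the function name is illustrative.

// batchHeaderRoundTrip re-encodes a batch header and checks that the hash survives.
func batchHeaderRoundTrip(batch *Batch) (bool, error) {
	codec, err := CodecFromVersion(CodecV2)
	if err != nil {
		return false, err
	}
	daBatch, err := codec.NewDABatch(batch)
	if err != nil {
		return false, err
	}
	// Only the 121-byte header plus the bitmap round-trips; blob-related
	// fields (blob, commitment, challenge point z) are left empty.
	decoded, err := codec.NewDABatchFromBytes(daBatch.Encode())
	if err != nil {
		return false, err
	}
	return decoded.Hash() == daBatch.Hash(), nil
}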
[]*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = 
codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} + +func TestCodecV2CalldataSizeEstimation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv2.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv2.EstimateBatchL1CommitCalldataSize(batch5) assert.NoError(t, err) - assert.Equal(t, 
"0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) + assert.Equal(t, uint64(180), batch5CalldataSize) } -func TestCodecV2CompressDecompress(t *testing.T) { - blobString := "00" + "0001" + "000000e6" + "00000000" + "00000000" + "00000000" + "00000000" + "00000000" + "00000000" + "00" + "00" + "000000" + "00000000" + "00000000" + "00000000" + "00000000" + "00000000" + "00000000" + "00000000" + - // tx payload - "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1" - blobBytes, err := hex.DecodeString(blobString) +func TestCodecV2CommitGasEstimation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv2.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(1124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv2.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv2.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(1124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv2.EstimateBatchL1CommitGas(batch3) assert.NoError(t, err) + assert.Equal(t, uint64(157649), batch3Gas) - compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv2.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv2.EstimateBatchL1CommitGas(batch4) assert.NoError(t, err) + assert.Equal(t, uint64(160302), batch4Gas) - blob, err := encoding.MakeBlobCanonical(compressed) + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv2.EstimateChunkL1CommitGas(chunk5) assert.NoError(t, err) + assert.Equal(t, uint64(2202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv2.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(3745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv2.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(163087), batch5Gas) +} - res := encoding.BytesFromBlobCanonical(blob) - compressedBytes := res[:] - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - compressedBytes = append(magics, compressedBytes...) 
+func TestCodecV2BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) - decompressedBlobBytes, err := encoding.DecompressScrollBlobToBatch(compressedBytes) + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(237), chunk2BlobSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(237), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2933), chunk3BlobSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2933), batch3BlobSize) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) assert.NoError(t, err) - assert.Equal(t, blobBytes, decompressedBlobBytes) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(54), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(54), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, chunk5BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3149), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(54), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3186), batch5BlobSize) } -func TestCodecV2Decode(t *testing.T) { - trace0 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - trace1 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk0 := &encoding.Chunk{Blocks: []*encoding.Block{trace0, trace1}} - daChunk0, err := NewDAChunk(chunk0, 0) +func TestCodecV2BatchL1MessagePopped(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := 
codecv2.NewDABatch(batch) assert.NoError(t, err) - chunkBytes0 := daChunk0.Encode() + assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped) - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk1 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - daChunk1, err := NewDAChunk(chunk1, 0) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - chunkBytes1 := daChunk1.Encode() + assert.Equal(t, uint64(0), daBatch.(*daBatchV1).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV1).totalL1MessagePopped) - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk0, chunk1}} - batch, err := NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) + assert.Equal(t, uint64(11), daBatch.(*daBatchV1).l1MessagePopped) + assert.Equal(t, uint64(11), daBatch.(*daBatchV1).totalL1MessagePopped) - daChunksRawTx, err := DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - // assert number of chunks - assert.Equal(t, 2, len(daChunksRawTx)) + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) - // assert block in first chunk - assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) - assert.Equal(t, daChunk0.Blocks[0], daChunksRawTx[0].Blocks[0]) - assert.Equal(t, daChunk0.Blocks[1], daChunksRawTx[0].Blocks[1]) + batch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(5), daBatch.(*daBatchV1).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) - // assert block in second chunk - assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) - daChunksRawTx[1].Blocks[0].BaseFee = nil - assert.Equal(t, daChunk1.Blocks[0], daChunksRawTx[1].Blocks[0]) - daChunksRawTx[1].Blocks[1].BaseFee = nil - assert.Equal(t, daChunk1.Blocks[1], daChunksRawTx[1].Blocks[1]) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(10), daBatch.(*daBatchV1).l1MessagePopped) // skip 7, include 3 + assert.Equal(t, uint64(10), daBatch.(*daBatchV1).totalL1MessagePopped) - blob := batch.Blob() - err = DecodeTxsFromBlob(blob, daChunksRawTx) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) + assert.Equal(t, uint64(257), daBatch.(*daBatchV1).l1MessagePopped) // skip 255, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped) - // assert transactions in first chunk - assert.Equal(t, 
2, len(daChunksRawTx[0].Transactions)) - // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs - assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) - assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + batch.TotalL1MessagePoppedBefore = 1 + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(256), daBatch.(*daBatchV1).l1MessagePopped) // skip 254, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV1).totalL1MessagePopped) - assert.EqualValues(t, daChunk0.Transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) - assert.EqualValues(t, daChunk0.Transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) - // assert transactions in second chunk - assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) - // here number of transactions in encoded and decoded chunks may be different, because decodec chunks doesn't contain l1msgs - assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) - assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) + batch.TotalL1MessagePoppedBefore = 10 + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(32), daBatch.(*daBatchV1).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV1).totalL1MessagePopped) } -func TestCodecV2BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) +func TestCodecV2BlobEncodingAndHashing(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) - assert.Equal(t, "0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), daBatch.(*daBatchV1).blobVersionedHash) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, 
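TestCodecV2BatchL1MessagePopped checks one identity, the same one the removed NewDABatch computed earlier in this diff: a batch's l1MessagePopped counts every L1 queue index the batch consumes, skipped or included, relative to TotalL1MessagePoppedBefore, while totalL1MessagePopped is the absolute queue position after the batch. Working the blockTrace_05 case from the comments above:

//   l1MessagePopped      = totalL1MessagePoppedAfter - TotalL1MessagePoppedBefore
//   totalL1MessagePopped = totalL1MessagePoppedAfter
//
// blockTrace_05 skips 37 queue indices and includes 5, so the batch ends at
// queue position 42:
//   TotalL1MessagePoppedBefore = 0  -> l1MessagePopped = 42 - 0  = 42, total = 42
//   TotalL1MessagePoppedBefore = 37 -> l1MessagePopped = 42 - 37 = 5,  total = 42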
err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f6026868580822093909355908416815
2200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, "0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), daBatch.(*daBatchV1).blobVersionedHash) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded = 
strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, "0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), daBatch.(*daBatchV1).blobVersionedHash) // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV1).blobVersionedHash) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV1).blobVersionedHash) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV1).blobVersionedHash) - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = 
NewDABatch(originalBatch) + // 45 chunks + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) - assert.Equal(t, "0x0140a7ef703ef625ee71e6a580a8ff05cab32c3f3402bd37a1b715f5810760c9", batch.BlobVersionedHash.Hex()) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") + assert.Equal(t, "006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), daBatch.(*daBatchV1).blobVersionedHash) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV1).blob[:]), "0") assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, "0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632", batch.BlobVersionedHash.Hex()) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), daBatch.(*daBatchV1).blobVersionedHash) } -func TestCodecV2BatchChallenge(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) +func TestCodecV2BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, 
"testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv2.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) + assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv2.NewDABatch(batch) assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) + assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData)) - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 
:= &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv2.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) + assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) } -func TestCodecV2ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2Gas := EstimateBlockL1CommitGas(block2) - assert.Equal(t, uint64(960), block2Gas) - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(1124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(157649), batch2Gas) +func TestCodecV2DecodeDAChunksRawTx(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3Gas := EstimateBlockL1CommitGas(block3) - assert.Equal(t, uint64(960), block3Gas) - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(1124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(157649), batch3Gas) + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv2.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4Gas := EstimateBlockL1CommitGas(block4) - assert.Equal(t, uint64(3572), block4Gas) - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(3745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(160302), batch4Gas) + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv2.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() + assert.NoError(t, err) - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(2202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(3745), chunk6Gas) - batch5 := 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(163087), batch5Gas) -} + batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + daBatch, err := codecv2.NewDABatch(batch) + assert.NoError(t, err) -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) + daChunksRawTx, err := codecv2.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // assert block in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1]) + + // assert block in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) + daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0]) + daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) + + blob := daBatch.Blob() + err = codecv2.DecodeTxsFromBlob(blob, daChunksRawTx) + assert.NoError(t, err) + + // assert transactions in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions)) + // here the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + + // assert transactions in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) + // here the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) } func TestCodecV2BatchStandardTestCases(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) + // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks.
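+ // For example, assuming maxEffectiveBlobBytes keeps the previous literal value of 126976 and MaxNumChunksPerBatch() returns 45 for CodecV2, this works out to 5*126976 - (45*4 + 2) = 634698 bytes of row data.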
+ nRowsData := 5*maxEffectiveBlobBytes - (codecv2.MaxNumChunksPerBatch()*4 + 2) + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } for _, tc := range []struct { chunks [][]string @@ -670,19 +980,19 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { expectedBatchHash string }{ // single empty chunk - {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "52003e842cce4d84085e1f884ac416f19f2424b5d71df7717159ffdcf47803cc"}, + {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "48c1e31334d6d6dff9f5b38f703c147dc5f0893882fbdcb22ef5fcef0f25f2ff"}, // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "f143f754efac11fd7b1be1828a463e2fc92fb3adc9ba937f88ff7a4d3b5219e8"}, + {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "8918c151720f8497e29ed68ab94a43a32689dcd96784784b81c0fef36b751142"}, // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "130c06cd2a0ec4c5f4d734bd9c61cf9d4acd150d347379d3fd42e2d93bf27c49"}, + {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "6a3e8f32ea6f3025679a912992a7fa813849a7e1f46c8d413fd14d188d497bdb"}, // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "85426aad824f708bd55162b5bd0dbf800ae472d887f2c286ba4db19869d3dd20"}, + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: 
"cfbe74dd07beed8dd9ee2be06ebd869e000148f1886ad6134e6609a3e09520e6"}, // empty chunk followed by non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "dd1a539175e3b24b2a1da37db2fb0b77c7eb7e69e25c0cfd2b5d9918aba7fd07"}, + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "f042d7da2c8af0d9edadd2997ddfc28af646afc513489ac0ab8881c9b18e71bc"}, // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "cf067728aa2230e43897683e32e9bb6ec044ae37727ce206f10b707b81197b13"}, + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "f9c741682ed579af9c9f21d1c90af830276731ae699ee263fa1278076839e015"}, // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "fb0c3918408cca7292d55fb93bc6416fe8c06c3b28336bd4a3264f1be5957e07"}, + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "d0846fec4a9158499553e4824cf0ff3fdb01fab93494883d4f8911719ff163ee"}, // max number of chunks all non-empty {chunks: [][]string{ {"0x0a"}, @@ -730,33 +1040,48 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: 
"8b532d0fd0497a7041d72e0cba750c6ac0cfbeb5160d7c35e52b04d3935be578"}, + }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "f20c05457800dc52d87858d72a2b54c223f401b150af00b47994964a348ac96b"}, // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "51aac18f89ddafb75abb0e0c665e64e68421d5cf6b0cc87ce55d4b29e3a576dd"}, + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "9effb4102f20c8634655cee9f109215834e7828beadaebe167595f1d1b871689"}, // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "15bc741d48ac712d82418be97705c269816696eba6dcdc1c3ab821d482d005ee"}, + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "9631c4dcdbd404272b4682db4592a78e7cd8bf81da34160cc6ff0e9eb4703f70"}, // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "b1149c99e4a0e576bda7ae518420e0c525efc72011f9c2f8c7b05b7fd3e0d3c2"}, + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "4b369fcaef4a6fd5dbd6bd89e3983f2ff72abf0a19fdabf207c314369500d8e9"}, // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, 
nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "e57e3e1fbb3cb5bb8f9362a66621b0b644d71ca50557b42041c0749fa5e05ea8"}, + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "5b116a800222102b4cca07a377de69355c33eb3f5262a3b6b1eab37ee680c04a"}, // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "8b13d4535977c990d66742293444b6e48e4252698045d66920fd7d4833688444"}, + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "27af1cbf60123f73bef96464839578875a8bebf39edc786914aa7a0c3a4e3a44"}, } { - chunks := []*encoding.Chunk{} + chunks := []*Chunk{} for _, c := range tc.chunks { - 
block := &encoding.Block{Transactions: []*types.TransactionData{}} + block := &Block{Transactions: []*types.TransactionData{}} for _, data := range c { tx := &types.TransactionData{Type: 0xff, Data: data} block.Transactions = append(block.Transactions, tx) } - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} + chunk := &Chunk{Blocks: []*Block{block}} chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv2.(*DACodecV2).constructBlobPayload(chunks, codecv2.MaxNumChunksPerBatch()) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -770,280 +1095,140 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) dataBytes := make([]byte, 32*len(chunks)) for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) } dataHash := crypto.Keccak256Hash(dataBytes) - batch := DABatch{ - Version: uint8(encoding.CodecV3), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - blob: blob, - z: z, + batch := daBatchV1{ + daBatchV0: daBatchV0{ + version: CodecV2, + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, } - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) } } -func TestCodecV2BatchBlobDataProof(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err := batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) +func TestDACodecV2SimpleMethods(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) + t.Run("Version", func(t *testing.T) { + version := codecv2.Version() + assert.Equal(t, CodecV2, version) + }) +} - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) +func TestCodecV2ChunkCompressedDataCompatibilityCheck(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + // chunk with a single empty block + emptyBlock := &Block{} + emptyChunk := &Chunk{Blocks: []*Block{emptyBlock}} - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + compatible, err := codecv2.CheckChunkCompressedDataCompatibility(emptyChunk) assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + assert.True(t, compatible) - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() + txChunk := &Chunk{ + Blocks: []*Block{ + { + Transactions: []*types.TransactionData{ + {Type: types.L1MessageTxType}, + }, + }, + }, + } + compatible, err = 
codecv2.CheckChunkCompressedDataCompatibility(txChunk) assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + assert.True(t, compatible) - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData)) + testCases := []struct { + name string + jsonFile string + }{ + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + compatible, err := codecv2.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } } -func TestCodecV2BatchSkipBitmap(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, 
"../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000003ff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001fffffffff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000001dd", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001ffffffbff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = 
NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000007fffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} +func TestCodecV2BatchCompressedDataCompatibilityCheck(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) -func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(237), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(237), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2933), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + // empty batch + emptyBatch := &Batch{} + compatible, err := codecv2.CheckBatchCompressedDataCompatibility(emptyBatch) assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2933), batch3BlobSize) + assert.True(t, compatible) - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) - assert.Equal(t, uint64(54), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(54), batch4BlobSize) + testCases := []struct { + name string + jsonFiles []string + }{ + {"Single Block 02", []string{"testdata/blockTrace_02.json"}}, + {"Single Block 03", []string{"testdata/blockTrace_03.json"}}, + {"Single Block 04", []string{"testdata/blockTrace_04.json"}}, + {"Single Block 05", []string{"testdata/blockTrace_05.json"}}, + {"Single Block 06", []string{"testdata/blockTrace_06.json"}}, + {"Single Block 07", []string{"testdata/blockTrace_07.json"}}, + {"Multiple Blocks And Chunks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}}, + } - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3149), chunk5BlobSize) - chunk6 := 
&encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(54), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3186), batch5BlobSize) + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var chunks []*Chunk + for _, jsonFile := range tc.jsonFiles { + block := readBlockFromJSON(t, jsonFile) + chunks = append(chunks, &Chunk{Blocks: []*Block{block}}) + } + batch := &Batch{Chunks: chunks} + compatible, err := codecv2.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } } -func TestCodecV2ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) +func TestCodecV2FailedCompressedDataCompatibilityCheck(t *testing.T) { + codecv2, err := CodecFromVersion(CodecV2) + require.NoError(t, err) - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) + patches := gomonkey.ApplyFunc(constructBatchPayloadInBlob, func(_ []*Chunk, _ Codec) ([]byte, error) { + randomBytes := make([]byte, minCompressedDataCheckSize+1) + _, readerr := rand.Read(randomBytes) + require.NoError(t, readerr) + return []byte(hex.EncodeToString(randomBytes)), nil + }) + defer patches.Reset() - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) + compatible, err := codecv2.CheckChunkCompressedDataCompatibility(nil) assert.NoError(t, err) + 
assert.False(t, compatible)
-	block := &encoding.Block{}
-	assert.NoError(t, json.Unmarshal(data, block))
-	return block
+	compatible, err = codecv2.CheckBatchCompressedDataCompatibility(&Batch{})
+	assert.NoError(t, err)
+	assert.False(t, compatible)
 }
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
new file mode 100644
index 0000000..3ea65f1
--- /dev/null
+++ b/encoding/codecv3.go
@@ -0,0 +1,147 @@
+package encoding
+
+import (
+	"encoding/binary"
+	"encoding/json"
+	"errors"
+	"fmt"
+
+	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/params"
+)
+
+type DACodecV3 struct {
+	DACodecV2
+}
+
+// Version returns the codec version.
+func (d *DACodecV3) Version() CodecVersion {
+	return CodecV3
+}
+
+// NewDABatch creates a DABatch from the provided Batch.
+func (d *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
+	// this encoding can only support a fixed number of chunks per batch
+	if len(batch.Chunks) > d.MaxNumChunksPerBatch() {
+		return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch())
+	}
+
+	if len(batch.Chunks) == 0 {
+		return nil, errors.New("batch must contain at least one chunk")
+	}
+
+	if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 {
+		return nil, errors.New("too few blocks in last chunk of the batch")
+	}
+
+	// batch data hash
+	dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	if err != nil {
+		return nil, err
+	}
+
+	// skipped L1 messages bitmap
+	skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	if err != nil {
+		return nil, err
+	}
+
+	// blob payload
+	blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch())
+	if err != nil {
+		return nil, err
+	}
+
+	lastChunk := batch.Chunks[len(batch.Chunks)-1]
+	lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1]
+
+	if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore {
+		return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore)
+	}
+	l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore
+
+	return newDABatchV3(
+		CodecV3,                   // version
+		batch.Index,               // batchIndex
+		l1MessagePopped,           // l1MessagePopped
+		totalL1MessagePoppedAfter, // totalL1MessagePopped
+		lastBlock.Header.Time,     // lastBlockTimestamp
+		dataHash,                  // dataHash
+		batch.ParentBatchHash,     // parentBatchHash
+		blobVersionedHash,         // blobVersionedHash
+		skippedL1MessageBitmap,    // skippedL1MessageBitmap
+		blob,                      // blob
+		z,                         // z
+		blobBytes,                 // blobBytes
+	)
+}
+
+// NewDABatchFromBytes decodes the given byte slice into a DABatch.
+// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty.
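+//
+// For reference, a sketch of the expected byte layout, assuming the daBatchV3Offset*
+// constants keep the same 193-byte encoding as the standalone codecv3 package removed
+// further below in this diff:
+//
+//	data[0]       version
+//	data[1:9]     batch index
+//	data[9:17]    L1 messages popped in this batch
+//	data[17:25]   total L1 messages popped after this batch
+//	data[25:57]   data hash
+//	data[57:89]   blob versioned hash
+//	data[89:121]  parent batch hash
+//	data[121:129] last block timestamp
+//	data[129:193] blob data proof (z and y, two bytes32 values)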
+func (d *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
+	if len(data) != daBatchV3EncodedLength {
+		return nil, fmt.Errorf("invalid data length for DABatch, expected %d bytes but got %d", daBatchV3EncodedLength, len(data))
+	}
+
+	if CodecVersion(data[daBatchOffsetVersion]) != CodecV3 {
+		return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV3, data[daBatchOffsetVersion])
+	}
+
+	return newDABatchV3WithProof(
+		CodecVersion(data[daBatchOffsetVersion]),                                                          // version
+		binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped]),             // batchIndex
+		binary.BigEndian.Uint64(data[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped]), // l1MessagePopped
+		binary.BigEndian.Uint64(data[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash]),          // totalL1MessagePopped
+		binary.BigEndian.Uint64(data[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof]),     // lastBlockTimestamp
+		common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]),                  // dataHash
+		common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]),        // parentBatchHash
+		common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]),         // blobVersionedHash
+		nil, // skippedL1MessageBitmap
+		nil, // blob
+		nil, // z
+		nil, // blobBytes
+		[2]common.Hash{ // blobDataProof
+			common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]),
+			common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]),
+		},
+	), nil
+}
+
+// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately.
+func (d *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) {
+	// Reuse the V2 implementation; the actual gas cost differs slightly, but this is sufficient
+	// for estimation in practice, since extraGasCost over-estimates the gas cost.
+	totalL1CommitGas, err := d.DACodecV2.EstimateChunkL1CommitGas(c)
+	if err != nil {
+		return 0, fmt.Errorf("failed to estimate L1 commit gas for chunk: %w", err)
+	}
+	totalL1CommitGas += params.BlobTxPointEvaluationPrecompileGas // plus gas cost for the point-evaluation precompile call.
+	return totalL1CommitGas, nil
+}
+
+// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately.
+func (d *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
+	// Reuse the V2 implementation; the actual gas cost differs slightly, but this is sufficient
+	// for estimation in practice, since extraGasCost over-estimates the gas cost.
+	totalL1CommitGas, err := d.DACodecV2.EstimateBatchL1CommitGas(b)
+	if err != nil {
+		return 0, fmt.Errorf("failed to estimate L1 commit gas for batch: %w", err)
+	}
+	totalL1CommitGas += params.BlobTxPointEvaluationPrecompileGas // plus gas cost for the point-evaluation precompile call.
+	return totalL1CommitGas, nil
+}
+
+// JSONFromBytes converts the bytes to a daBatchV3 and then marshals it to JSON.
+func (d *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := d.NewDABatchFromBytes(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON, version %d, hash %s: %w", batch.Version(), batch.Hash(), err) + } + + return jsonBytes, nil +} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go deleted file mode 100644 index da184ea..0000000 --- a/encoding/codecv3/codecv3.go +++ /dev/null @@ -1,283 +0,0 @@ -package codecv3 - -import ( - "encoding/binary" - "encoding/hex" - "errors" - "fmt" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv2" -) - -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = codecv2.MaxNumChunks - -// DABlock represents a Data Availability Block. -type DABlock = codecv2.DABlock - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv2.DAChunk - -// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. -type DAChunkRawTx = codecv2.DAChunkRawTx - -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point - - // for batch task - blobBytes []byte -} - -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv2.NewDABlock(block, totalL1MessagePoppedBefore) -} - -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv2.NewDAChunk(chunk, totalL1MessagePoppedBefore) -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -func DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - return codecv2.DecodeDAChunksRawTx(bytes) -} - -// NewDABatch creates a DABatch from the provided encoding.Batch. 
-func NewDABatch(batch *encoding.Batch) (*DABatch, error) { - // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { - return nil, errors.New("too many chunks in batch") - } - - if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") - } - - if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { - return nil, errors.New("too few blocks in last chunk of the batch") - } - - // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) - if err != nil { - return nil, err - } - - lastChunk := batch.Chunks[len(batch.Chunks)-1] - lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - - daBatch := DABatch{ - Version: uint8(encoding.CodecV3), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, - } - - daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() - if err != nil { - return nil, err - } - - return &daBatch, nil -} - -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv2.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return codecv2.ConstructBlobPayload(chunks, useMockTxData) -} - -// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - return codecv2.DecodeTxsFromBlob(blob, chunks) -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { - if len(data) != 193 { - return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) - } - - b := &DABatch{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), - BlobDataProof: [2]common.Hash{ - common.BytesToHash(data[129:161]), - common.BytesToHash(data[161:193]), - }, - } - - return b, nil -} - -// Encode serializes the DABatch into bytes. 
-func (b *DABatch) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { - if b.blob == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") - } - if b.z == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") - } - - _, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of result: - // | z | y | - // |---------|---------| - // | bytes32 | bytes32 | - var result [2]common.Hash - result[0] = common.BytesToHash(b.z[:]) - result[1] = common.BytesToHash(y[:]) - - return result, nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return encoding.BlobDataProofFromValues(*b.z, y, commitment, proof), nil -} - -// Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatch) BlobBytes() []byte { - return b.blobBytes -} - -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) -} - -// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b) -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
-func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - return codecv2.CheckChunkCompressedDataCompatibility(c) -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - return codecv2.CheckBatchCompressedDataCompatibility(b) -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv2.EstimateChunkL1CommitCalldataSize(c) -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv2.EstimateBatchL1CommitCalldataSize(b) -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv2.EstimateChunkL1CommitGas(c) + 50000 // plus 50000 for the point-evaluation precompile call. -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv2.EstimateBatchL1CommitGas(b) + 50000 // plus 50000 for the point-evaluation precompile call. -} diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go deleted file mode 100644 index 2a917fd..0000000 --- a/encoding/codecv3/codecv3_test.go +++ /dev/null @@ -1,1099 +0,0 @@ -package codecv3 - -import ( - "encoding/hex" - "encoding/json" - "os" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV3BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, 
"000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v3 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv3, err := NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv3 := hex.EncodeToString(blockv3.Encode()) - - assert.Equal(t, encodedv0, encodedv3) - } -} - -func TestCodecV3ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, 
"01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV3ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", 
hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func TestCodecV3BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV3)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, "03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - 
assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) -} - -func TestCodecV3BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV3)} - assert.Equal(t, "0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0", batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", 
batch.Hash().Hex()) -} - -func TestCodecV3BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) -} - -func TestCodecV3BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) - assert.Equal(t, "0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7", batch.BlobVersionedHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589
b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, "0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4", batch.BlobVersionedHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, "0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c", batch.BlobVersionedHash.Hex()) - - // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) - assert.Equal(t, "0x0140a7ef703ef625ee71e6a580a8ff05cab32c3f3402bd37a1b715f5810760c9", batch.BlobVersionedHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, "0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632", batch.BlobVersionedHash.Hex()) -} - -func TestCodecV3BatchChallenge(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) -} - -func TestCodecV3ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(51124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(207649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(51124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(207649), batch3Gas) - - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - 
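[Editor's aside; illustrative only, not part of the original patch.] The gas figures asserted in this removed test are easier to read with one observation: the batch-level estimate is not simply the sum of its chunks' estimates. A minimal sketch, using the package-level estimators exercised here (sumChunkCommitGas is a hypothetical helper introduced only for illustration):

	// sumChunkCommitGas adds up the per-chunk L1 commit gas estimates of a batch's chunks.
	func sumChunkCommitGas(chunks []*encoding.Chunk) uint64 {
		var total uint64
		for _, c := range chunks {
			total += EstimateChunkL1CommitGas(c)
		}
		return total
	}

For the two-chunk batch at the end of this test, 52202 + 53745 = 105947, while EstimateBatchL1CommitGas returns 213087, i.e. the batch estimate carries additional per-batch commitment overhead on top of the per-chunk sum.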
assert.Equal(t, uint64(53745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(210302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(52202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(53745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(213087), batch5Gas) -} - -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} - -func TestCodecV3BatchStandardTestCases(t *testing.T) { - // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) - - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - expectedBlobVersionedHash string - expectedBatchHash string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "edde6b1becf302856884f0b9da5879d58eeb822ddab14a06bacd8de9276dbc79"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "4c30ec3d03ecf70c479e802640a185cadf971e61acf68dac149ac73bdc645195"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "31fd0237208587df3ddbea413673b479e2daa84fd1143a519940267c37257b1a"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "0e0e8fd8b4f8ceb0215a29cc8b95750c0d1969706573af8872f397747809a479"}, - // empty chunk followed by non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "d6b97dde29d4b8afb1a036ee54757af4087c939cb96cf17c2720e9f59eff19da"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: 
"5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "3d56e12359c8b565f9cbe1c8f81e848be4635d9df84bc6ef0eb9986a15e08c20"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "2e8078e277221a0d0e235ef825eef02653677bd50e259aeed64af5b95477645c"}, - // max number of chunks all non-empty - {chunks: [][]string{ - {"0x0a"}, - {"0x0a0b"}, - {"0x0a0b0c"}, - {"0x0a0b0c0d"}, - {"0x0a0b0c0d0e"}, - {"0x0a0b0c0d0e0f"}, - {"0x0a0b0c0d0e0f10"}, - {"0x0a0b0c0d0e0f1011"}, - {"0x0a0b0c0d0e0f101112"}, - {"0x0a0b0c0d0e0f10111213"}, - {"0x0a0b0c0d0e0f1011121314"}, - {"0x0a0b0c0d0e0f101112131415"}, - {"0x0a0b0c0d0e0f10111213141516"}, - {"0x0a0b0c0d0e0f1011121314151617"}, - {"0x0a0b0c0d0e0f101112131415161718"}, - {"0x0a0b0c0d0e0f10111213141516171819"}, - {"0x0a0b0c0d0e0f101112131415161718191a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: 
"36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "e366eeacd45fbc2f43756f66d0a8f82f7f390a9aa7795df82e7df2d724856e7e"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "88e6df6a5e1112485995fe5957d57c90ff306343a9d8d80831b7a6c041daf728"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "7bd97fc7c8c7e918029e5bd85d3c9e0335117475c449d5c6dd24e5af9d55cfc6"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "8b50a41e08000b7617de7204d8082870c8446f591fadffcb5190fdeadf47fae5"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "cc0592160b2fcdb58750d29c36662b55437f4bc69ba3d45a965590f534a0228c"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: 
"01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "043a40c8fbc4edb6a820ba4162f1368d157d1d59c07f969b2c584cc6a47385ca"}, - } { - chunks := []*encoding.Chunk{} - - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} - - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) - require.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) - - _, y, err := kzg4844.ComputeProof(blob, *z) - require.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) - dataBytes := make([]byte, 32*len(chunks)) - for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) - } - dataHash := crypto.Keccak256Hash(dataBytes) - - batch := DABatch{ - Version: uint8(encoding.CodecV3), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - LastBlockTimestamp: 192837, - blob: blob, - z: z, - } - - batch.BlobDataProof, err = batch.blobDataProofForPICircuit() - require.NoError(t, err) - - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) - } -} - -func TestCodecV3BatchBlobDataProof(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err := batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, 
trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProofForPointEvaluation() - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) -} - -func TestCodecV3BatchL1MessagePopped(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := 
&encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(237), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(237), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2933), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2933), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) - assert.Equal(t, uint64(54), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(54), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3149), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(54), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3186), batch5BlobSize) -} - -func 
TestCodecV3ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) { - t.Run("Case 1", func(t *testing.T) { - jsonStr := `{ - "version": 3, - "batch_index": 293212, - "l1_message_popped": 7, - "total_l1_message_popped": 904750, - "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", - "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", - "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", - "last_block_timestamp": 1721130505, - "blob_data_proof": [ - "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", - "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(3), batch.Version) - assert.Equal(t, uint64(293212), batch.BatchIndex) - assert.Equal(t, uint64(7), batch.L1MessagePopped) - assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) - assert.Equal(t, 
common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x6c693817a272efd00dd1323a533a114bd0a8c63b55816fde36c5784a4125441d") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 2", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 123, - "l1_message_popped": 0, - "total_l1_message_popped": 0, - "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", - "last_block_timestamp": 1720174236, - "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", - "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", - "blob_data_proof": [ - "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", - "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(123), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) - assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x005661faf2444824b8a3fe1a53958195b197436a0df81b5d1677287bcd1c1923") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 3", func(t *testing.T) { - jsonStr := `{ - "version": 3, - "batch_index": 293205, - "l1_message_popped": 0, - "total_l1_message_popped": 904737, - "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", - "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", - "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", - "last_block_timestamp": 1721129563, - "blob_data_proof": [ - "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", - "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(3), batch.Version) - assert.Equal(t, uint64(293205), batch.BatchIndex) - assert.Equal(t, 
uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0xe86e067f78b1c29c1cc297f6d9fe670c7beea1eebb226d1b8eeb9616a2bcac7e") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv3_test.go b/encoding/codecv3_test.go new file mode 100644 index 0000000..03d4453 --- /dev/null +++ b/encoding/codecv3_test.go @@ -0,0 +1,1428 @@ +package encoding + +import ( + "crypto/rand" + "encoding/hex" + "encoding/json" + "fmt" + "math" + "strings" + "testing" + + "github.com/agiledragon/gomonkey/v2" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCodecV3BlockEncode(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block := &daBlockV0{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + daBlock, err := codecv3.NewDABlock(block2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + daBlock, err = codecv3.NewDABlock(block3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + daBlock, err = codecv3.NewDABlock(block4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block5 := 
readBlockFromJSON(t, "testdata/blockTrace_05.json") + daBlock, err = codecv3.NewDABlock(block5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + daBlock, err = codecv3.NewDABlock(block6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + daBlock, err = codecv3.NewDABlock(block7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBlock.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + codecv0, err := CodecFromVersion(CodecV0) + require.NoError(t, err) + + // sanity check: v0 and v3 block encodings are identical + for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} { + blockv0, err := codecv0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv3, err := codecv3.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv3 := hex.EncodeToString(blockv3.Encode()) + + assert.Equal(t, encodedv0, encodedv3) + } +} + +func TestCodecV3ChunkEncode(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = 
&Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV3ChunkHash(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + 
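[Editor's aside; illustrative only, not part of the original patch.] The chunk encodings pinned above can be read directly off the hex: a daChunkV1 encoding is a one-byte block count followed by the concatenated 60-byte daBlockV0 records, and, as the comment above notes, transaction data is not included (it is carried in the blob instead). The apparent per-block layout, inferred from the asserted strings:

	// daBlockV0 record, 60 bytes (offsets in bytes; field order inferred from the hex expectations above):
	//   [0:8]    block number     (big-endian uint64)
	//   [8:16]   timestamp        (big-endian uint64)
	//   [16:48]  baseFee          (32 bytes, left-padded)
	//   [48:56]  gasLimit         (big-endian uint64)
	//   [56:58]  numTransactions  (big-endian uint16)
	//   [58:60]  numL1Messages    (big-endian uint16)

For example, the blockTrace_04 record above ends in 000c000b: 12 transactions in total, 11 of them L1 messages.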
daChunk, err := codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv3.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} + +func TestCodecV3BatchEncode(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // empty daBatch + daBatchV3 := &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV3, + }, + } + encoded := hex.EncodeToString(daBatchV3.Encode()) + assert.Equal(t, "03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) + 
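[Editor's aside; illustrative only, not part of the original patch.] The 193-byte daBatchV3 headers pinned in this test decode the same way. The apparent layout, inferred from the asserted encodings:

	// daBatchV3 header, 193 bytes (offsets in bytes):
	//   [0]        version (0x03)
	//   [1:9]      batchIndex            (big-endian uint64)
	//   [9:17]     l1MessagePopped       (big-endian uint64)
	//   [17:25]    totalL1MessagePopped  (big-endian uint64)
	//   [25:57]    dataHash
	//   [57:89]    blobVersionedHash
	//   [89:121]   parentBatchHash
	//   [121:129]  lastBlockTimestamp    (big-endian uint64)
	//   [129:193]  blobDataProof: apparently the KZG challenge z followed by the claimed evaluation y

For instance, the empty-batch string above is all zeros apart from the leading version byte, and in the non-empty cases the trailing 64 bytes match the z/y values seen in the removed TestCodecV3BatchChallenge and TestCodecV3BatchBlobDataProof expectations earlier in this diff.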
assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, 
"030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) +} + +func TestCodecV3BatchHash(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // empty daBatch + daBatchV3 := &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV3, + }, + } + assert.Equal(t, common.HexToHash("0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0"), daBatchV3.Hash()) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc"), daBatch.Hash()) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, 
chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb"), daBatch.Hash()) +} + +func TestCodecV3NewDABatchFromBytes(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + testCases := []struct { + name string + jsonFile string + }{ + {"Empty Batch", ""}, + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var batch *Batch + var daBatch DABatch + var createErr1 error + + if tc.jsonFile == "" { + // Empty daBatch + daBatch = &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV3, + }, + } + } else { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + batch = &Batch{Chunks: []*Chunk{chunk}} + daBatch, createErr1 = codecv3.NewDABatch(batch) + assert.NoError(t, createErr1) + } + + // Encode the DABatch + encodedBytes := daBatch.Encode() + + // Decode the bytes back into a DABatch + decodedDABatch, createErr2 := codecv3.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) + + // Compare the hashes of the original and decoded DABatch + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) + }) + } + + // Test with multiple blocks and chunks in a batch + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + chunk2 := &Chunk{Blocks: []*Block{block4, block5}} + batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + + encodedBytes := daBatch.Encode() + decodedDABatch, err := codecv3.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, err) + + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash()) +} + +func TestCodecV3BatchDataHash(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := 
&Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} + +func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) { + t.Run("Case 1", func(t *testing.T) { + expectedJsonStr := `{ + "version": 3, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 3, + batchIndex: 293212, + l1MessagePopped: 7, + totalL1MessagePopped: 904750, + dataHash: common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + lastBlockTimestamp: 1721130505, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), + common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = 
json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 2", func(t *testing.T) { + expectedJsonStr := `{ + "version": 4, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 123, + l1MessagePopped: 0, + totalL1MessagePopped: 0, + dataHash: common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), + parentBatchHash: common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), + }, + blobVersionedHash: common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), + lastBlockTimestamp: 1720174236, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), + common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 3", func(t *testing.T) { + expectedJsonStr := `{ + "version": 3, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 3, + batchIndex: 293205, + l1MessagePopped: 0, + totalL1MessagePopped: 904737, + dataHash: common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), + parentBatchHash: common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), + }, + blobVersionedHash: common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), + lastBlockTimestamp: 1721129563, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), + 
common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) +} + +func TestDACodecV3JSONFromBytes(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 3, + batchIndex: 293212, + l1MessagePopped: 7, + totalL1MessagePopped: 904750, + dataHash: common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + lastBlockTimestamp: 1721130505, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), + common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), + }, + } + + outputJSON, err := codecv3.JSONFromBytes(daBatch.Encode()) + require.NoError(t, err, "JSONFromBytes failed") + + var outputMap map[string]interface{} + err = json.Unmarshal(outputJSON, &outputMap) + require.NoError(t, err, "Failed to unmarshal output JSON") + + expectedFields := map[string]interface{}{ + "version": float64(daBatch.version), + "batch_index": float64(daBatch.batchIndex), + "l1_message_popped": float64(daBatch.l1MessagePopped), + "total_l1_message_popped": float64(daBatch.totalL1MessagePopped), + "data_hash": daBatch.dataHash.Hex(), + "blob_versioned_hash": daBatch.blobVersionedHash.Hex(), + "parent_batch_hash": daBatch.parentBatchHash.Hex(), + "last_block_timestamp": float64(daBatch.lastBlockTimestamp), + "blob_data_proof": []interface{}{ + daBatch.blobDataProof[0].Hex(), + daBatch.blobDataProof[1].Hex(), + }, + } + + assert.Len(t, outputMap, len(expectedFields), "Unexpected number of fields in output") + for key, expectedValue := range expectedFields { + assert.Equal(t, expectedValue, outputMap[key], fmt.Sprintf("Mismatch in field %s", key)) + } +} + +func TestCodecV3CalldataSizeEstimation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + 
assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv3.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv3.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} + +func TestCodecV3CommitGasEstimation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv3.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := codecv3.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv3.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv3.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv3.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv3.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv3.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv3.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv3.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(213087), batch5Gas) +} + +func TestCodecV3BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(237), chunk2BlobSize) + batch2 := 
&Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(237), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2933), chunk3BlobSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2933), batch3BlobSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(54), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(54), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, chunk5BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3149), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv3.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(54), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv3.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3186), batch5BlobSize) +} + +func TestCodecV3BatchL1MessagePopped(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(11), 
daBatch.(*daBatchV3).totalL1MessagePopped) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3 + assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(257), daBatch.(*daBatchV3).l1MessagePopped) // skip 255, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 1 + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(256), daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 10 + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) +} + +func TestCodecV3BlobEncodingAndHashing(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) + assert.Equal(t, common.HexToHash("0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7"), daBatch.(*daBatchV3).blobVersionedHash) + + block3 := readBlockFromJSON(t, 
"testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561
000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) + assert.Equal(t, common.HexToHash("0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4"), daBatch.(*daBatchV3).blobVersionedHash) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) + assert.Equal(t, common.HexToHash("0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c"), daBatch.(*daBatchV3).blobVersionedHash) + + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, 
err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV3).blobVersionedHash) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV3).blobVersionedHash) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) + assert.Equal(t, common.HexToHash("0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370"), daBatch.(*daBatchV3).blobVersionedHash) + + // 45 chunks + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "006024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x01fc79efca1213db1aa0183865b0a360dc152662cde34ee6a34e7607b96c1c89"), daBatch.(*daBatchV3).blobVersionedHash) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) + assert.Equal(t, common.HexToHash("0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632"), daBatch.(*daBatchV3).blobVersionedHash) +} + +func TestCodecV3BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) + + // 45 chunks + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "580ec95306dc32ce5d3bda93c43d9a3e24d2fd801aefa5097757777888ff7de5068f96617616075651e4a90e7687cab788cc0bc470d9e4f38f2a4e1bd0949a75b99fd46a5eb5e896e295d823d80622f284deff3c25b56164d227dd2c382b2dbd918ed9c1eef973e9bbcd773b964128abb79bade5d595a2207ddd4062830145abc49c9ca0ca45a3a479934fbf86ffd42a12b17c59932237dee5d6fcd8466baea4", hex.EncodeToString(verifyData)) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv3.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) +} + +func TestCodecV3DecodeDAChunksRawTx(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv3.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv3.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() + assert.NoError(t, err) + + batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + daBatch, err := codecv3.NewDABatch(batch) + assert.NoError(t, err) + + daChunksRawTx, err := codecv3.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx)) + + // assert block in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx[0].Blocks[1]) + + // assert block in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Blocks)) + daChunksRawTx[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx[1].Blocks[0]) + 
daChunksRawTx[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx[1].Blocks[1]) + + blob := daBatch.Blob() + err = codecv3.DecodeTxsFromBlob(blob, daChunksRawTx) + assert.NoError(t, err) + + // assert transactions in first chunk + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions)) + // here the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 2, len(daChunksRawTx[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx[0].Transactions[1])) + + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx[0].Transactions[0][1].Hash().String()) + + // assert transactions in second chunk + assert.Equal(t, 2, len(daChunksRawTx[1].Transactions)) + // here the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 1, len(daChunksRawTx[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx[1].Transactions[1])) +} + +func TestCodecV3BatchStandardTestCases(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // Taking into consideration compression, we allow up to 5x of max blob bytes. + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. + nRowsData := 5*maxEffectiveBlobBytes - (codecv3.MaxNumChunksPerBatch()*4 + 2) + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "edde6b1becf302856884f0b9da5879d58eeb822ddab14a06bacd8de9276dbc79"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "4c30ec3d03ecf70c479e802640a185cadf971e61acf68dac149ac73bdc645195"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "31fd0237208587df3ddbea413673b479e2daa84fd1143a519940267c37257b1a"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "0e0e8fd8b4f8ceb0215a29cc8b95750c0d1969706573af8872f397747809a479"}, + // 
empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "d6b97dde29d4b8afb1a036ee54757af4087c939cb96cf17c2720e9f59eff19da"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "3d56e12359c8b565f9cbe1c8f81e848be4635d9df84bc6ef0eb9986a15e08c20"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "2e8078e277221a0d0e235ef825eef02653677bd50e259aeed64af5b95477645c"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "e366eeacd45fbc2f43756f66d0a8f82f7f390a9aa7795df82e7df2d724856e7e"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "88e6df6a5e1112485995fe5957d57c90ff306343a9d8d80831b7a6c041daf728"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "7bd97fc7c8c7e918029e5bd85d3c9e0335117475c449d5c6dd24e5af9d55cfc6"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "8b50a41e08000b7617de7204d8082870c8446f591fadffcb5190fdeadf47fae5"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "cc0592160b2fcdb58750d29c36662b55437f4bc69ba3d45a965590f534a0228c"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 
100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "043a40c8fbc4edb6a820ba4162f1368d157d1d59c07f969b2c584cc6a47385ca"}, + } { + chunks := []*Chunk{} + + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } + + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv3.(*DACodecV3).constructBlobPayload(chunks, codecv3.MaxNumChunksPerBatch()) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV3, + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + lastBlockTimestamp: 192837, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + batch.blobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} + +func TestDACodecV3SimpleMethods(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv3.Version() + assert.Equal(t, CodecV3, version) + }) +} + +func TestCodecV3ChunkCompressedDataCompatibilityCheck(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // chunk with a single empty block + emptyBlock := &Block{} + emptyChunk := &Chunk{Blocks: []*Block{emptyBlock}} + + compatible, err := codecv3.CheckChunkCompressedDataCompatibility(emptyChunk) + assert.NoError(t, err) + assert.True(t, compatible) + + txChunk := &Chunk{ + Blocks: []*Block{ + { + Transactions: []*types.TransactionData{ + {Type: 
types.L1MessageTxType}, + }, + }, + }, + } + compatible, err = codecv3.CheckChunkCompressedDataCompatibility(txChunk) + assert.NoError(t, err) + assert.True(t, compatible) + + testCases := []struct { + name string + jsonFile string + }{ + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + compatible, err := codecv3.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } +} + +func TestCodecV3BatchCompressedDataCompatibilityCheck(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + // empty batch + emptyBatch := &Batch{} + compatible, err := codecv3.CheckBatchCompressedDataCompatibility(emptyBatch) + assert.NoError(t, err) + assert.True(t, compatible) + + testCases := []struct { + name string + jsonFiles []string + }{ + {"Single Block 02", []string{"testdata/blockTrace_02.json"}}, + {"Single Block 03", []string{"testdata/blockTrace_03.json"}}, + {"Single Block 04", []string{"testdata/blockTrace_04.json"}}, + {"Single Block 05", []string{"testdata/blockTrace_05.json"}}, + {"Single Block 06", []string{"testdata/blockTrace_06.json"}}, + {"Single Block 07", []string{"testdata/blockTrace_07.json"}}, + {"Multiple Blocks And Chunks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var chunks []*Chunk + for _, jsonFile := range tc.jsonFiles { + block := readBlockFromJSON(t, jsonFile) + chunks = append(chunks, &Chunk{Blocks: []*Block{block}}) + } + batch := &Batch{Chunks: chunks} + compatible, err := codecv3.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.True(t, compatible) + }) + } +} + +func TestCodecV3FailedCompressedDataCompatibilityCheck(t *testing.T) { + codecv3, err := CodecFromVersion(CodecV3) + require.NoError(t, err) + + patches := gomonkey.ApplyFunc(constructBatchPayloadInBlob, func(_ []*Chunk, _ Codec) ([]byte, error) { + randomBytes := make([]byte, minCompressedDataCheckSize+1) + _, readerr := rand.Read(randomBytes) + require.NoError(t, readerr) + return []byte(hex.EncodeToString(randomBytes)), nil + }) + defer patches.Reset() + + compatible, err := codecv3.CheckChunkCompressedDataCompatibility(nil) + assert.NoError(t, err) + assert.False(t, compatible) + + compatible, err = codecv3.CheckBatchCompressedDataCompatibility(&Batch{}) + assert.NoError(t, err) + assert.False(t, compatible) +} diff --git a/encoding/codecv3_types.go b/encoding/codecv3_types.go new file mode 100644 index 0000000..43f8acf --- /dev/null +++ b/encoding/codecv3_types.go @@ -0,0 +1,206 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// daBatchV3 contains metadata about a batch of DAChunks. 
+type daBatchV3 struct { + daBatchV0 + + blobVersionedHash common.Hash + lastBlockTimestamp uint64 + blobDataProof [2]common.Hash + blob *kzg4844.Blob + z *kzg4844.Point + blobBytes []byte +} + +// newDABatchV3 is a constructor for daBatchV3 that calls blobDataProofForPICircuit internally. +func newDABatchV3(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, + z *kzg4844.Point, blobBytes []byte, +) (*daBatchV3, error) { + daBatch := &daBatchV3{ + daBatchV0: daBatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + lastBlockTimestamp: lastBlockTimestamp, + blob: blob, + z: z, + blobBytes: blobBytes, + } + + proof, err := daBatch.blobDataProofForPICircuit() + if err != nil { + return nil, err + } + + daBatch.blobDataProof = proof + + return daBatch, nil +} + +// newDABatchV3WithProof is a constructor for daBatchV3 that allows directly passing blobDataProof. +func newDABatchV3WithProof(version CodecVersion, batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, blobDataProof [2]common.Hash, +) *daBatchV3 { + return &daBatchV3{ + daBatchV0: daBatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + lastBlockTimestamp: lastBlockTimestamp, + blob: blob, + z: z, + blobBytes: blobBytes, + blobDataProof: blobDataProof, // Set blobDataProof directly + } +} + +// Encode serializes the DABatchV3 into bytes. +func (b *daBatchV3) Encode() []byte { + batchBytes := make([]byte, daBatchV3EncodedLength) + batchBytes[daBatchOffsetVersion] = byte(b.version) + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash], b.totalL1MessagePopped) + copy(batchBytes[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash], b.dataHash[:]) + copy(batchBytes[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash], b.blobVersionedHash[:]) + copy(batchBytes[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp], b.parentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof], b.lastBlockTimestamp) + copy(batchBytes[daBatchV3OffsetBlobDataProof:daBatchV3OffsetBlobDataProof+kzgPointByteSize], b.blobDataProof[0].Bytes()) + copy(batchBytes[daBatchV3OffsetBlobDataProof+kzgPointByteSize:daBatchV3EncodedLength], b.blobDataProof[1].Bytes()) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. 
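For orientation, Encode above serializes the header into a fixed-length byte string (daBatchV3EncodedLength, 193 bytes in the codecv4 implementation removed later in this diff). The following is a decoding sketch only, not part of the change; the numeric offsets are assumptions mirroring that removed code rather than the exported offset constants:

// Sketch only: hand-decode two fields of a daBatchV3/V4 header.
// Assumed layout (193 bytes total): version(1) | batchIndex(8) | l1MessagePopped(8) |
// totalL1MessagePopped(8) | dataHash(32) | blobVersionedHash(32) | parentBatchHash(32) |
// lastBlockTimestamp(8) | blobDataProof z(32) | blobDataProof y(32).
func decodeBatchHeaderSketch(data []byte) (uint64, common.Hash, error) {
	if len(data) != 193 {
		return 0, common.Hash{}, fmt.Errorf("unexpected header length: %d", len(data))
	}
	batchIndex := binary.BigEndian.Uint64(data[1:9])       // bytes 1..9: batch index
	blobVersionedHash := common.BytesToHash(data[57:89])   // bytes 57..89: blob versioned hash
	return batchIndex, blobVersionedHash, nil
}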
+func (b *daBatchV3) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// blobDataProofForPICircuit computes the abi-encoded blob verification data. +func (b *daBatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { + if b.blob == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") + } + if b.z == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") + } + + _, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of result: + // | z | y | + // |---------|---------| + // | bytes32 | bytes32 | + var result [2]common.Hash + result[0] = common.BytesToHash(b.z[:]) + result[1] = common.BytesToHash(y[:]) + + return result, nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *daBatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + return blobDataProofFromValues(*b.z, y, commitment, proof), nil +} + +// Blob returns the blob of the batch. +func (b *daBatchV3) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. +func (b *daBatchV3) BlobBytes() []byte { + return b.blobBytes +} + +// MarshalJSON implements the custom JSON serialization for daBatchV3. +// This method is designed to provide prover with batch info in snake_case format. +func (b *daBatchV3) MarshalJSON() ([]byte, error) { + type daBatchV3JSON struct { + Version CodecVersion `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash string `json:"data_hash"` + ParentBatchHash string `json:"parent_batch_hash"` + BlobVersionedHash string `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]string `json:"blob_data_proof"` + } + + return json.Marshal(&daBatchV3JSON{ + Version: b.version, + BatchIndex: b.batchIndex, + L1MessagePopped: b.l1MessagePopped, + TotalL1MessagePopped: b.totalL1MessagePopped, + DataHash: b.dataHash.Hex(), + ParentBatchHash: b.parentBatchHash.Hex(), + BlobVersionedHash: b.blobVersionedHash.Hex(), + LastBlockTimestamp: b.lastBlockTimestamp, + BlobDataProof: [2]string{ + b.blobDataProof[0].Hex(), + b.blobDataProof[1].Hex(), + }, + }) +} + +// Version returns the version of the DABatch. +func (b *daBatchV3) Version() CodecVersion { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +func (b *daBatchV3) SkippedL1MessageBitmap() []byte { + return b.skippedL1MessageBitmap +} + +// DataHash returns the data hash of the DABatch. 
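As a side note (not part of this diff), the z/y pair produced by the two proof methods above can be cross-checked against the blob commitment. A minimal sketch, assuming the vendored kzg4844 package exposes VerifyProof with the upstream go-ethereum signature:

// Sketch only: recompute the KZG opening and verify that the blob polynomial
// evaluates to y at the challenge point z.
func verifyBlobOpeningSketch(blob *kzg4844.Blob, z kzg4844.Point) error {
	commitment, err := kzg4844.BlobToCommitment(blob)
	if err != nil {
		return fmt.Errorf("failed to create blob commitment: %w", err)
	}
	proof, y, err := kzg4844.ComputeProof(blob, z)
	if err != nil {
		return fmt.Errorf("failed to compute KZG proof: %w", err)
	}
	// Assumption: VerifyProof(commitment, point, claim, proof) as in upstream go-ethereum.
	return kzg4844.VerifyProof(commitment, z, y, proof)
}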
+func (b *daBatchV3) DataHash() common.Hash { + return b.dataHash +} diff --git a/encoding/codecv4.go b/encoding/codecv4.go new file mode 100644 index 0000000..4d51fe5 --- /dev/null +++ b/encoding/codecv4.go @@ -0,0 +1,326 @@ +package encoding + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "math/big" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding/zstd" +) + +type DACodecV4 struct { + DACodecV3 +} + +// Version returns the codec version. +func (d *DACodecV4) Version() CodecVersion { + return CodecV4 +} + +// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks +func (d *DACodecV4) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + rawBytes := bytesFromBlobCanonical(blob) + + // if first byte is 1 - data compressed, 0 - not compressed + if rawBytes[0] == 0x1 { + batchBytes, err := decompressScrollBlobToBatch(append(zstdMagicNumber, rawBytes[1:]...)) + if err != nil { + return err + } + return decodeTxsFromBytes(batchBytes, chunks, d.MaxNumChunksPerBatch()) + } else { + return decodeTxsFromBytes(rawBytes[1:], chunks, d.MaxNumChunksPerBatch()) + } +} + +// NewDABatch creates a DABatch from the provided Batch. +func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > d.MaxNumChunksPerBatch() { + return nil, fmt.Errorf("too many chunks in batch: got %d, maximum allowed is %d", len(batch.Chunks), d.MaxNumChunksPerBatch()) + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("batch must contain at least one chunk") + } + + if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { + return nil, errors.New("too few blocks in last chunk of the batch") + } + + // batch data hash + dataHash, err := d.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // skipped L1 messages bitmap + skippedL1MessageBitmap, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + enableCompression, err := d.CheckBatchCompressedDataCompatibility(batch) + if err != nil { + return nil, err + } + + // blob payload + blob, blobVersionedHash, z, blobBytes, err := d.constructBlobPayload(batch.Chunks, d.MaxNumChunksPerBatch(), enableCompression) + if err != nil { + return nil, err + } + + lastChunk := batch.Chunks[len(batch.Chunks)-1] + lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + + if totalL1MessagePoppedAfter < batch.TotalL1MessagePoppedBefore { + return nil, fmt.Errorf("batch index: %d, totalL1MessagePoppedAfter (%d) is less than batch.TotalL1MessagePoppedBefore (%d)", batch.Index, totalL1MessagePoppedAfter, batch.TotalL1MessagePoppedBefore) + } + l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore + + return newDABatchV3( + CodecV4, // version + batch.Index, // batchIndex + l1MessagePopped, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + skippedL1MessageBitmap, // skippedL1MessageBitmap + blob, // blob + z, 
// z + blobBytes, // blobBytes + ) +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. +func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { + if len(data) != daBatchV3EncodedLength { + return nil, fmt.Errorf("invalid data length for DABatch, expected %d bytes but got %d", daBatchV3EncodedLength, len(data)) + } + + if CodecVersion(data[daBatchOffsetVersion]) != CodecV4 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV4, data[daBatchOffsetVersion]) + } + + return newDABatchV3WithProof( + CodecVersion(data[daBatchOffsetVersion]), // version + binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV3OffsetL1MessagePopped]), // batchIndex + binary.BigEndian.Uint64(data[daBatchV3OffsetL1MessagePopped:daBatchV3OffsetTotalL1MessagePopped]), // l1MessagePopped + binary.BigEndian.Uint64(data[daBatchV3OffsetTotalL1MessagePopped:daBatchOffsetDataHash]), // totalL1MessagePopped + binary.BigEndian.Uint64(data[daBatchV3OffsetLastBlockTimestamp:daBatchV3OffsetBlobDataProof]), // lastBlockTimestamp + common.BytesToHash(data[daBatchOffsetDataHash:daBatchV3OffsetBlobVersionedHash]), // dataHash + common.BytesToHash(data[daBatchV3OffsetParentBatchHash:daBatchV3OffsetLastBlockTimestamp]), // parentBatchHash + common.BytesToHash(data[daBatchV3OffsetBlobVersionedHash:daBatchV3OffsetParentBatchHash]), // blobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // blobDataProof + common.BytesToHash(data[daBatchV3OffsetBlobDataProof : daBatchV3OffsetBlobDataProof+kzgPointByteSize]), + common.BytesToHash(data[daBatchV3OffsetBlobDataProof+kzgPointByteSize : daBatchV3EncodedLength]), + }, + ), nil +} + +// constructBlobPayload constructs the 4844 blob payload. +func (d *DACodecV4) constructBlobPayload(chunks []*Chunk, maxNumChunksPerBatch int, enableCompression bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + maxNumChunksPerBatch*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // challenge digest preimage + // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash + challengePreimage := make([]byte, (1+maxNumChunksPerBatch+1)*common.HashLength) + + // the chunk data hash used for calculating the challenge preimage + var chunkDataHash common.Hash + + // blob metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode blob metadata and L2 transactions, + // and simultaneously also build challenge preimage + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into blob payload + rlpTxData, err := convertTxDataToRLPEncoding(tx) + if err != nil { + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + } + batchBytes = append(batchBytes, rlpTxData...) 
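				// (Editorial annotation, not part of the diff.) At this point batchBytes holds:
				//   bytes [0:2]                        num_chunks as a big-endian uint16
				//   bytes [2:2+4*maxNumChunksPerBatch] per-chunk payload sizes (uint32 each), filled in after each chunk's loop below
				//   bytes [metadataLength:]            RLP-encoded L2 transactions, concatenated chunk by chunk;
				//                                      L1 message transactions are skipped and never enter the blob.
				// A flag byte is prepended later in this function: 0x01 when the payload is zstd-compressed, 0x00 when raw.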
+ } + } + + // blob metadata: chunki_size + chunkSize := len(batchBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + + // challenge: compute chunk data hash + chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) + } + + // if we have fewer than maxNumChunksPerBatch chunks, the rest + // of the blob metadata is correctly initialized to 0, + // but we need to add padding to the challenge preimage + for chunkID := len(chunks); chunkID < maxNumChunksPerBatch; chunkID++ { + // use the last chunk's data hash as padding + copy(challengePreimage[common.HashLength+chunkID*common.HashLength:], chunkDataHash[:]) + } + + // challenge: compute metadata hash + hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) + copy(challengePreimage[0:], hash[:]) + + var blobBytes []byte + if enableCompression { + // blobBytes represents the compressed blob payload (batchBytes) + var err error + blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + // Check compressed data compatibility. + if err = checkCompressedDataCompatibility(blobBytes); err != nil { + log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, err + } + blobBytes = append([]byte{1}, blobBytes...) + } else { + blobBytes = append([]byte{0}, batchBytes...) + } + + if len(blobBytes) > maxEffectiveBlobBytes { + log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := makeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+maxNumChunksPerBatch)*common.HashLength:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + if len(pointBytes) > kzgPointByteSize { + return nil, common.Hash{}, nil, nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, blobBytes, nil +} + +func (d *DACodecV4) estimateL1CommitBatchSizeAndBlobSize(chunks []*Chunk) (uint64, uint64, error) { + batchBytes, err := constructBatchPayloadInBlob(chunks, d) + if err != nil { + return 0, 0, fmt.Errorf("failed to construct batch payload in blob: %w", err) + } + var blobBytesLength uint64 + enableCompression, err := d.CheckBatchCompressedDataCompatibility(&Batch{Chunks: chunks}) + if err != nil { + return 0, 0, 
fmt.Errorf("failed to compress scroll batch bytes: %w", err) + } + if enableCompression { + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), calculatePaddedBlobSize(blobBytesLength), nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. +func (d *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return d.estimateL1CommitBatchSizeAndBlobSize([]*Chunk{c}) +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. +func (d *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return d.estimateL1CommitBatchSizeAndBlobSize(b.Chunks) +} + +// checkCompressedDataCompatibility checks the compressed data compatibility for a batch's chunks. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility. +func (d *DACodecV4) checkCompressedDataCompatibility(chunks []*Chunk) (bool, error) { + batchBytes, err := constructBatchPayloadInBlob(chunks, d) + if err != nil { + return false, fmt.Errorf("failed to construct batch payload in blob: %w", err) + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return false, fmt.Errorf("failed to compress scroll batch bytes: %w", err) + } + if err = checkCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("Compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (d *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return d.checkCompressedDataCompatibility([]*Chunk{c}) +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (d *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return d.checkCompressedDataCompatibility(b.Chunks) +} + +// JSONFromBytes converts the bytes to a daBatchV3 and then marshals it to JSON. 
+func (d *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := d.NewDABatchFromBytes(data) // this is different from the V3 implementation + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON, version %d, hash %s: %w", batch.Version(), batch.Hash(), err) + } + + return jsonBytes, nil +} diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go deleted file mode 100644 index d1aa48c..0000000 --- a/encoding/codecv4/codecv4.go +++ /dev/null @@ -1,462 +0,0 @@ -package codecv4 - -import ( - "crypto/sha256" - "encoding/binary" - "encoding/hex" - "errors" - "fmt" - "math/big" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/scroll-tech/go-ethereum/log" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv1" - "github.com/scroll-tech/da-codec/encoding/codecv3" - "github.com/scroll-tech/da-codec/encoding/zstd" -) - -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = codecv3.MaxNumChunks - -// DABlock represents a Data Availability Block. -type DABlock = codecv3.DABlock - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv3.DAChunk - -// DAChunkRawTx groups consecutive DABlocks with their L2 transactions, L1 msgs are loaded in another place. -type DAChunkRawTx = codecv3.DAChunkRawTx - -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point - - // for batch task - blobBytes []byte -} - -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv3.NewDABlock(block, totalL1MessagePoppedBefore) -} - -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv3.NewDAChunk(chunk, totalL1MessagePoppedBefore) -} - -// DecodeDAChunksRawTx takes a byte slice and decodes it into a []*DAChunkRawTx. -func DecodeDAChunksRawTx(bytes [][]byte) ([]*DAChunkRawTx, error) { - return codecv3.DecodeDAChunksRawTx(bytes) -} - -// NewDABatch creates a DABatch from the provided encoding.Batch. 
-func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { - // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { - return nil, errors.New("too many chunks in batch") - } - - if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") - } - - if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { - return nil, errors.New("too few blocks in last chunk of the batch") - } - - // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableCompress, false /* no mock */) - if err != nil { - return nil, err - } - - lastChunk := batch.Chunks[len(batch.Chunks)-1] - lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - - daBatch := DABatch{ - Version: uint8(encoding.CodecV4), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, - } - - daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() - if err != nil { - return nil, err - } - - return &daBatch, nil -} - -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv3.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // challenge digest preimage - // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) - - // the chunk data hash used for calculating the challenge preimage - var chunkDataHash common.Hash - - // blob metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode blob metadata and L2 transactions, - // and simultaneously also build challenge preimage - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - batchBytes = append(batchBytes, rlpTxData...) 
- } - } - - // blob metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - - // challenge: compute chunk data hash - chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // if we have fewer than MaxNumChunks chunks, the rest - // of the blob metadata is correctly initialized to 0, - // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { - // use the last chunk's data hash as padding - copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) - } - - // challenge: compute metadata hash - hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) - copy(challengePreimage[0:], hash[:]) - - var blobBytes []byte - if enableCompress { - // blobBytes represents the compressed blob payload (batchBytes) - var err error - blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - if !useMockTxData { - // Check compressed data compatibility. - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, err - } - } - blobBytes = append([]byte{1}, blobBytes...) - } else { - blobBytes = append([]byte{0}, batchBytes...) - } - - if len(blobBytes) > 126976 { - log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") - } - - // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) - if err != nil { - return nil, common.Hash{}, nil, nil, err - } - - // compute blob versioned hash - c, err := kzg4844.BlobToCommitment(blob) - if err != nil { - return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") - } - blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) - - // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) - - // compute z = challenge_digest % BLS_MODULUS - challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) - pointBytes := pointBigInt.Bytes() - - // the challenge point z - var z kzg4844.Point - start := 32 - len(pointBytes) - copy(z[start:], pointBytes) - - return blob, blobVersionedHash, &z, blobBytes, nil -} - -// DecodeTxsFromBlob decodes txs from blob bytes and writes to chunks -func DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - rawBytes := encoding.BytesFromBlobCanonical(blob) - - // if first byte is 1 - data compressed, 0 - not compressed - if rawBytes[0] == 0x1 { - magics := []byte{0x28, 0xb5, 0x2f, 0xfd} - batchBytes, err := encoding.DecompressScrollBlobToBatch(append(magics, rawBytes[1:]...)) - if err != nil { - return err - } - return codecv1.DecodeTxsFromBytes(batchBytes, chunks, MaxNumChunks) - } else { - return codecv1.DecodeTxsFromBytes(rawBytes[1:], chunks, MaxNumChunks) - } -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. 
-// Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { - if len(data) != 193 { - return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) - } - - b := &DABatch{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), - BlobDataProof: [2]common.Hash{ - common.BytesToHash(data[129:161]), - common.BytesToHash(data[161:193]), - }, - } - - return b, nil -} - -// Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { - if b.blob == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") - } - if b.z == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") - } - - _, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of result: - // | z | y | - // |---------|---------| - // | bytes32 | bytes32 | - var result [2]common.Hash - result[0] = common.BytesToHash(b.z[:]) - result[1] = common.BytesToHash(y[:]) - - return result, nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return encoding.BlobDataProofFromValues(*b.z, y, commitment, proof), nil -} - -// Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. 
-func (b *DABatch) BlobBytes() []byte { - return b.blobBytes -} - -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) - if err != nil { - return 0, 0, err - } - var blobBytesLength uint64 - if enableCompress { - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - blobBytesLength = 1 + uint64(len(blobBytes)) - } else { - blobBytesLength = 1 + uint64(len(batchBytes)) - } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil -} - -// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) - if err != nil { - return 0, 0, err - } - var blobBytesLength uint64 - if enableCompress { - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return 0, 0, err - } - blobBytesLength = 1 + uint64(len(blobBytes)) - } else { - blobBytesLength = 1 + uint64(len(batchBytes)) - } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) - if err != nil { - return false, err - } - blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv3.EstimateChunkL1CommitCalldataSize(c) -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
-func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv3.EstimateBatchL1CommitCalldataSize(b) -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv3.EstimateChunkL1CommitGas(c) -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv3.EstimateBatchL1CommitGas(b) -} diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go deleted file mode 100644 index a1b13cf..0000000 --- a/encoding/codecv4/codecv4_test.go +++ /dev/null @@ -1,838 +0,0 @@ -package codecv4 - -import ( - "encoding/hex" - "encoding/json" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV4BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v4 block encodings are identical - for _, trace := range []*encoding.Block{trace2, 
trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv4, err := NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv4 := hex.EncodeToString(blockv4.Encode()) - - assert.Equal(t, encodedv0, encodedv4) - } -} - -func TestCodecV4ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, 
"01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV4ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = 
&encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func TestCodecV4BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV4)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) -} - -func TestCodecV4BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV4)} - assert.Equal(t, "0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c", 
batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) -} - -func TestCodecV4ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(51124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) 
- assert.Equal(t, uint64(207649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(51124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(207649), batch3Gas) - - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(53745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(210302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(52202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(53745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(213087), batch5Gas) -} - -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} - -func TestCodecV4BatchStandardTestCases(t *testing.T) { - // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) - - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - expectedBlobVersionedHash string - expectedBatchHash string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, - // empty chunk followed by 
non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, - // max number of chunks all non-empty - {chunks: [][]string{ - {"0x0a"}, - {"0x0a0b"}, - {"0x0a0b0c"}, - {"0x0a0b0c0d"}, - {"0x0a0b0c0d0e"}, - {"0x0a0b0c0d0e0f"}, - {"0x0a0b0c0d0e0f10"}, - {"0x0a0b0c0d0e0f1011"}, - {"0x0a0b0c0d0e0f101112"}, - {"0x0a0b0c0d0e0f10111213"}, - {"0x0a0b0c0d0e0f1011121314"}, - {"0x0a0b0c0d0e0f101112131415"}, - {"0x0a0b0c0d0e0f10111213141516"}, - {"0x0a0b0c0d0e0f1011121314151617"}, - {"0x0a0b0c0d0e0f101112131415161718"}, - {"0x0a0b0c0d0e0f10111213141516171819"}, - {"0x0a0b0c0d0e0f101112131415161718191a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, - 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "37ca5366d9f5ddd9471f074f8019050ea6a13097368e84f298ffa1bd806ad851", expectedy: "5aa602da97cc438a039431c799b5f97467bcd45e693273dd1215f201b19fa5bd", expectedBlobVersionedHash: "01e531e7351a271839b2ae6ddec58818efd5f426fd6a7c0bc5c33c9171ed74bf", expectedBatchHash: "d3809d6b2fd10a62c6c58f9e7c32772f4ac062a78d363f46cd3ee301e87dbad2"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "250fc907e7ba3b5affb90a624566e337b02dd89a265677571cc0d1c51b60af19", expectedy: "1b2898bb001d962717159f49b015ae7228b21e9a590f836be0d79a0870c7d82b", expectedBlobVersionedHash: "01f3c431a72bbfd43c42dbd638d7f6d109be2b9449b96386b214f92b9e28ccc4", expectedBatchHash: "a51631991f6210b13e9c8ac9260704cca29fdc08adcfbd210053dc77c956e82f"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6ba09c6123b374f1828ce5b3e52c69ac7e2251f1a573ba4d51e71b386eef9c38", expectedy: "3104f9e81ecf4ade3281cc8ea68c4f451341388e2a2c84be4b5e5ed938b6bb26", expectedBlobVersionedHash: "017813036e3c57d5259d5b1d89ca0fe253e43d740f5ee287eabc916b3486f15d", expectedBatchHash: "ebfaf617cc91d9147b00968263993f70e0efc57c1189877092a87ea60b55a2d7"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "295f6ba39b866f6635a1e11ffe16badf42174ba120bdcb973806620370f665fc", expectedy: "553772861d517aefd58332d87d75a388523b40dbd69c1d73b7d78fd18d895513", expectedBlobVersionedHash: "013a5cb4a098dfa068b82acea202eac5c7b1ec8f16c7cb37b2a9629e7359a4b1", expectedBatchHash: "b4c58eb1be9b2b21f6a43b4170ee92d6ee0af46e20848fff508a07d40b2bac29"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 
100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: "4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, - } { - chunks := []*encoding.Chunk{} - - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} - - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) - require.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) - - _, y, err := kzg4844.ComputeProof(blob, *z) - require.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) - dataBytes := make([]byte, 32*len(chunks)) - for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) - } - dataHash := crypto.Keccak256Hash(dataBytes) - - batch := DABatch{ - Version: uint8(encoding.CodecV4), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - LastBlockTimestamp: 192837, - blob: blob, - z: z, - } - - batch.BlobDataProof, err = batch.blobDataProofForPICircuit() - require.NoError(t, err) - - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) - } -} - -func TestCodecV4BatchL1MessagePopped(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable 
encode */) - assert.NoError(t, err) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(238), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(238), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2934), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2934), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) - assert.Equal(t, uint64(55), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(55), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3150), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(55), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3187), batch5BlobSize) -} - -func TestCodecV4ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, 
uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { - t.Run("Case 1", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 293212, - "l1_message_popped": 7, - "total_l1_message_popped": 904750, - "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", - "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", - "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", - "last_block_timestamp": 1721130505, - "blob_data_proof": [ - "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", - "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(293212), batch.BatchIndex) - assert.Equal(t, uint64(7), batch.L1MessagePopped) - assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x64ba42153a4f642b2d8a37cf74a53067c37bba7389b85e7e07521f584e6b73d0") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 2", func(t *testing.T) { - jsonStr := `{ - "version": 5, - "batch_index": 123, - "l1_message_popped": 0, - "total_l1_message_popped": 0, - "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", - "last_block_timestamp": 1720174236, - "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", - "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", - "blob_data_proof": [ - "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", - "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(5), 
batch.Version) - assert.Equal(t, uint64(123), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) - assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0xd14f142dbc5c384e9920d5bf82c6bbf7c98030ffd7a3cace6c8a6e9639a285f9") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 3", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 293205, - "l1_message_popped": 0, - "total_l1_message_popped": 904737, - "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", - "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", - "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", - "last_block_timestamp": 1721129563, - "blob_data_proof": [ - "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", - "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(293205), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x19638ca802926b93946fe281666205958838d46172587d150ca4c720ae244cd3") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - 
	data, err := os.ReadFile(filename)
-	assert.NoError(t, err)
-
-	block := &encoding.Block{}
-	assert.NoError(t, json.Unmarshal(data, block))
-	return block
-}
diff --git a/encoding/codecv4_test.go b/encoding/codecv4_test.go
new file mode 100644
index 0000000..c83601e
--- /dev/null
+++ b/encoding/codecv4_test.go
@@ -0,0 +1,1599 @@
+package encoding
+
+import (
+	"encoding/hex"
+	"encoding/json"
+	"fmt"
+	"math"
+	"strings"
+	"testing"
+
+	"github.com/agiledragon/gomonkey/v2"
+	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/common/hexutil"
+	"github.com/scroll-tech/go-ethereum/core/types"
+	"github.com/scroll-tech/go-ethereum/crypto"
+	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestCodecV4BlockEncode(t *testing.T) {
+	codecv4, err := CodecFromVersion(CodecV4)
+	require.NoError(t, err)
+
+	block := &daBlockV0{}
+	encoded := hex.EncodeToString(block.Encode())
+	assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
+
+	block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json")
+	daBlock, err := codecv4.NewDABlock(block2, 0)
+	assert.NoError(t, err)
+	encoded = hex.EncodeToString(daBlock.Encode())
+	assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded)
+
+	block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json")
+	daBlock, err = codecv4.NewDABlock(block3, 0)
+	assert.NoError(t, err)
+	encoded = hex.EncodeToString(daBlock.Encode())
+	assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded)
+
+	block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json")
+	daBlock, err = codecv4.NewDABlock(block4, 0)
+	assert.NoError(t, err)
+	encoded = hex.EncodeToString(daBlock.Encode())
+	assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded)
+
+	block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json")
+	daBlock, err = codecv4.NewDABlock(block5, 0)
+	assert.NoError(t, err)
+	encoded = hex.EncodeToString(daBlock.Encode())
+	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded)
+
+	block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json")
+	daBlock, err = codecv4.NewDABlock(block6, 0)
+	assert.NoError(t, err)
+	encoded = hex.EncodeToString(daBlock.Encode())
+	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded)
+
+	block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json")
+	daBlock, err = codecv4.NewDABlock(block7, 0)
+	assert.NoError(t, err)
+	encoded = hex.EncodeToString(daBlock.Encode())
+	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
+
+	codecv0, err := CodecFromVersion(CodecV0)
+	require.NoError(t, err)
+
+	// sanity check: v0 and v4 block encodings are identical
+	for _, trace := range []*Block{block2, block3, block4, block5, block6, block7} {
+		blockv0, err := codecv0.NewDABlock(trace, 0)
+		assert.NoError(t, err)
+		encodedv0 := hex.EncodeToString(blockv0.Encode())
+
+		blockv4, err :=
codecv4.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv4 := hex.EncodeToString(blockv4.Encode()) + + assert.Equal(t, encodedv0, encodedv4) + } +} + +func TestCodecV4ChunkEncode(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + daChunkV1 := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + + encodedBytes, err := daChunkV1.Encode() + assert.NoError(t, err) + encoded := hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + daChunkV1.transactions[0] = append(daChunkV1.transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encodedBytes, err = daChunkV1.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = 
&Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encodedBytes, err = daChunk.Encode() + assert.NoError(t, err) + encoded = hex.EncodeToString(encodedBytes) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV4ChunkHash(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // chunk with a single empty block + daBlock := &daBlockV0{} + chunk := &daChunkV1{blocks: []DABlock{daBlock}, transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + daBlock = chunk.blocks[0].(*daBlockV0) + daBlock.numL1Messages = 1 + chunk.blocks[0] = daBlock + + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.transactions[0] = append(chunk.transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + block := readBlockFromJSON(t, "testdata/blockTrace_02.json") + originalChunk := &Chunk{Blocks: []*Block{block}} + daChunk, err := codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_03.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_04.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_05.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_06.json") + originalChunk = &Chunk{Blocks: 
[]*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + block = readBlockFromJSON(t, "testdata/blockTrace_07.json") + originalChunk = &Chunk{Blocks: []*Block{block}} + daChunk, err = codecv4.NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = daChunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} + +func TestCodecV4BatchEncode(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // empty daBatch + daBatchV3 := &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV4, + }, + } + encoded := hex.EncodeToString(daBatchV3.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + 
assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = hex.EncodeToString(daBatch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) +} + +func TestCodecV4BatchHash(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // empty daBatch + daBatchV3 := &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV4, + }, + } + assert.Equal(t, common.HexToHash("0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c"), daBatchV3.Hash()) + + block2 := readBlockFromJSON(t, 
"testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c"), daBatch.Hash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857"), daBatch.Hash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d"), daBatch.Hash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e"), daBatch.Hash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342"), daBatch.Hash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117"), daBatch.Hash()) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a"), daBatch.Hash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44"), daBatch.Hash()) +} + +func TestCodecV4NewDABatchFromBytes(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + testCases := []struct { + name string + jsonFile string + }{ + {"Empty Batch", ""}, + {"Block 02", "testdata/blockTrace_02.json"}, + {"Block 03", "testdata/blockTrace_03.json"}, + {"Block 04", "testdata/blockTrace_04.json"}, + {"Block 05", "testdata/blockTrace_05.json"}, + {"Block 06", "testdata/blockTrace_06.json"}, + {"Block 07", "testdata/blockTrace_07.json"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var batch *Batch + var daBatch DABatch + var createErr1 error + + if tc.jsonFile == "" { + // Empty daBatch + daBatch = &daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV4, + }, + } + } else { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + batch = &Batch{Chunks: []*Chunk{chunk}} + daBatch, 
createErr1 = codecv4.NewDABatch(batch) + assert.NoError(t, createErr1) + } + + // Encode the DABatch + encodedBytes := daBatch.Encode() + + // Decode the bytes back into a DABatch + decodedDABatch, createErr2 := codecv4.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, createErr2) + + // Compare the hashes of the original and decoded DABatch + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash(), "Hashes should match for %s", tc.name) + }) + } + + // Test with multiple blocks and chunks in a batch + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + chunk2 := &Chunk{Blocks: []*Block{block4, block5}} + batch := &Batch{Chunks: []*Chunk{chunk1, chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + + encodedBytes := daBatch.Encode() + decodedDABatch, err := codecv4.NewDABatchFromBytes(encodedBytes) + assert.NoError(t, err) + + assert.Equal(t, daBatch.Hash(), decodedDABatch.Hash()) +} + +func TestCodecV4BatchDataHash(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541"), daBatch.DataHash()) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6"), daBatch.DataHash()) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f"), daBatch.DataHash()) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4"), daBatch.DataHash()) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d"), daBatch.DataHash()) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208"), daBatch.DataHash()) + + batch = &Batch{Chunks: []*Chunk{chunk2, chunk3, chunk4, chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, 
common.HexToHash("0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8"), daBatch.DataHash()) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, common.HexToHash("0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767"), daBatch.DataHash()) +} + +func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { + t.Run("Case 1", func(t *testing.T) { + expectedJsonStr := `{ + "version": 4, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 293212, + l1MessagePopped: 7, + totalL1MessagePopped: 904750, + dataHash: common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + lastBlockTimestamp: 1721130505, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), + common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(expectedJsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 2", func(t *testing.T) { + jsonStr := `{ + "version": 5, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 5, + batchIndex: 123, + l1MessagePopped: 0, + totalL1MessagePopped: 0, + dataHash: common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), + parentBatchHash: common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), + }, + blobVersionedHash: common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), + lastBlockTimestamp: 1720174236, + 
blobDataProof: [2]common.Hash{ + common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), + common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) + + t.Run("Case 3", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 293205, + l1MessagePopped: 0, + totalL1MessagePopped: 904737, + dataHash: common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), + parentBatchHash: common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), + }, + blobVersionedHash: common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), + lastBlockTimestamp: 1721129563, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), + common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), + }, + } + + data, err := json.Marshal(&daBatch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJson, actualJson map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &expectedJson) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJson) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + assert.Equal(t, expectedJson, actualJson, "Marshaled JSON does not match expected JSON") + }) +} + +func TestDACodecV4JSONFromBytes(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + daBatch := daBatchV3{ + daBatchV0: daBatchV0{ + version: 4, + batchIndex: 293212, + l1MessagePopped: 7, + totalL1MessagePopped: 904750, + dataHash: common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + lastBlockTimestamp: 1721130505, + blobDataProof: [2]common.Hash{ + common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), + common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), + }, + } + + outputJSON, err := codecv4.JSONFromBytes(daBatch.Encode()) + require.NoError(t, err, "JSONFromBytes 
failed") + + var outputMap map[string]interface{} + err = json.Unmarshal(outputJSON, &outputMap) + require.NoError(t, err, "Failed to unmarshal output JSON") + + expectedFields := map[string]interface{}{ + "version": float64(daBatch.version), + "batch_index": float64(daBatch.batchIndex), + "l1_message_popped": float64(daBatch.l1MessagePopped), + "total_l1_message_popped": float64(daBatch.totalL1MessagePopped), + "data_hash": daBatch.dataHash.Hex(), + "blob_versioned_hash": daBatch.blobVersionedHash.Hex(), + "parent_batch_hash": daBatch.parentBatchHash.Hex(), + "last_block_timestamp": float64(daBatch.lastBlockTimestamp), + "blob_data_proof": []interface{}{ + daBatch.blobDataProof[0].Hex(), + daBatch.blobDataProof[1].Hex(), + }, + } + + assert.Len(t, outputMap, len(expectedFields), "Unexpected number of fields in output") + for key, expectedValue := range expectedFields { + assert.Equal(t, expectedValue, outputMap[key], fmt.Sprintf("Mismatch in field %s", key)) + } +} + +func TestCodecV4CalldataSizeEstimation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch2CalldataSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch3CalldataSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6CalldataSize, err := codecv4.EstimateChunkL1CommitCalldataSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5CalldataSize, err := codecv4.EstimateBatchL1CommitCalldataSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(180), batch5CalldataSize) +} + +func TestCodecV4CommitGasEstimation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2Gas, err := codecv4.EstimateChunkL1CommitGas(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2Gas, err := 
codecv4.EstimateBatchL1CommitGas(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3Gas, err := codecv4.EstimateChunkL1CommitGas(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3Gas, err := codecv4.EstimateBatchL1CommitGas(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4Gas, err := codecv4.EstimateChunkL1CommitGas(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + batch4Gas, err := codecv4.EstimateBatchL1CommitGas(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5Gas, err := codecv4.EstimateChunkL1CommitGas(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6Gas, err := codecv4.EstimateChunkL1CommitGas(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5Gas, err := codecv4.EstimateBatchL1CommitGas(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(213087), batch5Gas) +} + +func TestCodecV4BatchSizeAndBlobSizeEstimation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + chunk2BatchBytesSize, chunk2BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(238), chunk2BlobSize) + batch2 := &Batch{Chunks: []*Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(238), batch2BlobSize) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + chunk3BatchBytesSize, chunk3BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2934), chunk3BlobSize) + batch3 := &Batch{Chunks: []*Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2934), batch3BlobSize) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + chunk4BatchBytesSize, chunk4BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(55), chunk4BlobSize) + batch4 := &Batch{Chunks: []*Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(55), batch4BlobSize) + + chunk5 := &Chunk{Blocks: []*Block{block2, block3}} + chunk5BatchBytesSize, 
chunk5BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3150), chunk5BlobSize) + chunk6 := &Chunk{Blocks: []*Block{block4}} + chunk6BatchBytesSize, chunk6BlobSize, err := codecv4.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(55), chunk6BlobSize) + batch5 := &Batch{Chunks: []*Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3187), batch5BlobSize) +} + +func TestCodecV4BatchL1MessagePopped(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(0), daBatch.(*daBatchV3).totalL1MessagePopped) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(11), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(11), daBatch.(*daBatchV3).totalL1MessagePopped) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 37 + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(5), daBatch.(*daBatchV3).l1MessagePopped) // skip 37, include 5 + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(10), daBatch.(*daBatchV3).l1MessagePopped) // skip 7, include 3 + assert.Equal(t, uint64(10), daBatch.(*daBatchV3).totalL1MessagePopped) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(257), daBatch.(*daBatchV3).l1MessagePopped) // skip 255, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 1 + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(256), 
daBatch.(*daBatchV3).l1MessagePopped) // skip 254, include 2 + assert.Equal(t, uint64(257), daBatch.(*daBatchV3).totalL1MessagePopped) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} // queue index 10 + chunk9 := &Chunk{Blocks: []*Block{block5}} // queue index 37-41 + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) + + batch.TotalL1MessagePoppedBefore = 10 + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + assert.Equal(t, uint64(32), daBatch.(*daBatchV3).l1MessagePopped) + assert.Equal(t, uint64(42), daBatch.(*daBatchV3).totalL1MessagePopped) +} + +func TestCodecV4BlobEncodingAndHashing(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded := strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "0001609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf00039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7731600a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed0032f1030060b26d07d8b028b005", encoded) + assert.Equal(t, common.HexToHash("0x01e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f29"), daBatch.(*daBatchV3).blobVersionedHash) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, 
"000160e7159d580094830001000016310002f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a0811037815160208301516040808501800051915193959294830192918464018211639083019060208201858179825181001182820188101794825250918201929091019080838360005b83c357818101005183820152602001620000a9565b50505050905090810190601f16f1578082000380516001836020036101000a031916819150805160405193929190011501002b01460175015b01a39081015185519093508592508491620001c891600391008501906200026b565b508051620001de90600490602084506005805461ff00001960ff1990911660121716905550600680546001600160a01b0380881619920083161790925560078054928716929091169190911790556200023081620002005562010000600160b01b03191633021790555062000307915050565b60ff19001660ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de010060010185558215620002de579182015b8202de5782518255916020019190600001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301570080639dc29fac14610309578063a457c2d714610335578063a9059cbb1461030061578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610002a55780638456cb59146102cb5780638e50817a146102d3313ce56711610000de571461021d578063395093511461023b5780633f4ba83a146102675780630040c10f191461027106fdde0314610110578063095ea7b31461018d5780631800160ddd146101cd57806323b872e7575b6101186103bb565b6040805160208000825283518183015283519192839290830161015261013a61017f9250508091000390f35b6101b9600480360360408110156101a381351690602001356104510091151582525190819003602001d561046e60fd81169160208101359091169000604074565b6102256104fb60ff90921640025105046f610552565b005b6102006f028705a956610654d520bb3516610662067d56e90135166106d21861075700031f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282008152606093909290918301828280156104475780601f1061041c57610100800083540402835291610447565b825b8154815260200180831161042a5782900300601f16820191565b600061046561045e610906565b848461090a565b506001009202548184f6565b6104f18461048d6104ec8560405180606080602861108500602891398a166000908152600160205260408120906104cb81019190915260004001600020549190610b51565b935460ff160511016000610522908116825200602080830193909352604091820120918c168152925290205490610be8565b00600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b001b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090000460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606004606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616300746f727960a0079283918216179091559390921660041561080808550e6508006c2511176025006108968dd491824080832093909416825233831661094f5700040180806020018281038252602401806110f36024913960400191fd821661000994223d60228084166000819487168084529482529182902085905581518500815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b20000ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a008b838383610f61565b610ac881265f60268685808220939093559084168152002054610af7908220409490945580905191937fddf252ad1be2c89b69c2b06800fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111500610be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53006166654d6174683a206164646974696f6e206f766572666c6f7700610c9c140073621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537b00d38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e742074006f20746865207a65726f72657373610d
546000600254610d61025590205461000d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e42020070b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad602161000eb68260000ef3221b85839020550f199082610fb540805182600091851691009120565b610f6cb07415610fb02a113c602a00610c428383401e7375627472006163815250fe7472616e736665726275726e20616d6f756e742065786365650064732062616c616e6365617070726f7665616c6c6f7766726f6d646563726500617365642062656c6f775061757361626c653a20746f6b656e7768696c652000706175736564a2646970667358221220e96342bec8f6c2bf72815a3999897300b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a7700d9fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e0400c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f055003c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5200095d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60300126013290b6398528818e2c8484081888c4890142465a631e63178f994004800f46ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a80049670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fa00b388531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee55000b5e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b163008aa1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637100664c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d44700c0318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a300958d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa800b597b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b001b3f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242008009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a07700b85b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc800bea3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf96244333647009fbd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1000392cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d4614217006fcdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15b00c9975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e4500e579308f554787b4d1f74e389823923f5d268be545466a2dd449963ad2540700bd3a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe276800a9091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c3953600c5de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98800998d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) + assert.Equal(t, common.HexToHash("0x01ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df71050"), daBatch.(*daBatchV3).blobVersionedHash) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "000120d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df0002d40343a7626a9d321e105808080808001002c0a1801", encoded) + assert.Equal(t, common.HexToHash("0x01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93"), daBatch.(*daBatchV3).blobVersionedHash) + + // this batch only contains L1 txs + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00000001", encoded) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), 
daBatch.(*daBatchV3).blobVersionedHash) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00000001", encoded) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), daBatch.(*daBatchV3).blobVersionedHash) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00000001", encoded) + assert.Equal(t, common.HexToHash("0x016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a"), daBatch.(*daBatchV3).blobVersionedHash) + + // 45 chunks + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, "00016024281d0700140d002d000000e6f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1040041e1491b3e82c9b61d60d39a727", encoded) + assert.Equal(t, common.HexToHash("0x0128a4e122c179a7c34ab1f22ceadf6fa66d2bb0d229933fe1ed061dd8b1fb5f"), daBatch.(*daBatchV3).blobVersionedHash) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + encoded = strings.TrimRight(hex.EncodeToString(daBatch.(*daBatchV3).blob[:]), "0") + assert.Equal(t, 
"000160ed16256000449200020000173700f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf00039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7731600a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed0032f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348000156200001157600080fd5b50604051620014b238038083398181016040526000a0811037815160208301516040808501805191519395929483019291846401008211639083019060208201858179825181118282018810179482525091820100929091019080838360005b83c3578181015183820152602001620000a9565b0050505050905090810190601f16f15780820380516001836020036101000a030019168191508051604051939291900115012b01460175015b01a3908101518500519093508592508491620001c8916003918501906200026b565b50805162000001de90600490602084506005805461ff001960ff199091166012171690555000600680546001600160a01b03808816199283161790925560078054928716920090911691909117905562000230816200025562010000600160b01b0319163300021790555062000307915050565b60ff191660ff929092565b828160011615006101000203166002900490600052602060002090601f01602090048101928200601f10620002ae5780518380011785de0160010185558215620002de57918200015b8202de57825182559160200191906001c1565b50620002ec9291f0565b005090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb11610000a257806395d89b4111610071146103015780639dc29fac14610309578063a40057c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576100010b565b1461029d57806370a08231146102a55780638456cb59146102cb570080638e50817a146102d3313ce567116100de571461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f191461027106fdde031461000110578063095ea7b31461018d57806318160ddd146101cd57806323b872e700575b6101186103bb565b6040805160208082528351818301528351919283920090830161015261013a61017f92505080910390f35b6101b960048036036040008110156101a3813516906020013561045191151582525190819003602001d50061046e60fd811691602081013590911690604074565b6102256104fb60ff9000921640025105046f610552565b005b61026f028705a956610654d520bb351600610662067d56e90135166106d218610757031f07b856034b085f77c7d5a30800db565b6003805420601f600260001961010060018816150201909516949094000493840181900481028201810190925282815260609390929091830182828000156104475780601f1061041c576101008083540402835291610447565b825b008154815260200180831161042a57829003601f16820191565b60006104656100045e610906565b848461090a565b5060019202548184f6565b6104f1846104008d6104ec85604051806060806028611085602891398a16600090815260016000205260408120906104cb810191909152604001600020549190610b51565b93005460ff160511016000610522908116825260208083019390935260409182010020918c168152925290205490610be8565b600716331461059f5762461bcd6000e51b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529000640190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5c00d8589b194e881c185d5cd95960826006064606508282610ced90905260400600ca0ddd900407260c6b6f6e6c7920466163746f727960a007928391821617900091559390921660041561080808550e65086c2511176025006108968dd49182004080832093909416825233831661094f5704018080602001828103825260240001806110f36024913960400191fd8216610994223d60228084166000819487001680845294825291829020859055815185815291517f8c5be1e5ebec7d5bd1004f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831600610a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac88126005f602686858082209390935590841681
522054610af790822040949094558000905191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f5005a4df523b3ef9291829003008184841115610be08381815191508051900ba5000b8d0bd2fd900300828201610c421b7f536166654d6174683a20616464697400696f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e600ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a1821661000d481f7f45524332303a206d696e7420746f20746865207a65726f7265737300610d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdb00fc544b05a2588216610eaa6021ad6021610eb68260000ef3221b8583902055000f199082610fb5408051826000918516919120565b610f6cb07415610fb02a00113c602a00610c428383401e73756274726163815250fe7472616e73666572006275726e20616d6f756e7420657863656564732062616c616e636561707072006f7665616c6c6f7766726f6d6465637265617365642062656c6f77506175730061626c653a20746f6b656e7768696c6520706175736564a264697066735822001220e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda000265d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a500820b63a0e012095745544820636f696e04c001a0235c1a8d40e8c34789039700f1a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd0697044006e74229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a00258d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a914009a111111110549d2740105c410e61ca4d603126013290b6398528818e2c848004081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb8000ccba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc500c5ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69800511c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be007ea27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0500238c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e923001dd28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af001ff932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb410002cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a0013b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f00061093a37810212ba36db205219fab4032428009178588ad21f754085dd80700b09af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403300355c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d369005c0904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5200463d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71160024bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80004421f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7ac00db3071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38980023923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f6700ea8d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c608007efc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac008533de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c63006da70ee60a586fdb282babf53e01", encoded) + assert.Equal(t, common.HexToHash("0x0121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26"), daBatch.(*daBatchV3).blobVersionedHash) +} + +func TestCodecV4BatchBlobDataProofForPointEvaluation(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + chunk2 := &Chunk{Blocks: []*Block{block2}} + batch := &Batch{Chunks: []*Chunk{chunk2}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480db5eb389fe4a7fcba73975e3ebc5f1f7f040022a51e20a94a1a67471fc0f4dfb23eaeff14ce3fd2d0928f644b6d6b11d5ac5e0f3f19d94f4e12b775d39c7d970363fe6ccd9b23c006b8dc25512cb7b9d1d85521c4893983e52f7e9844a7dc8eca", hex.EncodeToString(verifyData)) + + block3 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk3 := &Chunk{Blocks: []*Block{block3}} + batch = &Batch{Chunks: []*Chunk{chunk3}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d72598f7a0eb89cf859212035316e58dc2d291a73b84a36d61b94166ece830f7a6316bb378e098602ffc0e66adc1e33c8608a3b39da9b1c0565a19cbf3ab6415c7bb3ddfeb6d63d204c4670f5777fdee9ffa5f6aec4085924f4af2fe27142eec0cd2", hex.EncodeToString(verifyData)) + + block4 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + chunk4 := &Chunk{Blocks: []*Block{block4}} + batch = &Batch{Chunks: []*Chunk{chunk4}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "38122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588a695aaff41dcefb301a7b597c201940b3c64439e4b74c23b7280def1d1b160e4121129f7f0015f3e880b9b7594de04a5a7445c20b31d8786754ed6f9fbafe69b24d738055c5cad62a502e9b7d717aa45636022a24c0a83bbf411157054957638", hex.EncodeToString(verifyData)) + + block5 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk5 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk5}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) + + block6 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk6 := &Chunk{Blocks: []*Block{block6}} + batch = &Batch{Chunks: []*Chunk{chunk6}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) + + block7 := readBlockFromJSON(t, "testdata/blockTrace_07.json") + chunk7 := &Chunk{Blocks: []*Block{block7}} + batch = &Batch{Chunks: []*Chunk{chunk7}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, 
"04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a92139250d65777a7748934f3e2992f17a66affd58b341854cf7a0837d976903f412189ad04ea1003bdc602ebf33d3af43e23a9c69bb3a38a5e633154ada88e361cc633194fc01bab0d496c1541654f112f5ed258d3bde8ca0ca38b69c26d8813c268", hex.EncodeToString(verifyData)) + + // 45 chunks + batch = &Batch{Chunks: []*Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "237ce1b89c4534d34df2f0102af375a93128e88d5f762d3af6d109b63986fef525261e41884dc3b9998b8929b38a7ed6a0b5c91e98f7bc280971a0ef265680cc902969e14a0716e5ff34fc4cdabf7e0319f8456301d1e5643be4ab4f86fe4dbcfa26594ffbf3a496ab07db4eb2471eb5a669bac77d6ff53dd202957a0d5b27f8a4fc94de92e01715a6c9d7cb54f1d25ccc13a7096b62592edb5c0f4ff6d45545", hex.EncodeToString(verifyData)) + + chunk8 := &Chunk{Blocks: []*Block{block2, block3, block4}} + chunk9 := &Chunk{Blocks: []*Block{block5}} + batch = &Batch{Chunks: []*Chunk{chunk8, chunk9}} + daBatch, err = codecv4.NewDABatch(batch) + assert.NoError(t, err) + verifyData, err = daBatch.BlobDataProofForPointEvaluation() + assert.NoError(t, err) + assert.Equal(t, "46aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085a21962439624643e7ad898db06e9bf9432d937f3ae8cf465f1e92501497314abec74c632b4cde93d73acd1235755a4de8ef007cb7cb577864c81c4d5a80bf68e1b2bed33f54fa82b4f197b6614f69c4cfbbf2b63df630801d8abd8020a52b845", hex.EncodeToString(verifyData)) +} + +func TestCodecV4DecodeDAChunksRawTx(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + block0 := readBlockFromJSON(t, "testdata/blockTrace_02.json") + block1 := readBlockFromJSON(t, "testdata/blockTrace_03.json") + chunk0 := &Chunk{Blocks: []*Block{block0, block1}} + daChunk0, err := codecv4.NewDAChunk(chunk0, 0) + assert.NoError(t, err) + chunkBytes0, err := daChunk0.Encode() + assert.NoError(t, err) + + block2 := readBlockFromJSON(t, "testdata/blockTrace_04.json") + block3 := readBlockFromJSON(t, "testdata/blockTrace_05.json") + chunk1 := &Chunk{Blocks: []*Block{block2, block3}} + daChunk1, err := codecv4.NewDAChunk(chunk1, 0) + assert.NoError(t, err) + chunkBytes1, err := daChunk1.Encode() + assert.NoError(t, err) + + batch := &Batch{Chunks: []*Chunk{chunk0, chunk1}} + daBatch, err := codecv4.NewDABatch(batch) + assert.NoError(t, err) + + daChunksRawTx1, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes0, chunkBytes1}) + assert.NoError(t, err) + // assert number of chunks + assert.Equal(t, 2, len(daChunksRawTx1)) + + // assert block in first chunk + assert.Equal(t, 2, len(daChunksRawTx1[0].Blocks)) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[0], daChunksRawTx1[0].Blocks[0]) + assert.Equal(t, daChunk0.(*daChunkV1).blocks[1], daChunksRawTx1[0].Blocks[1]) + + // assert block in second chunk + assert.Equal(t, 2, len(daChunksRawTx1[1].Blocks)) + daChunksRawTx1[1].Blocks[0].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[0].(*daBlockV0), daChunksRawTx1[1].Blocks[0]) + 
daChunksRawTx1[1].Blocks[1].(*daBlockV0).baseFee = nil + assert.Equal(t, daChunk1.(*daChunkV1).blocks[1].(*daBlockV0), daChunksRawTx1[1].Blocks[1]) + + blob := daBatch.Blob() + err = codecv4.DecodeTxsFromBlob(blob, daChunksRawTx1) + assert.NoError(t, err) + + // assert transactions in first chunk + assert.Equal(t, 2, len(daChunksRawTx1[0].Transactions)) + // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 2, len(daChunksRawTx1[0].Transactions[0])) + assert.Equal(t, 1, len(daChunksRawTx1[0].Transactions[1])) + + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][0].TxHash, daChunksRawTx1[0].Transactions[0][0].Hash().String()) + assert.EqualValues(t, daChunk0.(*daChunkV1).transactions[0][1].TxHash, daChunksRawTx1[0].Transactions[0][1].Hash().String()) + + // assert transactions in second chunk + assert.Equal(t, 2, len(daChunksRawTx1[1].Transactions)) + // the number of transactions in encoded and decoded chunks may differ, because decoded chunks don't contain L1 messages + assert.Equal(t, 1, len(daChunksRawTx1[1].Transactions[0])) + assert.Equal(t, 0, len(daChunksRawTx1[1].Transactions[1])) + + // Uncompressed case + block4 := readBlockFromJSON(t, "testdata/blockTrace_06.json") + chunk2 := &Chunk{Blocks: []*Block{block4}} + daChunk2, err := codecv4.NewDAChunk(chunk2, 0) + assert.NoError(t, err) + chunkBytes2, err := daChunk2.Encode() + assert.NoError(t, err) + + daChunksRawTx2, err := codecv4.DecodeDAChunksRawTx([][]byte{chunkBytes2}) + assert.NoError(t, err) + + // assert number of chunks + assert.Equal(t, 1, len(daChunksRawTx2)) + + // assert block in uncompressed chunk + assert.Equal(t, 1, len(daChunksRawTx2[0].Blocks)) + assert.Equal(t, daChunk2.(*daChunkV1).blocks[0].Encode(), daChunksRawTx2[0].Blocks[0].Encode()) + + daBatchUncompressed, err := codecv4.NewDABatch(&Batch{Chunks: []*Chunk{chunk2}}) + assert.NoError(t, err) + blobUncompressed := daBatchUncompressed.Blob() + err = codecv4.DecodeTxsFromBlob(blobUncompressed, daChunksRawTx2) + assert.NoError(t, err) + + // assert transactions in first chunk + assert.Equal(t, 1, len(daChunksRawTx2[0].Transactions)) + assert.Equal(t, 0, len(daChunksRawTx2[0].Transactions[0])) +} + +func TestCodecV4BatchStandardTestCasesEnableCompression(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // Taking compression into consideration, we allow up to 5x of the max blob bytes, minus 1 byte for the compression flag. + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks. 
+ nRowsData := 5*(maxEffectiveBlobBytes-1) - (codecv4.MaxNumChunksPerBatch()*4 + 2) + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, + // max number of chunks all non-empty + {chunks: 
[][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: "53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "4a7e2416aed7aa1630b5dfac5de9f7140f0228a293e6507a98ca762f471bd4cb", expectedy: "39087ba100396ce50ea84f3cb196fd45ce7074888acc57f196b905e3bb4fffda", expectedBlobVersionedHash: "0196c25ea10bafe62aa334122d1e426eccc158423e35272ae009029caf7664b2", expectedBatchHash: "fed7eeba45afa4ac2f658e233adbc7beab27bd7472364a69ab5c16dafe3960b4"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "588908e72f3910e010ecbb38583e3c14d2de20e3fc0fcfca1fa573b6ae652009", expectedy: 
"4dd0fe025a1d27c21aa3c199e88d8f7bfa839b04e2fffb39d149b7d81ea2d81e", expectedBlobVersionedHash: "0146e7e489077de92fc8e90102560f1ea8d10f3dc5aca0c7ce3f362698e8dfed", expectedBatchHash: "5cd5ae7f3ca9d7777efef7b248fe0348841ea99b270e4c391fa5bed6a00c7aa9"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6fa8165246ac960a1a31c8f9950dad3c6cfd11393a8822738f392f096e0e27da", expectedy: "3391e91d228eee3a4341c25536741bb3d16387e47ca03548212a4a8acc898dad", expectedBlobVersionedHash: "01a65de32db70380b8728e048ed510cf4fbd9b82ff22955bbc27edebc4fd0188", expectedBatchHash: "f78751f5d548107925e31ace50234e3c926b0ade2aa2bd32f46814016f631d62"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "44c6b024e20a1b616c9619c23b612258ddb5489bb0631119598c89ddb2cf8565", expectedy: "6e3296728e406d16cf1d7342959bcbe0c4e4c1e9b1f705ae6b426a0dbb79838c", expectedBlobVersionedHash: "01cc8fbe921a7c0fb5d01a1e12ef090060740ca1ecebf279f1de3bb4499c7341", expectedBatchHash: "fcca8045e82349c28f6d8747bcd6fec84a34130b31097e2e08e854bc5c21c476"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: "4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, + } { + chunks := []*Chunk{} + + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } + + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, 
err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), true /* enable encode */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV4, + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + lastBlockTimestamp: 192837, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + batch.blobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} + +func TestCodecV4BatchStandardTestCasesDisableCompression(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // Taking into consideration disabling compression, we allow up to max effective blob bytes. + // We then ignore the metadata rows for MaxNumChunksPerBatch chunks, plus 1 byte for the compression flag. + nRowsData := maxEffectiveBlobBytes - (codecv4.MaxNumChunksPerBatch()*4 + 2) - 1 + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "04e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb071", expectedy: "5885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", expectedBlobVersionedHash: "016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a", expectedBatchHash: "7c67a67db562e51c9f86f0423275e470e85a214c477b5e01b03ad9bf04390bad"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "5f4d24694355a9e3718495c43b24652b0151053f082262fa6e26073c42fd9818", expectedy: "1b69184f2a976099671c3ccffff7a2ea83af24dd578b38956d96d2ac8b8ed74d", expectedBlobVersionedHash: "019d0e2b1297544ce7675246005b5b8db84da926a4ae98001c8272b1e638d3ef", expectedBatchHash: "00d403466e836405efe3041818bf874d4200484f521bb2b684dd7450e7cecbc8"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "14160c76e0d43a3cf37faa4c24f215b9c3349d5709b84332da80ca0667ece780", expectedy: "6407aa706069f09c7b6481ea00a489f74e96673a39e197c6f34b30f2d1f9fe23", expectedBlobVersionedHash: "0190689489894e430d08513202be679dcce47e3ae77bac13e1750a99d15b9a1c", expectedBatchHash: "b5ee4048b5f05dbdecc7a49f1698a0e911c64224ebaf5f538547973223ac1cd1"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "15ac8e175330a67d2bd8018a486ee1fbbcead23efd4f2e57cd94312cfb7830b1", expectedy: 
"12593c94d52eaed8be4b79f62397e86b3b75c2af6197533e5a917676e551ce26", expectedBlobVersionedHash: "01972ce3c3b894e9c381f2eed5395809eb7a762eb0c28b4beb73ac3c73ebd3f8", expectedBatchHash: "ae2893806a3dd7449c5bc10c47500f5df96e5cffdffe083171cb7ee908411e28"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "49ebeb74372d05b335f05d0e48f3155955c27ec9cac92a03a9d85050e24efdd6", expectedy: "7088f4810a4d61bcadcdf2debff998027eb10caa70474db18a8228ef4edc6cd7", expectedBlobVersionedHash: "015ea2df6fc4582fd704ae55157c1311f2d680240c8b8805e3435856a15da91b", expectedBatchHash: "cf4bee00c5e044bc6c9c168a3245f8edfcdeac602d63b2e75b45faa7b95d8c16"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: "2374a8bcd2fcbfae4cc43a5e21a0c69cd206071e46db2c8a3c9bb7e9b8c60120", expectedy: "51b51d261d897e81e94498493b70ec425320002d9390be69b63c87e22871d5bf", expectedBlobVersionedHash: "01600a0cb0fb308f1202172f88764bafa9deddab52331a38e767267b6785d2a3", expectedBatchHash: "53cc0ff17ca71e1711f6b261537fc8da28a5d289325be33d5286920417fe9a6e"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "6908503d26b56b1eb9c94d25e8e5d6e8a14e48d3ac38b063d2bc20c25a361fb5", expectedy: "22d016c0d7ef4d74e371522a9da62a43bcf2dc69be21e4133d35bf8e6fe44f68", expectedBlobVersionedHash: "01baf85d7d36b7d7df4c684b78fa5d3f94dd893f92c8c4cc8ee26a67b2fce588", expectedBatchHash: "7585f286302ba26219b1229da0fd1f557f465fb244bd1839eef95df1d75f1457"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: "5fcba58abcc9a0ae4a3780a2a621e57e8f8c5d323134aa9623579e698e4d18b1", expectedy: "69570d3c97e9573b5529b213055b814d5e4b7dda2bb2c3a7d06456c157ab338d", expectedBlobVersionedHash: "018cd2721e76c37374e450382e2e53faa24393cfbcbbe134e1756392c8f1a4fc", expectedBatchHash: "52948b79f4457473836b44ea9bbb2c6fc61b5937fc881b95b2baa78af0e0623b"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "53dde3d5fe1a53f364a8a865e746d3c7ca7fadadbdb816c30b49958057f1e9d9", expectedy: "3c1f69a7180f98a8a39f26189ee73fca4fbc41ca91a5ae02b521625bd67628e7", expectedBlobVersionedHash: "01d9acf02b1ef5213e0bd530e1cf99d2a19f622318bf3d97c7ec693aa3a7fdb1", expectedBatchHash: "b9411a190cc9db47fd31c009efb7b2275c235f511780f0ed6874242cb2eb7b72"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "1843d3229313afb023d210a0be73f64fba2fe20b7ae14b2e1df37ebe32f55afa", expectedy: "29db4ab0e596593fad50784a3a6f802ba1d9daf760c09f64bdc3d1899b247d97", expectedBlobVersionedHash: "01e337f571c6079bb6c89dab463ff3b6b2b5139fbd4f5446996fea8c0df94c65", expectedBatchHash: "56ce765d11a10b89fb412c293756299fd803485aca595c6de8a35c790486f62c"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "3df579b9d11368e712b9b23318e8ea2dfcc5d5a647b16fb8254d017b8804f4b1", expectedy: "4da6e30ac69fb2d65de9b9306de0fa15a2cee87aee245e831f313366c0809b46", expectedBlobVersionedHash: "01641976b8a50f5aa3d277f250904caae681a4e090e867c6abdbfe03e216003a", expectedBatchHash: "5160fc712e9dbaa52396b7662f2e393533a5b25457e5ca9475bc8fd27f24d78a"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "47ca3393ebaef699800bd666ff119c1978e938e04d22c9a024a1b17f523281f9", expectedy: "380704fe5da08d69a94c8af57f17153076f6eb20d5e69c60b343fb66c6266101", expectedBlobVersionedHash: "014aac5dbd6f5456f68635c6674caa374faa0dbe012c5800e0364749485bf1bf", expectedBatchHash: "c674d48d3a9146049b1ea2993d5cc070dd76617fa550234563591c366654d6c6"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 
100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "501e762800ca76490b61114d8a84a12f1f72fce71252f7c294a5f5b4190da6b1", expectedy: "524e879ce867b79cbeffd8aa5241731f5562addfc246dda20bb857eb55158399", expectedBlobVersionedHash: "01504b1eb6894cc96a8cac8f02fba838c086171cbb879ccd9cdeb44f9d4237f5", expectedBatchHash: "59a97a5d8e4206bb283b524b2d48a707c8869c87dea6563dd99dcb367bed6412"}, + } { + chunks := []*Chunk{} + + for _, c := range tc.chunks { + block := &Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &Chunk{Blocks: []*Block{block}} + chunks = append(chunks, chunk) + } + + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + blob, blobVersionedHash, z, _, err := codecv4.(*DACodecV4).constructBlobPayload(chunks, codecv4.MaxNumChunksPerBatch(), false /* disable encode */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{math.MaxUint8 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := daBatchV3{ + daBatchV0: daBatchV0{ + version: CodecV4, + batchIndex: 6789, + l1MessagePopped: 101, + totalL1MessagePopped: 10101, + dataHash: dataHash, + parentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + }, + lastBlockTimestamp: 192837, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } + batch.blobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} + +func TestDACodecV4SimpleMethods(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + t.Run("Version", func(t *testing.T) { + version := codecv4.Version() + assert.Equal(t, CodecV4, version) + }) +} + +func TestCodecV4ChunkCompressedDataCompatibilityCheck(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // chunk with a single empty block + emptyBlock := &Block{} + emptyChunk 
:= &Chunk{Blocks: []*Block{emptyBlock}} + + compatible, err := codecv4.CheckChunkCompressedDataCompatibility(emptyChunk) + assert.NoError(t, err) + assert.Equal(t, false, compatible) + + txChunk := &Chunk{ + Blocks: []*Block{ + { + Transactions: []*types.TransactionData{ + {Type: types.L1MessageTxType}, + }, + }, + }, + } + compatible, err = codecv4.CheckChunkCompressedDataCompatibility(txChunk) + assert.NoError(t, err) + assert.Equal(t, false, compatible) + + testCases := []struct { + name string + jsonFile string + expectCompatible bool + }{ + {"Block 02", "testdata/blockTrace_02.json", true}, + {"Block 03", "testdata/blockTrace_03.json", true}, + {"Block 04", "testdata/blockTrace_04.json", true}, + {"Block 05", "testdata/blockTrace_05.json", false}, + {"Block 06", "testdata/blockTrace_06.json", false}, + {"Block 07", "testdata/blockTrace_07.json", false}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + block := readBlockFromJSON(t, tc.jsonFile) + chunk := &Chunk{Blocks: []*Block{block}} + compatible, err := codecv4.CheckChunkCompressedDataCompatibility(chunk) + assert.NoError(t, err) + assert.Equal(t, tc.expectCompatible, compatible) + }) + } +} + +func TestCodecV4BatchCompressedDataCompatibilityCheck(t *testing.T) { + codecv4, err := CodecFromVersion(CodecV4) + require.NoError(t, err) + + // empty batch + emptyBatch := &Batch{} + compatible, err := codecv4.CheckBatchCompressedDataCompatibility(emptyBatch) + assert.NoError(t, err) + assert.Equal(t, false, compatible) + + testCases := []struct { + name string + jsonFiles []string + expectCompatible bool + }{ + {"Single Block 02", []string{"testdata/blockTrace_02.json"}, true}, + {"Single Block 03", []string{"testdata/blockTrace_03.json"}, true}, + {"Single Block 04", []string{"testdata/blockTrace_04.json"}, true}, + {"Single Block 05", []string{"testdata/blockTrace_05.json"}, false}, + {"Single Block 06", []string{"testdata/blockTrace_06.json"}, false}, + {"Single Block 07", []string{"testdata/blockTrace_07.json"}, false}, + {"Multiple Blocks And Chunks", []string{"testdata/blockTrace_02.json", "testdata/blockTrace_03.json", "testdata/blockTrace_04.json", "testdata/blockTrace_05.json", "testdata/blockTrace_06.json", "testdata/blockTrace_07.json"}, true}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var chunks []*Chunk + for _, jsonFile := range tc.jsonFiles { + block := readBlockFromJSON(t, jsonFile) + chunks = append(chunks, &Chunk{Blocks: []*Block{block}}) + } + batch := &Batch{Chunks: chunks} + compatible, err := codecv4.CheckBatchCompressedDataCompatibility(batch) + assert.NoError(t, err) + assert.Equal(t, tc.expectCompatible, compatible) + }) + } +} diff --git a/encoding/da.go b/encoding/da.go index 8ce6c35..5f22756 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -12,29 +12,84 @@ import ( "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" ) -// BLSModulus is the BLS modulus defined in EIP-4844. -var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) +// blsModulus is the BLS modulus defined in EIP-4844. +var blsModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) -// CodecVersion defines the version of encoder and decoder. 
-type CodecVersion uint8 +// blockContextByteSize is the size of the block context in bytes. +const blockContextByteSize = 60 + +// blockContextBytesForHashing is the size of the block context in bytes for hashing. +const blockContextBytesForHashing = blockContextByteSize - 2 + +// txLenByteSize is the size of the transaction length in bytes. +const txLenByteSize = 4 + +// maxBlobBytes is the maximum number of bytes that can be stored in a blob. +const maxBlobBytes = 131072 + +// maxEffectiveBlobBytes is the maximum number of bytes that can be stored in a blob. +// We can only utilize 31/32 of a blob. +const maxEffectiveBlobBytes = maxBlobBytes / 32 * 31 + +// minCompressedDataCheckSize is the minimum size of compressed data to check compatibility. +// only used in codecv2 and codecv3. +const minCompressedDataCheckSize = 131072 + +// kzgPointByteSize is the size of a KZG point (z and y) in bytes. +const kzgPointByteSize = 32 + +// zstdMagicNumber is the magic number for zstd compressed data header. +var zstdMagicNumber = []byte{0x28, 0xb5, 0x2f, 0xfd} const ( - // CodecV0 represents the version 0 of the encoder and decoder. - CodecV0 CodecVersion = iota + daBatchOffsetVersion = 0 + daBatchOffsetBatchIndex = 1 + daBatchOffsetDataHash = 25 +) - // CodecV1 represents the version 1 of the encoder and decoder. - CodecV1 +const ( + daBatchV0OffsetL1MessagePopped = 9 + daBatchV0OffsetTotalL1MessagePopped = 17 + daBatchV0OffsetParentBatchHash = 57 + daBatchV0OffsetSkippedL1MessageBitmap = 89 + daBatchV0EncodedMinLength = 89 // min length of a v0 da batch, when there are no skipped L1 messages +) - // CodecV2 represents the version 2 of the encoder and decoder. - CodecV2 +const ( + daBatchV1OffsetL1MessagePopped = 9 + daBatchV1OffsetTotalL1MessagePopped = 17 + daBatchV1OffsetBlobVersionedHash = 57 + daBatchV1OffsetParentBatchHash = 89 + daBatchV1OffsetSkippedL1MessageBitmap = 121 + daBatchV1EncodedMinLength = 121 // min length of a v1 da batch, when there are no skipped L1 messages +) - // CodecV3 represents the version 3 of the encoder and decoder. - CodecV3 +const ( + daBatchV3OffsetL1MessagePopped = 9 + daBatchV3OffsetTotalL1MessagePopped = 17 + daBatchV3OffsetBlobVersionedHash = 57 + daBatchV3OffsetParentBatchHash = 89 + daBatchV3OffsetLastBlockTimestamp = 121 + daBatchV3OffsetBlobDataProof = 129 + daBatchV3EncodedLength = 193 +) - // CodecV4 represents the version 4 of the encoder and decoder. - CodecV4 +const ( + payloadLengthBytes = 4 + calldataNonZeroByteGas = 16 + coldSloadGas = 2100 + coldAddressAccessGas = 2600 + warmAddressAccessGas = 100 + warmSloadGas = 100 + baseTxGas = 21000 + sstoreGas = 20000 + extraGasCost = 100000 // over-estimate the gas cost for ops like _getAdmin, _implementation, _requireNotPaused, etc + skippedL1MessageBitmapByteSize = 32 + functionSignatureBytes = 4 + defaultParameterBytes = 32 ) // Block represents an L2 block. @@ -99,19 +154,13 @@ func (c *Chunk) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { return numL1Messages } -// ConvertTxDataToRLPEncoding transforms []*TransactionData into []*types.Transaction. -func ConvertTxDataToRLPEncoding(txData *types.TransactionData, useMockTxData bool) ([]byte, error) { +// convertTxDataToRLPEncoding transforms []*TransactionData into []*types.Transaction. 
+func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { data, err := hexutil.Decode(txData.Data) if err != nil { return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) } - // This mock param is only used in testing comparing batch challenges with standard test cases. - // These tests use this param to set the tx data for convenience. - if useMockTxData { - return data, nil - } - var tx *types.Transaction switch txData.Type { case types.LegacyTxType: @@ -216,13 +265,13 @@ func (c *Chunk) NumL2Transactions() uint64 { return totalTxNum } -// L2GasUsed calculates the total gas of L2 transactions in a Chunk. -func (c *Chunk) L2GasUsed() uint64 { - var totalTxNum uint64 +// TotalGasUsed calculates the total gas of transactions in a Chunk. +func (c *Chunk) TotalGasUsed() uint64 { + var totalGasUsed uint64 for _, block := range c.Blocks { - totalTxNum += block.Header.GasUsed + totalGasUsed += block.Header.GasUsed } - return totalTxNum + return totalGasUsed } // StateRoot gets the state root after committing/finalizing the batch. @@ -284,7 +333,7 @@ func TxsToTxsData(txs types.Transactions) []*types.TransactionData { // Fast testing if the compressed data is compatible with our circuit // (require specified frame header and each block is compressed) -func CheckCompressedDataCompatibility(data []byte) error { +func checkCompressedDataCompatibility(data []byte) error { if len(data) < 16 { return fmt.Errorf("too small size (%x), what is it?", data) } @@ -332,11 +381,10 @@ func CheckCompressedDataCompatibility(data []byte) error { return nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - // blob contains 131072 bytes but we can only utilize 31/32 of these - if len(blobBytes) > 126976 { - return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) +// makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. 
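Concretely, the canonical form keeps the first byte of every 32-byte field element at zero so that each element stays below the BLS modulus, leaving 4096*31 = 126976 usable bytes per blob (maxEffectiveBlobBytes). A minimal sketch of the packing loop that the elided function body is assumed to perform, mirroring the inverse bytesFromBlobCanonical below:

// makeBlobCanonicalSketch is an illustrative sketch only; it assumes
// len(blobBytes) <= maxEffectiveBlobBytes, as the real function guards above.
func makeBlobCanonicalSketch(blobBytes []byte) kzg4844.Blob {
	var blob kzg4844.Blob
	for from := 0; from < len(blobBytes); from += 31 {
		elem := from / 31 * 32
		// byte 0 of each 32-byte field element stays zero; payload fills bytes 1..31
		copy(blob[elem+1:elem+32], blobBytes[from:])
	}
	return blob
}

On the read path, bytesFromBlobCanonical strips those zero bytes back out, and for compressed payloads the zstd magic number (zstdMagicNumber) is prepended before calling decompressScrollBlobToBatch, as TestBlobCompressDecompress later in this change exercises.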
+func makeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + if len(blobBytes) > maxEffectiveBlobBytes { + return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), maxEffectiveBlobBytes) } // the canonical (padded) blob payload @@ -357,20 +405,20 @@ func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { return &blob, nil } -// BytesFromBlobCanonical converts the canonical blob representation into the raw blob data -func BytesFromBlobCanonical(blob *kzg4844.Blob) [126976]byte { - var blobBytes [126976]byte +// bytesFromBlobCanonical converts the canonical blob representation into the raw blob data +func bytesFromBlobCanonical(blob *kzg4844.Blob) [maxEffectiveBlobBytes]byte { + var blobBytes [maxEffectiveBlobBytes]byte for from := 0; from < len(blob); from += 32 { copy(blobBytes[from/32*31:], blob[from+1:from+32]) } return blobBytes } -// DecompressScrollBlobToBatch decompresses the given blob bytes into scroll batch bytes -func DecompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) { +// decompressScrollBlobToBatch decompresses the given blob bytes into scroll batch bytes +func decompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) { // decompress data in stream and in batches of bytes, because we don't know actual length of compressed data var res []byte - readBatchSize := 131072 + readBatchSize := maxBlobBytes batchOfBytes := make([]byte, readBatchSize) r := bytes.NewReader(compressedBytes) @@ -395,9 +443,9 @@ func DecompressScrollBlobToBatch(compressedBytes []byte) ([]byte, error) { return res, nil } -// CalculatePaddedBlobSize calculates the required size on blob storage +// calculatePaddedBlobSize calculates the required size on blob storage // where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { +func calculatePaddedBlobSize(dataSize uint64) uint64 { paddedSize := (dataSize / 31) * 32 if dataSize%31 != 0 { @@ -407,11 +455,11 @@ func CalculatePaddedBlobSize(dataSize uint64) uint64 { return paddedSize } -// ConstructBatchPayloadInBlob constructs the batch payload. +// constructBatchPayloadInBlob constructs the batch payload. // This function is only used in compressed batch payload length estimation. 
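The un-compressed, un-padded payload assembled by this function has a fixed metadata prefix followed by the chunks' RLP-encoded L2 transactions; decodeTxsFromBytes later in this file reads the same layout back. A hedged sketch of parsing that prefix (readPayloadMetadata is an illustrative name, not part of this change):

// readPayloadMetadata reads the batch payload prefix: a big-endian uint16 chunk
// count followed by maxNumChunks big-endian uint32 per-chunk byte sizes; the
// concatenated transaction bytes then start at offset 2 + maxNumChunks*4.
func readPayloadMetadata(payload []byte, maxNumChunks int) (int, []uint32, error) {
	if len(payload) < 2+maxNumChunks*4 {
		return 0, nil, fmt.Errorf("payload too short: %d bytes", len(payload))
	}
	numChunks := int(binary.BigEndian.Uint16(payload[0:2]))
	chunkSizes := make([]uint32, 0, maxNumChunks)
	for i := 0; i < maxNumChunks; i++ {
		chunkSizes = append(chunkSizes, binary.BigEndian.Uint32(payload[2+4*i:2+4*i+4]))
	}
	return numChunks, chunkSizes, nil
}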
-func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { +func constructBatchPayloadInBlob(chunks []*Chunk, codec Codec) ([]byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + codec.MaxNumChunksPerBatch()*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) @@ -430,7 +478,7 @@ func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, } // encode L2 txs into batch payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(tx) if err != nil { return nil, err } @@ -439,20 +487,40 @@ func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, } // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } + chunkSize := len(batchBytes) - currentChunkStartIndex + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) } return batchBytes, nil } -// BlobDataProofFromValues creates the blob data proof from the given values. +// getKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. +func getKeccak256Gas(size uint64) uint64 { + return getMemoryExpansionCost(size) + 30 + 6*((size+31)/32) +} + +// getMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. +func getMemoryExpansionCost(memoryByteSize uint64) uint64 { + memorySizeWord := (memoryByteSize + 31) / 32 + memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) + return memoryCost +} + +// getTxPayloadLength calculates the length of the transaction payload. +func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { + rlpTxData, err := convertTxDataToRLPEncoding(txData) + if err != nil { + return 0, err + } + return uint64(len(rlpTxData)), nil +} + +// blobDataProofFromValues creates the blob data proof from the given values. 
// Memory layout of ``_blobDataProof``: // | z | y | kzg_commitment | kzg_proof | // |---------|---------|----------------|-----------| // | bytes32 | bytes32 | bytes48 | bytes48 | -func BlobDataProofFromValues(z kzg4844.Point, y kzg4844.Claim, commitment kzg4844.Commitment, proof kzg4844.Proof) []byte { +func blobDataProofFromValues(z kzg4844.Point, y kzg4844.Claim, commitment kzg4844.Commitment, proof kzg4844.Proof) []byte { result := make([]byte, 32+32+48+48) copy(result[0:32], z[:]) @@ -462,3 +530,171 @@ func BlobDataProofFromValues(z kzg4844.Point, y kzg4844.Claim, commitment kzg484 return result } + +var errSmallLength error = fmt.Errorf("length of blob bytes is too small") + +// getNextTx parses blob bytes to find length of payload of next Tx and decode it +func getNextTx(bytes []byte, index int) (*types.Transaction, int, error) { + var nextIndex int + length := len(bytes) + if length < index+1 { + return nil, 0, errSmallLength + } + var txBytes []byte + if bytes[index] <= 0x7f { + // the first byte is transaction type, rlp encoding begins from next byte + txBytes = append(txBytes, bytes[index]) + index++ + } + if length < index+1 { + return nil, 0, errSmallLength + } + if bytes[index] >= 0xc0 && bytes[index] <= 0xf7 { + // length of payload is simply bytes[index] - 0xc0 + payloadLen := int(bytes[index] - 0xc0) + if length < index+1+payloadLen { + return nil, 0, errSmallLength + } + txBytes = append(txBytes, bytes[index:index+1+payloadLen]...) + nextIndex = index + 1 + payloadLen + } else if bytes[index] > 0xf7 { + // the length of payload is encoded in next bytes[index] - 0xf7 bytes + // length of bytes representation of length of payload + lenPayloadLen := int(bytes[index] - 0xf7) + if length < index+1+lenPayloadLen { + return nil, 0, errSmallLength + } + lenBytes := bytes[index+1 : index+1+lenPayloadLen] + for len(lenBytes) < 8 { + lenBytes = append([]byte{0x0}, lenBytes...) + } + payloadLen := binary.BigEndian.Uint64(lenBytes) + + if length < index+1+lenPayloadLen+int(payloadLen) { + return nil, 0, errSmallLength + } + txBytes = append(txBytes, bytes[index:index+1+lenPayloadLen+int(payloadLen)]...) 
+ nextIndex = index + 1 + lenPayloadLen + int(payloadLen) + } else { + return nil, 0, fmt.Errorf("incorrect format of rlp encoding") + } + tx := &types.Transaction{} + err := tx.UnmarshalBinary(txBytes) + if err != nil { + return nil, 0, fmt.Errorf("failed to unmarshal tx, err: %w", err) + } + return tx, nextIndex, nil +} + +// decodeTxsFromBytes decodes transactions from blob bytes and writes them to the given chunks. +func decodeTxsFromBytes(blobBytes []byte, chunks []*DAChunkRawTx, maxNumChunks int) error { + numChunks := int(binary.BigEndian.Uint16(blobBytes[0:2])) + if numChunks != len(chunks) { + return fmt.Errorf("blob chunk number is not same as calldata, blob num chunks: %d, calldata num chunks: %d", numChunks, len(chunks)) + } + index := 2 + maxNumChunks*4 + for chunkID, chunk := range chunks { + var transactions []types.Transactions + chunkSize := int(binary.BigEndian.Uint32(blobBytes[2+4*chunkID : 2+4*chunkID+4])) + + chunkBytes := blobBytes[index : index+chunkSize] + curIndex := 0 + for _, block := range chunk.Blocks { + var blockTransactions types.Transactions + txNum := int(block.NumTransactions()) - int(block.NumL1Messages()) + if txNum < 0 { + return fmt.Errorf("invalid transaction count: NumL1Messages (%d) exceeds NumTransactions (%d)", block.NumL1Messages(), block.NumTransactions()) + } + for i := 0; i < txNum; i++ { + tx, nextIndex, err := getNextTx(chunkBytes, curIndex) + if err != nil { + return fmt.Errorf("couldn't decode next tx from blob bytes: %w, index: %d", err, index+curIndex+4) + } + curIndex = nextIndex + blockTransactions = append(blockTransactions, tx) + } + transactions = append(transactions, blockTransactions) + } + chunk.Transactions = transactions + index += chunkSize + } + return nil +} + +// GetHardforkName returns the name of the hardfork active at the given block height and timestamp. +func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uint64) string { + blockHeightBigInt := new(big.Int).SetUint64(blockHeight) + if !config.IsBernoulli(blockHeightBigInt) { + return "homestead" + } else if !config.IsCurie(blockHeightBigInt) { + return "bernoulli" + } else if !config.IsDarwin(blockHeightBigInt, blockTimestamp) { + return "curie" + } else if !config.IsDarwinV2(blockHeightBigInt, blockTimestamp) { + return "darwin" + } else { + return "darwinV2" + } +} + +// GetCodecVersion returns the encoding codec version for the given block height and timestamp. +func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uint64) CodecVersion { + blockHeightBigInt := new(big.Int).SetUint64(blockHeight) + if !config.IsBernoulli(blockHeightBigInt) { + return CodecV0 + } else if !config.IsCurie(blockHeightBigInt) { + return CodecV1 + } else if !config.IsDarwin(blockHeightBigInt, blockTimestamp) { + return CodecV2 + } else if !config.IsDarwinV2(blockHeightBigInt, blockTimestamp) { + return CodecV3 + } else { + return CodecV4 + } +} + +// CheckChunkCompressedDataCompatibility checks compressed data compatibility of a batch built by a single chunk. +func CheckChunkCompressedDataCompatibility(chunk *Chunk, codecVersion CodecVersion) (bool, error) { + codec, err := CodecFromVersion(codecVersion) + if err != nil { + return false, fmt.Errorf("failed to get codec from version: %w", err) + } + return codec.CheckChunkCompressedDataCompatibility(chunk) +} + +// CheckBatchCompressedDataCompatibility checks compressed data compatibility of a batch.
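Composed with GetBatchEnableCompression defined just below, a typical caller resolves the codec version first and then makes the compression decision; a hedged sketch (batchNeedsCompression is a hypothetical name, not part of this change):

// batchNeedsCompression resolves the codec version for a block position and
// then asks whether compression should be enabled for the batch (always false
// for v0/v1, always true for v2/v3, data-dependent for v4).
func batchNeedsCompression(cfg *params.ChainConfig, blockHeight, blockTimestamp uint64, batch *Batch) (bool, error) {
	version := GetCodecVersion(cfg, blockHeight, blockTimestamp)
	return GetBatchEnableCompression(version, batch)
}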
+func CheckBatchCompressedDataCompatibility(batch *Batch, codecVersion CodecVersion) (bool, error) { + codec, err := CodecFromVersion(codecVersion) + if err != nil { + return false, fmt.Errorf("failed to get codec from version: %w", err) + } + return codec.CheckBatchCompressedDataCompatibility(batch) +} + +// GetChunkEnableCompression returns whether to enable compression for the given block height and timestamp. +func GetChunkEnableCompression(codecVersion CodecVersion, chunk *Chunk) (bool, error) { + switch codecVersion { + case CodecV0, CodecV1: + return false, nil + case CodecV2, CodecV3: + return true, nil + case CodecV4: + return CheckChunkCompressedDataCompatibility(chunk, codecVersion) + default: + return false, fmt.Errorf("unsupported codec version: %v", codecVersion) + } +} + +// GetBatchEnableCompression returns whether to enable compression for the given block height and timestamp. +func GetBatchEnableCompression(codecVersion CodecVersion, batch *Batch) (bool, error) { + switch codecVersion { + case CodecV0, CodecV1: + return false, nil + case CodecV2, CodecV3: + return true, nil + case CodecV4: + return CheckBatchCompressedDataCompatibility(batch, codecVersion) + default: + return false, fmt.Errorf("unsupported codec version: %v", codecVersion) + } +} diff --git a/encoding/da_test.go b/encoding/da_test.go index 0481597..dbfbaf1 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -1,6 +1,7 @@ package encoding import ( + "encoding/hex" "encoding/json" "os" "testing" @@ -10,6 +11,8 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding/zstd" ) func TestMain(m *testing.M) { @@ -55,7 +58,7 @@ func TestUtilFunctions(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(11), crc1Max) assert.Equal(t, uint64(3), chunk1.NumTransactions()) - assert.Equal(t, uint64(1194994), chunk1.L2GasUsed()) + assert.Equal(t, uint64(1194994), chunk1.TotalGasUsed()) assert.Equal(t, uint64(42), chunk2.NumL1Messages(0)) assert.Equal(t, uint64(1), chunk2.NumL2Transactions()) @@ -63,7 +66,7 @@ func TestUtilFunctions(t *testing.T) { assert.NoError(t, err) assert.Equal(t, uint64(0), crc2Max) assert.Equal(t, uint64(7), chunk2.NumTransactions()) - assert.Equal(t, uint64(144000), chunk2.L2GasUsed()) + assert.Equal(t, uint64(144000), chunk2.TotalGasUsed()) assert.Equal(t, uint64(257), chunk3.NumL1Messages(0)) assert.Equal(t, uint64(0), chunk3.NumL2Transactions()) @@ -73,7 +76,7 @@ func TestUtilFunctions(t *testing.T) { assert.EqualError(t, err, "block (17, 0x003fee335455c0c293dda17ea9365fe0caa94071ed7216baf61f7aeb808e8a28) has nil RowConsumption") assert.Equal(t, uint64(0), crc3Max) assert.Equal(t, uint64(5), chunk3.NumTransactions()) - assert.Equal(t, uint64(240000), chunk3.L2GasUsed()) + assert.Equal(t, uint64(240000), chunk3.TotalGasUsed()) // Test Batch methods assert.Equal(t, block6.Header.Root, batch.StateRoot()) @@ -96,7 +99,7 @@ func TestConvertTxDataToRLPEncoding(t *testing.T) { continue } - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := convertTxDataToRLPEncoding(txData) assert.NoError(t, err) var tx types.Transaction err = tx.UnmarshalBinary(rlpTxData) @@ -112,6 +115,26 @@ func TestEmptyBatchRoots(t *testing.T) { assert.Equal(t, common.Hash{}, emptyBatch.WithdrawRoot()) } +func TestBlobCompressDecompress(t *testing.T) { + blobString := 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04" + blobBytes, err := hex.DecodeString(blobString) + assert.NoError(t, err) + + compressed, err := zstd.CompressScrollBatchBytes(blobBytes) + assert.NoError(t, err) + + blob, err := makeBlobCanonical(compressed) + assert.NoError(t, err) + + res := bytesFromBlobCanonical(blob) + compressedBytes := res[:] + compressedBytes = append(zstdMagicNumber, compressedBytes...) + + decompressedBlobBytes, err := decompressScrollBlobToBatch(compressedBytes) + assert.NoError(t, err) + assert.Equal(t, blobBytes, decompressedBlobBytes) +} + func readBlockFromJSON(t *testing.T, filename string) *Block { data, err := os.ReadFile(filename) assert.NoError(t, err) diff --git a/encoding/interfaces.go b/encoding/interfaces.go new file mode 100644 index 0000000..683057a --- /dev/null +++ b/encoding/interfaces.go @@ -0,0 +1,109 @@ +package encoding + +import ( + "fmt" + "math/big" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" +) + +// DABlock represents a Data Availability Block. 
+type DABlock interface { + Encode() []byte + Decode([]byte) error + Number() uint64 + NumTransactions() uint16 + NumL1Messages() uint16 +} + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk interface { + Encode() ([]byte, error) + Hash() (common.Hash, error) + BlockRange() (uint64, uint64, error) +} + +// DABatch contains metadata about a batch of DAChunks. +type DABatch interface { + Encode() []byte + Hash() common.Hash + DataHash() common.Hash + BlobDataProofForPointEvaluation() ([]byte, error) + Blob() *kzg4844.Blob + BlobBytes() []byte + Version() CodecVersion + SkippedL1MessageBitmap() []byte +} + +// Codec represents the interface for encoding and decoding DA-related structures. +type Codec interface { + Version() CodecVersion + MaxNumChunksPerBatch() int + + NewDABlock(*Block, uint64) (DABlock, error) + NewDAChunk(*Chunk, uint64) (DAChunk, error) + NewDABatch(*Batch) (DABatch, error) + NewDABatchFromBytes([]byte) (DABatch, error) + + DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) + DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error + + CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) + CheckBatchCompressedDataCompatibility(*Batch) (bool, error) + + EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) + EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) + EstimateBlockL1CommitCalldataSize(*Block) (uint64, error) + EstimateChunkL1CommitCalldataSize(*Chunk) (uint64, error) + EstimateChunkL1CommitGas(*Chunk) (uint64, error) + EstimateBatchL1CommitGas(*Batch) (uint64, error) + EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error) + + JSONFromBytes([]byte) ([]byte, error) // convert batch header bytes to JSON, this is only used to provide witness data for the prover. +} + +// CodecVersion represents the version of the codec. +type CodecVersion uint8 + +const ( + CodecV0 CodecVersion = iota + CodecV1 + CodecV2 + CodecV3 + CodecV4 +) + +// CodecFromVersion returns the appropriate codec for the given version. +func CodecFromVersion(version CodecVersion) (Codec, error) { + switch version { + case CodecV0: + return &DACodecV0{}, nil + case CodecV1: + return &DACodecV1{}, nil + case CodecV2: + return &DACodecV2{}, nil + case CodecV3: + return &DACodecV3{}, nil + case CodecV4: + return &DACodecV4{}, nil + default: + return nil, fmt.Errorf("unsupported codec version: %v", version) + } +} + +// CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
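For orientation, a minimal caller-side sketch (buildDABatch is a hypothetical name, error handling trimmed) of how the selector defined next composes with the Codec interface above:

// buildDABatch picks the codec active at the given block position and turns a
// Batch into its data-availability form.
func buildDABatch(chainCfg *params.ChainConfig, blockNumber *big.Int, blockTimestamp uint64, batch *Batch) (DABatch, error) {
	codec := CodecFromConfig(chainCfg, blockNumber, blockTimestamp)
	daBatch, err := codec.NewDABatch(batch)
	if err != nil {
		return nil, err
	}
	// daBatch.Encode() yields the encoded batch header bytes, and daBatch.Blob()
	// the EIP-4844 blob carrying the (possibly compressed) payload.
	return daBatch, nil
}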
+func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { + if chainCfg.IsDarwinV2(startBlockNumber, startBlockTimestamp) { + return &DACodecV4{} + } else if chainCfg.IsDarwin(startBlockNumber, startBlockTimestamp) { + return &DACodecV3{} + } else if chainCfg.IsCurie(startBlockNumber) { + return &DACodecV2{} + } else if chainCfg.IsBernoulli(startBlockNumber) { + return &DACodecV1{} + } else { + return &DACodecV0{} + } +} diff --git a/encoding/interfaces_test.go b/encoding/interfaces_test.go new file mode 100644 index 0000000..72c2dda --- /dev/null +++ b/encoding/interfaces_test.go @@ -0,0 +1,117 @@ +package encoding + +import ( + "math/big" + "testing" + + "github.com/scroll-tech/go-ethereum/params" + "github.com/stretchr/testify/assert" +) + +func TestCodecFromVersion(t *testing.T) { + tests := []struct { + name string + version CodecVersion + want Codec + wantErr bool + }{ + {"CodecV0", CodecV0, &DACodecV0{}, false}, + {"CodecV1", CodecV1, &DACodecV1{}, false}, + {"CodecV2", CodecV2, &DACodecV2{}, false}, + {"CodecV3", CodecV3, &DACodecV3{}, false}, + {"CodecV4", CodecV4, &DACodecV4{}, false}, + {"InvalidCodec", CodecVersion(99), nil, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := CodecFromVersion(tt.version) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.IsType(t, tt.want, got) + } + }) + } +} + +func TestCodecFromConfig(t *testing.T) { + tests := []struct { + name string + config *params.ChainConfig + blockNum *big.Int + timestamp uint64 + want Codec + }{ + { + name: "DarwinV2 active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + DarwinTime: new(uint64), + DarwinV2Time: new(uint64), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV4{}, + }, + { + name: "Darwin active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + DarwinTime: new(uint64), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV3{}, + }, + { + name: "Curie active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV2{}, + }, + { + name: "Bernoulli active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV1{}, + }, + { + name: "London active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV0{}, + }, + { + name: "No upgrades", + config: ¶ms.ChainConfig{}, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV0{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := CodecFromConfig(tt.config, tt.blockNum, tt.timestamp) + assert.IsType(t, tt.want, got) + }) + } +} diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go index 58eab2b..aab718f 100644 --- a/encoding/zstd/zstd.go +++ b/encoding/zstd/zstd.go @@ -5,16 +5,24 @@ package zstd char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); */ import "C" + import ( "fmt" "unsafe" ) -// CompressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. 
+const compressBufferOverhead = 128 + +// CompressScrollBatchBytes compresses the given batch of bytes using zstd compression. +// The output buffer is allocated with an extra compressBufferOverhead bytes to accommodate +// potential metadata overhead or error messages from the underlying C function. func CompressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + if len(batchBytes) == 0 { + return nil, fmt.Errorf("input batch is empty") + } + srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbufSize := C.uint64_t(len(batchBytes) + compressBufferOverhead) outbuf := make([]byte, outbufSize) if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, diff --git a/go.mod b/go.mod index 0a84dd2..a3e1927 100644 --- a/go.mod +++ b/go.mod @@ -3,34 +3,36 @@ module github.com/scroll-tech/da-codec go 1.21 require ( - github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4 + github.com/agiledragon/gomonkey/v2 v2.12.0 + github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22 github.com/stretchr/testify v1.9.0 ) require ( github.com/bits-and-blooms/bitset v1.12.0 // indirect github.com/btcsuite/btcd v0.20.1-beta // indirect + github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect github.com/consensys/bavard v0.1.13 // indirect github.com/consensys/gnark-crypto v0.12.1 // indirect github.com/crate-crypto/go-kzg-4844 v1.0.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 // indirect github.com/go-ole/go-ole v1.3.0 // indirect github.com/go-stack/stack v1.8.1 // indirect github.com/holiman/uint256 v1.2.4 // indirect github.com/iden3/go-iden3-crypto v0.0.15 // indirect github.com/klauspost/compress v1.17.9 - github.com/kr/text v0.2.0 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rogpeppe/go-internal v1.10.0 // indirect github.com/scroll-tech/zktrie v0.8.4 // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect - github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 // indirect + github.com/supranational/blst v0.3.11 // indirect github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/yusufpapurcu/wmi v1.2.3 // indirect golang.org/x/crypto v0.17.0 // indirect + golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.17.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 0a2e1b6..d8f830f 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,18 @@ +github.com/DataDog/zstd v1.4.5 h1:EndNeuB0l9syBZhut0wns3gV1hL8zX8LIu6ZiVHWLIQ= +github.com/DataDog/zstd v1.4.5/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= github.com/VictoriaMetrics/fastcache v1.12.1 h1:i0mICQuojGDL3KblA7wUNlY5lOK6a4bwt3uRKnkZU40= github.com/VictoriaMetrics/fastcache v1.12.1/go.mod h1:tX04vaqcNoQeGLD+ra5pU5sWkuxnzWhEzLwhP9w653o= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/agiledragon/gomonkey/v2 v2.12.0 h1:ek0dYu9K1rSV+TgkW5LvNNPRWyDZVIxGMCFI6Pz9o38= +github.com/agiledragon/gomonkey/v2 v2.12.0/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= 
+github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA= github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= +github.com/btcsuite/btcd/btcec/v2 v2.2.0 h1:fzn1qaOt32TuLjFlkzYSsBC35Q3KUjT1SwPxiMSCF5k= +github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= @@ -14,30 +22,50 @@ github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtE github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cockroachdb/errors v1.11.1 h1:xSEW75zKaKCWzR3OfxXUxgrk/NtT4G1MiOv5lWZazG8= +github.com/cockroachdb/errors v1.11.1/go.mod h1:8MUxA3Gi6b25tYlFEBGLf+D8aISL+M4MIpiWMSNRfxw= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/pebble v1.1.0 h1:pcFh8CdCIt2kmEpK0OIatq67Ln9uGDYY3d5XnE0LJG4= +github.com/cockroachdb/pebble v1.1.0/go.mod h1:sEHm5NOXxyiAoKWhoFxT8xMgd/f3RA6qUqQ1BXKrh2E= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= github.com/consensys/gnark-crypto v0.12.1 h1:lHH39WuuFgVHONRl3J0LRBtuYdQTumFSDtJF7HpyG8M= github.com/consensys/gnark-crypto v0.12.1/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= github.com/crate-crypto/go-kzg-4844 v1.0.0 h1:TsSgHwrkTKecKJ4kadtHi4b3xHW5dCFUDFnUp1TsawI= github.com/crate-crypto/go-kzg-4844 v1.0.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= +github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 
h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 h1:B2mpK+MNqgPqk2/KNi1LbqwtZDy5F7iy0mynQiBr8VA= github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4/go.mod h1:y4GA2JbAUama1S4QwYjC2hefgGLU8Ul0GMtL/ADMF1c= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/getsentry/sentry-go v0.18.0 h1:MtBW5H9QgdcJabtZcuJG80BMOwaBpkRDZkxRkNC1sN0= +github.com/getsentry/sentry-go v0.18.0/go.mod h1:Kgon4Mby+FJ7ZWHFUAZgVaIa8sxHtnRJRLTXZr51aKQ= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw= github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= -github.com/holiman/bloomfilter/v2 v2.0.3 h1:73e0e/V0tCydx14a0SCYS/EWCxgwLZ18CZcZKVu0fao= -github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iURXE7ZOP9L9hSkA= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/holiman/uint256 v1.2.4 h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= @@ -45,6 +73,7 @@ github.com/iden3/go-iden3-crypto v0.0.15 h1:4MJYlrot1l31Fzlo2sF56u7EVFeHHJkxGXXZ github.com/iden3/go-iden3-crypto v0.0.15/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= @@ -52,10 +81,14 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod 
h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= -github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= -github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= +github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= @@ -68,22 +101,30 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/tsdb v0.7.1 h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= -github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/prometheus/client_golang v1.12.0 h1:C+UIj/QWtmqY13Arb8kwMt5j34/0Z2iKamrJ+ryC0Gg= +github.com/prometheus/client_golang v1.12.0/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a h1:CmF68hwI0XsOQ5UwlBopMi2Ow4Pbg32akc4KIVCOm+Y= +github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= +github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4 h1:gheWXra3HdZsz6q+w4LrXy8ybHOO6/t6Kb/V64bR5wE= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod 
h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= +github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22 h1:s1/8G2HP1z9jd0FBbUVs7viv/lQZA/8QoQppXYTX1CU= +github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2 h1:wh1wzwAhZBNiZO37uWS/nDaKiIwHz4mDo4pnA+fqTO0= -github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= +github.com/supranational/blst v0.3.11 h1:LyU6FolezeWAhvQk0k6O/d49jqgO52MSDDfYgbeoEm4= +github.com/supranational/blst v0.3.11/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= @@ -93,13 +134,18 @@ github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9f github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.1.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -107,6 +153,11 @@ golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
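Usage sketch (not part of the patch): the snippet below shows how a caller outside the package could select a codec, mirroring what interfaces_test.go exercises. The import paths follow the module path declared in go.mod; the chain-config values are illustrative, chosen so that CodecFromConfig resolves to DACodecV4 just like the "DarwinV2 active" test case.

package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/params"
)

func main() {
	// Select a codec explicitly by version.
	codec, err := encoding.CodecFromVersion(encoding.CodecV4)
	if err != nil {
		panic(err)
	}
	fmt.Println("explicit version:", codec.Version())

	// Or derive the codec from the chain config and the batch's first block.
	// With all forks activated at block 0 / timestamp 0, this resolves to DACodecV4.
	zeroTime := uint64(0)
	cfg := &params.ChainConfig{
		LondonBlock:    big.NewInt(0),
		BernoulliBlock: big.NewInt(0),
		CurieBlock:     big.NewInt(0),
		DarwinTime:     &zeroTime,
		DarwinV2Time:   &zeroTime,
	}
	codec = encoding.CodecFromConfig(cfg, big.NewInt(0), 0)
	fmt.Println("from config:", codec.Version())
}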
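A similarly hedged sketch for the zstd change: CompressScrollBatchBytes now rejects empty input up front instead of risking an out-of-bounds access on &batchBytes[0], and the output buffer sizing is driven by the compressBufferOverhead constant. Building this requires cgo and the native compress_scroll_batch_bytes library shipped with the repository; the import path again follows the go.mod module path.

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding/zstd"
)

func main() {
	batch := []byte("example batch payload")

	// The function allocates the output buffer internally (input size plus the
	// 128-byte overhead), so callers only pass the raw batch bytes.
	compressed, err := zstd.CompressScrollBatchBytes(batch)
	if err != nil {
		panic(err)
	}
	fmt.Printf("compressed %d bytes into %d bytes\n", len(batch), len(compressed))

	// Empty input is now rejected with an explicit error.
	if _, err := zstd.CompressScrollBatchBytes(nil); err != nil {
		fmt.Println("empty input:", err)
	}
}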