Skip to content

Commit

Permalink
wip: add/update chunk and batch prove/verify
Browse files Browse the repository at this point in the history
  • Loading branch information
lastminutedev committed Nov 23, 2023
1 parent 0649057 commit b484a71
Show file tree
Hide file tree
Showing 7 changed files with 116 additions and 52 deletions.
20 changes: 17 additions & 3 deletions common/types/batch_header.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ type BatchHeader struct {
func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64, parentBatchHash common.Hash, chunks []*Chunk) (*BatchHeader, error) {
// buffer for storing chunk hashes in order to compute the batch data hash
var dataBytes []byte
var l1BlockRangeHashBytes []byte

// skipped L1 message bitmap, an array of 256-bit bitmaps
var skippedBitmap []*big.Int
Expand All @@ -56,6 +57,7 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64
return nil, err
}
dataBytes = append(dataBytes, chunkHash.Bytes()...)
l1BlockRangeHashBytes = append(l1BlockRangeHashBytes, chunk.L1BlockRangeHash.Bytes()...)

// build skip bitmap
for blockID, block := range chunk.Blocks {
Expand Down Expand Up @@ -95,6 +97,9 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64
// compute data hash
dataHash := crypto.Keccak256Hash(dataBytes)

// compute l1 block range hash
l1BlockRangeHash := crypto.Keccak256Hash(l1BlockRangeHashBytes)

// compute skipped bitmap
bitmapBytes := make([]byte, len(skippedBitmap)*32)
for ii, num := range skippedBitmap {
Expand All @@ -111,9 +116,8 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64
dataHash: dataHash,
parentBatchHash: parentBatchHash,
skippedL1MessageBitmap: bitmapBytes,
// TODO:
lastAppliedL1Block: 0,
l1BlockRangeHash: common.Hash{},
lastAppliedL1Block: chunks[len(chunks)-1].LastAppliedL1Block,
l1BlockRangeHash: l1BlockRangeHash,
}, nil
}

Expand All @@ -137,6 +141,16 @@ func (b *BatchHeader) SkippedL1MessageBitmap() []byte {
return b.skippedL1MessageBitmap
}

// LastAppliedL1Block returns the number of the most recent L1 block applied
// by the chunks covered by this BatchHeader.
func (b *BatchHeader) LastAppliedL1Block() uint64 {
	return b.lastAppliedL1Block
}

// L1BlockRangeHash returns the keccak hash over the L1 block range covered by
// the batch (computed from the per-chunk L1 block range hashes).
func (b *BatchHeader) L1BlockRangeHash() common.Hash {
	return b.l1BlockRangeHash
}

// Encode encodes the BatchHeader into RollupV2 BatchHeaderV0Codec Encoding.
func (b *BatchHeader) Encode() []byte {
batchBytes := make([]byte, 129+len(b.skippedL1MessageBitmap))
Expand Down
26 changes: 14 additions & 12 deletions contracts/src/L1/L1ViewOracle.sol
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,25 @@ import {IL1ViewOracle} from "./IL1ViewOracle.sol";

contract L1ViewOracle is IL1ViewOracle {
    /**
     * @dev Returns the keccak hash over the blockhashes of an L1 block range.
     * @param _from First block number included in the range (must be > 0).
     * @param _to Last block number included in the range (must be < block.number).
     * @return hash_ keccak256 of the tightly packed blockhashes of [_from, _to].
     */
    function blockRangeHash(uint256 _from, uint256 _to) external view returns (bytes32 hash_) {
        require(_from > 0, "Incorrect from/to range");
        require(_to >= _from, "Incorrect from/to range");
        require(_to < block.number, "Incorrect from/to range");

        bytes32[] memory blockHashes = new bytes32[](_to - _from + 1);

        for (uint256 i = _from; i <= _to; i++) {
            bytes32 blockHash = blockhash(i);

            // blockhash() returns 0 for blocks older than the EVM's 256-block window
            require(blockHash != 0, "Blockhash not available");

            // index relative to the range start replaces a separate counter
            blockHashes[i - _from] = blockHash;
        }

        hash_ = keccak256(abi.encodePacked(blockHashes));
    }
}
54 changes: 46 additions & 8 deletions contracts/src/L1/rollup/ScrollChain.sol
Original file line number Diff line number Diff line change
Expand Up @@ -202,17 +202,26 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
mstore(0x40, add(dataPtr, mul(_chunksLength, 32)))
}

uint256 _lastAppliedL1Block;
uint256 _totalNumL1MessagesInChunk;
uint256 _lastAppliedL1BlockInChunk;
bytes32 _l1BlockRangeHashInChunk;
// compute the data hash for each chunk
uint256 _totalL1MessagesPoppedInBatch;
for (uint256 i = 0; i < _chunksLength; i++) {
uint256 _totalNumL1MessagesInChunk = _commitChunk(
(_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk) = _commitChunk(
dataPtr,
_chunks[i],
_totalL1MessagesPoppedInBatch,
_totalL1MessagesPoppedOverall,
_skippedL1MessageBitmap
);

// if it is the last chunk, update the last applied L1 block
if (i == _chunksLength - 1) {
_lastAppliedL1Block = _lastAppliedL1BlockInChunk;
}

unchecked {
_totalL1MessagesPoppedInBatch += _totalNumL1MessagesInChunk;
_totalL1MessagesPoppedOverall += _totalNumL1MessagesInChunk;
Expand Down Expand Up @@ -245,10 +254,13 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall);
BatchHeaderV0Codec.storeDataHash(batchPtr, _dataHash);
BatchHeaderV0Codec.storeParentBatchHash(batchPtr, _parentBatchHash);
BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap);
uint256 batchOffset = BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap);
BatchHeaderV0Codec.storeLastAppliedL1Block(batchOffset, _lastAppliedL1Block);
// TODO: store l1BlockRangeHash
// BatchHeaderV0Codec.storeL1BlockRangeHash(batchOffset, _l1BlockRangeHashInBatch);

// compute batch hash
bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 89 + _skippedL1MessageBitmap.length);
bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 129 + _skippedL1MessageBitmap.length);

committedBatches[_batchIndex] = _batchHash;
emit CommitBatch(_batchIndex, _batchHash);
Expand Down Expand Up @@ -310,6 +322,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
// avoid duplicated verification
require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified");

// TODO: add lastAppliedL1Block and l1BlockRangeHash
// compute public input hash
bytes32 _publicInputHash = keccak256(
abi.encodePacked(layer2ChainId, _prevStateRoot, _postStateRoot, _withdrawRoot, _dataHash)
Expand Down Expand Up @@ -454,7 +467,15 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
uint256 _totalL1MessagesPoppedInBatch,
uint256 _totalL1MessagesPoppedOverall,
bytes calldata _skippedL1MessageBitmap
) internal view returns (uint256 _totalNumL1MessagesInChunk) {
)
internal
view
returns (
uint256 _totalNumL1MessagesInChunk,
uint256 _lastAppliedL1BlockInChunk,
bytes32 _l1BlockRangeHashInChunk
)
{
uint256 chunkPtr;
uint256 startDataPtr;
uint256 dataPtr;
Expand All @@ -481,7 +502,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
}
}
assembly {
mstore(0x40, add(dataPtr, mul(_totalTransactionsInChunk, 0x20))) // reserve memory for tx hashes
mstore(0x40, add(add(dataPtr, mul(_totalTransactionsInChunk, 0x20)), 0x28)) // reserve memory for tx hashes and l1 block hashes data
}
}

Expand All @@ -492,6 +513,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
blockPtr := add(chunkPtr, 1) // reset block ptr
}

uint256 _lastAppliedL1Block;
// concatenate tx hashes
uint256 l2TxPtr = ChunkCodec.l2TxPtr(chunkPtr, _numBlocks);
while (_numBlocks > 0) {
Expand All @@ -517,6 +539,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
}
}

if (_numBlocks == 1) {
// check last block
_lastAppliedL1Block = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr);
}

unchecked {
_totalNumL1MessagesInChunk += _numL1MessagesInBlock;
_totalL1MessagesPoppedInBatch += _numL1MessagesInBlock;
Expand All @@ -527,19 +554,30 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
}
}

uint256 _lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr);
bytes32 _l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr);

require(_lastAppliedL1Block == _lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk");

// check the actual number of transactions in the chunk
require((dataPtr - txHashStartDataPtr) / 32 <= maxNumTxInChunk, "too many txs in one chunk");

// check chunk has correct length
require(l2TxPtr - chunkPtr == _chunk.length, "incomplete l2 transaction data");
assembly {
mstore(dataPtr, _lastAppliedL1BlockInChunk)
mstore(dataPtr, _l1BlockRangeHashInChunk)
dataPtr := add(dataPtr, 0x28)
}

// check chunk has correct length. 40 is the length of lastAppliedL1Block and l1BlockRangeHash
require(l2TxPtr - chunkPtr + 40 == _chunk.length, "incomplete l2 transaction data");

// compute data hash and store to memory
assembly {
let dataHash := keccak256(startDataPtr, sub(dataPtr, startDataPtr))
mstore(memPtr, dataHash)
}

return _totalNumL1MessagesInChunk;
return (_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk);
}

/// @dev Internal function to load L1 message hashes from the message queue.
Expand Down
12 changes: 6 additions & 6 deletions contracts/src/libraries/codec/BatchHeaderV0Codec.sol
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ library BatchHeaderV0Codec {
/// @return length The length in bytes of the batch header.
function loadAndValidate(bytes calldata _batchHeader) internal pure returns (uint256 batchPtr, uint256 length) {
length = _batchHeader.length;
require(length >= 97, "batch header length too small");
require(length >= 129, "batch header length too small");

// copy batch header to memory.
assembly {
Expand All @@ -37,7 +37,7 @@ library BatchHeaderV0Codec {
uint256 _l1MessagePopped = BatchHeaderV0Codec.l1MessagePopped(batchPtr);

unchecked {
require(length == 97 + ((_l1MessagePopped + 255) / 256) * 32, "wrong bitmap length");
require(length == 129 + ((_l1MessagePopped + 255) / 256) * 32, "wrong bitmap length");
}
}

Expand Down Expand Up @@ -190,12 +190,12 @@ library BatchHeaderV0Codec {
}
}

/// @notice Store the l1 block range hash of batch header.
/// @dev NOTE(review): the hash is written at `batchOffset + 97` — confirm this
///      slot does not collide with the field written by
///      `storeLastAppliedL1Block`, which takes the same dynamic offset.
/// @param batchOffset The start memory offset of the batch header + dynamic offset.
/// @param _l1BlockRangeHash The l1 block range hash.
function storeL1BlockRangeHash(uint256 batchOffset, bytes32 _l1BlockRangeHash) internal pure {
    assembly {
        mstore(add(batchOffset, 97), _l1BlockRangeHash)
    }
}

Expand Down
24 changes: 21 additions & 3 deletions contracts/src/libraries/codec/ChunkCodec.sol
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,20 @@ library ChunkCodec {
}

/// @notice Return the last applied L1 block encoded at the end of the chunk.
/// @dev The chunk appends 40 bytes after `l2Transactions`: an 8-byte
///      big-endian last applied L1 block followed by a 32-byte L1 block range
///      hash (see the 40-byte accounting in ScrollChain._commitChunk). An
///      8-byte field occupies the top 64 bits of the word loaded at
///      `l2TxEndPtr`, so shift right by 192 bits; the previous shr(248) kept
///      only the first byte of the field.
/// @param l2TxEndPtr The end memory offset of `l2Transactions`.
/// @return _lastAppliedL1Block The number of last applied L1 block.
function lastAppliedL1BlockInChunk(uint256 l2TxEndPtr) internal pure returns (uint256 _lastAppliedL1Block) {
    assembly {
        // keep the top 8 bytes of the loaded word: the uint64 field
        _lastAppliedL1Block := shr(192, mload(l2TxEndPtr))
    }
}

/// @notice Return the L1 block range hash encoded at the end of the chunk.
/// @dev The 32-byte hash follows the 8-byte last applied L1 block, so it
///      starts at `l2TxEndPtr + 8` and fills a full word — no shift is
///      needed. The previous shr(224) truncated the hash to its first
///      4 bytes, and the @notice wrongly described the other field.
/// @param l2TxEndPtr The end memory offset of `l2Transactions`.
/// @return _l1BlockRangeHash The hash of the L1 block range.
function l1BlockRangeHashInChunk(uint256 l2TxEndPtr) internal pure returns (bytes32 _l1BlockRangeHash) {
    assembly {
        _l1BlockRangeHash := mload(add(l2TxEndPtr, 8))
    }
}

Expand Down Expand Up @@ -98,6 +107,15 @@ library ChunkCodec {
return dstPtr;
}

/// @notice Return the number of last applied L1 block.
/// @dev Reads 2 bytes at offset 60 of the block context (shr(240) keeps the
///      top 16 bits of the loaded word). NOTE(review): a 2-byte field looks
///      narrow for an L1 block number, while the chunk-level trailer reserves
///      more room — confirm the block-context layout intends a uint16 here.
/// @param blockPtr The start memory offset of the block context in memory.
/// @return _lastAppliedL1Block The number of last applied L1 block.
function lastAppliedL1BlockInBlock(uint256 blockPtr) internal pure returns (uint256 _lastAppliedL1Block) {
    assembly {
        _lastAppliedL1Block := shr(240, mload(add(blockPtr, 60)))
    }
}

/// @notice Return the number of transactions in current block.
/// @param blockPtr The start memory offset of the block context in memory.
/// @return _numTransactions The number of transactions in current block.
Expand Down
22 changes: 11 additions & 11 deletions contracts/src/test/L1ViewOracle.t.sol
Original file line number Diff line number Diff line change
Expand Up @@ -21,47 +21,47 @@ contract L1ViewOracleTest is DSTestPlus {
uint256 from = block.number - 260;
uint256 to = from + 5;

bytes32 hash = oracle.blockRangeHash(from, to);
oracle.blockRangeHash(from, to);
}

function testTooNewBlocks() external {
    hevm.expectRevert("Incorrect from/to range");

    hevm.roll(10);

    // range end is beyond the current block, so the call must revert
    oracle.blockRangeHash(block.number - 5, block.number + 5);
}

function testInvalidRange() external {
    hevm.expectRevert("Incorrect from/to range");

    // end of the range (100) precedes its start (200)
    oracle.blockRangeHash(200, 100);
}

function testCorrectness() external {
    hevm.roll(150);

    uint256 from = 15;
    uint256 to = 48;

    // query the oracle first, exactly as the original test does
    bytes32 got = oracle.blockRangeHash(from, to);

    // recompute the expected hash the same way the oracle does
    bytes32[] memory blockHashes = new bytes32[](to - from + 1);
    for (uint256 i = from; i <= to; i++) {
        bytes32 blockHash = blockhash(i);

        require(blockHash != 0, "Blockhash not available");

        blockHashes[i - from] = blockHash;
    }

    assertEq(got, keccak256(abi.encodePacked(blockHashes)));
}
}
10 changes: 1 addition & 9 deletions rollup/internal/orm/batch.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ import (

"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"golang.org/x/crypto/sha3"
"gorm.io/gorm"
)

Expand Down Expand Up @@ -273,13 +272,6 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet
return nil, err
}

hasher := sha3.NewLegacyKeccak256()
var l1BlockRangeHash common.Hash
for _, chunk := range chunks {
hasher.Write(chunk.L1BlockRangeHash.Bytes())
}
copy(l1BlockRangeHash[:], hasher.Sum(nil))

numChunks := len(chunks)
lastChunkBlockNum := len(chunks[numChunks-1].Blocks)

Expand All @@ -301,7 +293,7 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet
TotalL1CommitGas: batchMeta.TotalL1CommitGas,
TotalL1CommitCalldataSize: batchMeta.TotalL1CommitCalldataSize,
LastAppliedL1Block: chunks[numChunks-1].LastAppliedL1Block,
L1BlockRangeHash: l1BlockRangeHash,
L1BlockRangeHash: batchHeader.L1BlockRangeHash(),
}

db := o.db
Expand Down

0 comments on commit b484a71

Please sign in to comment.