Skip to content

Commit

Permalink
fix: fix PR comments
Browse files Browse the repository at this point in the history
  • Loading branch information
lastminutedev committed Nov 22, 2023
1 parent 533cf35 commit 8787180
Show file tree
Hide file tree
Showing 17 changed files with 97 additions and 115 deletions.
72 changes: 23 additions & 49 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ ark-std = "0.3"
ctor = "0.1"
env_logger = "0.10"
ethers = { version = "2.0.7", features = ["ethers-solc"] }
ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/block-hashes-poc", features = ["scroll"] }
ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc", features = ["scroll"] }
ethers-providers = "2.0.7"
ethers-signers = "2.0.7"
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
Expand Down Expand Up @@ -60,8 +60,8 @@ tokio = { version = "1.13", features = ["macros", "rt-multi-thread"] }
url = "2.2"

[patch.crates-io]
ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/block-hashes-poc" }
ethers-etherscan = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" }
ethers-etherscan = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
Expand Down
26 changes: 13 additions & 13 deletions aggregator/src/batch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,9 @@ use super::chunk::ChunkHash;
/// - the last (#MAX_AGG_SNARKS-k) chunks are from empty traces
/// A BatchHash consists of 3 hashes.
/// - batch_pi_hash := keccak(chain_id || chunk_0.prev_state_root || chunk_k-1.post_state_root ||
/// chunk_k-1.withdraw_root || batch_data_hash)
/// chunk_k-1.withdraw_root || batch_data_hash || chunk_k-1.last_applied_l1_block || batch_l1_block_range_hash)
/// - batch_data_hash := keccak(chunk_0.data_hash || ... || chunk_k-1.data_hash)
/// - batch_l1_block_range_hash := keccak(chunk_0.l1_block_range_hash || ... ||
/// - batch_l1_block_range_hash := keccak(chunk_0.l1_block_range_hash || ... || chunk_k-1.l1_block_range_hash)
pub struct BatchHash {
pub(crate) chain_id: u64,
// chunks with padding.
Expand Down Expand Up @@ -85,14 +85,14 @@ impl BatchHash {
chunks_with_padding[i + 1].withdraw_root,
chunks_with_padding[i].withdraw_root
);
assert_eq!(
chunks_with_padding[i + 1].l1_block_range_hash,
chunks_with_padding[i].l1_block_range_hash
);
assert_eq!(
chunks_with_padding[i + 1].last_applied_l1_block,
chunks_with_padding[i].last_applied_l1_block
);
assert_eq!(
chunks_with_padding[i + 1].l1_block_range_hash,
chunks_with_padding[i].l1_block_range_hash
);
} else {
assert_eq!(
chunks_with_padding[i].post_state_root,
Expand Down Expand Up @@ -140,8 +140,8 @@ impl BatchHash {
.withdraw_root
.as_bytes(),
batch_data_hash.as_slice(),
batch_l1_block_range_hash.as_slice(),
chunks_with_padding[MAX_AGG_SNARKS - 1].last_applied_l1_block.to_be_bytes().as_ref(),
batch_l1_block_range_hash.as_slice(),
]
.concat();
let public_input_hash = keccak256(preimage);
Expand Down Expand Up @@ -173,9 +173,9 @@ impl BatchHash {
// chunk[0].prev_state_root ||
// chunk[k-1].post_state_root ||
// chunk[k-1].withdraw_root ||
// batch_data_hash
// batch_l1_block_range_hash ||
// chunk[k-1].last_applied_l1_block)
// batch_data_hash ||
// chunk[k-1].last_applied_l1_block ||
// batch_l1_block_range_hash)
let batch_public_input_hash_preimage = [
self.chain_id.to_be_bytes().as_ref(),
self.chunks_with_padding[0].prev_state_root.as_bytes(),
Expand All @@ -186,8 +186,8 @@ impl BatchHash {
.withdraw_root
.as_bytes(),
self.data_hash.as_bytes(),
self.l1_block_range_hash.as_bytes(),
self.chunks_with_padding[MAX_AGG_SNARKS - 1].last_applied_l1_block.to_be_bytes().as_ref(),
self.l1_block_range_hash.as_bytes(),
]
.concat();
res.push(batch_public_input_hash_preimage);
Expand All @@ -197,16 +197,16 @@ impl BatchHash {
// keccak(
// chain id ||
// chunk[i].prevStateRoot || chunk[i].postStateRoot || chunk[i].withdrawRoot ||
// chunk[i].datahash || chunk[i].l1BlockRangeHash || chunk[i].lastAppliedL1Block)
// chunk[i].datahash || chunk[i].lastAppliedL1Block || chunk[i].l1BlockRangeHash)
for chunk in self.chunks_with_padding.iter() {
let chunk_public_input_hash_preimage = [
self.chain_id.to_be_bytes().as_ref(),
chunk.prev_state_root.as_bytes(),
chunk.post_state_root.as_bytes(),
chunk.withdraw_root.as_bytes(),
chunk.data_hash.as_bytes(),
chunk.l1_block_range_hash.as_bytes(),
chunk.last_applied_l1_block.to_be_bytes().as_ref(),
chunk.l1_block_range_hash.as_bytes(),
]
.concat();
res.push(chunk_public_input_hash_preimage)
Expand Down
14 changes: 7 additions & 7 deletions aggregator/src/chunk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@ use zkevm_circuits::witness::Block;
/// - state root after this chunk
/// - the withdraw root after this chunk
/// - the data hash of this chunk
/// - the l1 block range hash of this chunk
/// - the last applied l1 block number of this chunk
/// - the l1 block range hash of this chunk
/// - if the chunk is padded (an empty but valid chunk that is padded for aggregation)
pub struct ChunkHash {
/// Chain identifier
Expand All @@ -28,10 +28,10 @@ pub struct ChunkHash {
pub withdraw_root: H256,
/// the data hash of this chunk
pub data_hash: H256,
/// the l1 block range hash of this chunk
pub l1_block_range_hash: H256,
/// the last applied l1 block number of this chunk
pub last_applied_l1_block: u64,
/// the l1 block range hash of this chunk
pub l1_block_range_hash: H256,
/// if the chunk is a padded chunk
pub is_padding: bool,
}
Expand Down Expand Up @@ -102,8 +102,8 @@ impl ChunkHash {
post_state_root,
withdraw_root: H256(block.withdraw_root.to_be_bytes()),
data_hash,
l1_block_range_hash: block.l1_block_range_hash.unwrap_or(H256(keccak256(vec![]))),
last_applied_l1_block: block.last_applied_l1_block.unwrap_or(0),
l1_block_range_hash: block.l1_block_range_hash.unwrap_or(H256(keccak256(vec![]))),
is_padding,
}
}
Expand All @@ -127,8 +127,8 @@ impl ChunkHash {
post_state_root: post_state_root.into(),
withdraw_root: withdraw_root.into(),
data_hash: data_hash.into(),
l1_block_range_hash: l1_block_range_hash.into(),
last_applied_l1_block: 0,
l1_block_range_hash: l1_block_range_hash.into(),
is_padding: false,
}
}
Expand All @@ -146,8 +146,8 @@ impl ChunkHash {
post_state_root: previous_chunk.post_state_root,
withdraw_root: previous_chunk.withdraw_root,
data_hash: previous_chunk.data_hash,
l1_block_range_hash: previous_chunk.l1_block_range_hash,
last_applied_l1_block: previous_chunk.last_applied_l1_block,
l1_block_range_hash: previous_chunk.l1_block_range_hash,
is_padding: true,
}
}
Expand All @@ -168,8 +168,8 @@ impl ChunkHash {
self.post_state_root.as_bytes(),
self.withdraw_root.as_bytes(),
self.data_hash.as_bytes(),
self.l1_block_range_hash.as_bytes(),
self.last_applied_l1_block.to_be_bytes().as_ref(),
self.l1_block_range_hash.as_bytes(),
]
.concat()
}
Expand Down
Loading

0 comments on commit 8787180

Please sign in to comment.