From 896c40a113dfedcddcf60dc400e68d6be476043d Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Mon, 25 Nov 2024 15:03:53 +1100 Subject: [PATCH 01/36] Add decode impl --- consensus/types/src/lib.rs | 1 + consensus/types/src/transactions_opaque.rs | 116 +++++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 consensus/types/src/transactions_opaque.rs diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index eff52378342..08baa1dc33d 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -71,6 +71,7 @@ pub mod signed_voluntary_exit; pub mod signing_data; pub mod sync_committee_subscription; pub mod sync_duty; +pub mod transactions_opaque; pub mod validator; pub mod validator_subscription; pub mod voluntary_exit; diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs new file mode 100644 index 00000000000..105edc1ce14 --- /dev/null +++ b/consensus/types/src/transactions_opaque.rs @@ -0,0 +1,116 @@ +use crate::EthSpec; +use ssz::{read_offset, Decode, DecodeError, BYTES_PER_LENGTH_OFFSET}; +use std::marker::PhantomData; + +#[derive(Default)] +struct TransactionsOpaque { + offsets: Vec, + bytes: Vec, + _phantom: PhantomData, +} + +impl Decode for TransactionsOpaque { + fn is_ssz_fixed_len() -> bool { + false + } + + fn ssz_fixed_len() -> usize { + panic!("TransactionsOpaque is not fixed length"); + } + + fn from_ssz_bytes(bytes: &[u8]) -> Result { + if bytes.is_empty() { + return Ok(Self::default()); + } + + let (offset_bytes, value_bytes) = { + let first_offset = read_offset(bytes)?; + sanitize_offset(first_offset, None, bytes.len(), Some(first_offset))?; + + if first_offset % BYTES_PER_LENGTH_OFFSET != 0 || first_offset < BYTES_PER_LENGTH_OFFSET + { + return Err(DecodeError::InvalidListFixedBytesLen(first_offset)); + } + + bytes + .split_at_checked(first_offset) + .ok_or(DecodeError::OffsetOutOfBounds(first_offset))? + }; + + // Disallow lists that have too many transactions. + let num_items = offset_bytes.len() / BYTES_PER_LENGTH_OFFSET; + let max_tx_count = ::max_transactions_per_payload(); + if num_items > max_tx_count { + return Err(DecodeError::BytesInvalid(format!( + "List of {} txs exceeds maximum of {:?}", + num_items, max_tx_count + ))); + } + + let max_tx_bytes = ::max_bytes_per_transaction(); + let mut offsets = Vec::with_capacity(num_items); + let mut offset_iter = offset_bytes.chunks(BYTES_PER_LENGTH_OFFSET).peekable(); + while let Some(offset) = offset_iter.next() { + let offset = read_offset(offset)?; + + // Make the offset assume that the values start at index 0, rather + // than following the offset bytes. + let offset = offset + .checked_sub(offset_bytes.len()) + .ok_or(DecodeError::OffsetIntoFixedPortion(offset))?; + + let next_offset = offset_iter + .peek() + .copied() + .map(read_offset) + .unwrap_or(Ok(value_bytes.len()))?; + + // Disallow any offset that is lower than the previous. + let tx_len = next_offset + .checked_sub(offset) + .ok_or(DecodeError::OffsetsAreDecreasing(offset))?; + + // Disallow transactions that are too large. + if tx_len > max_tx_bytes { + return Err(DecodeError::BytesInvalid(format!( + "length of {tx_len} exceeds maximum tx length of {max_tx_bytes}", + ))); + } + + // Disallow an offset that points outside of the value bytes. 
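+            // (`offset` is relative to `value_bytes` at this point, so any value strictly
+            // greater than `value_bytes.len()` cannot mark the start of a transaction.)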
+ if offset > value_bytes.len() { + return Err(DecodeError::OffsetOutOfBounds(offset)); + } + + offsets.push(offset); + } + + Ok(Self { + offsets, + bytes: value_bytes.to_vec(), + _phantom: PhantomData, + }) + } +} + +/// TODO: export from ssz crate. +pub fn sanitize_offset( + offset: usize, + previous_offset: Option, + num_bytes: usize, + num_fixed_bytes: Option, +) -> Result { + if num_fixed_bytes.map_or(false, |fixed_bytes| offset < fixed_bytes) { + Err(DecodeError::OffsetIntoFixedPortion(offset)) + } else if previous_offset.is_none() + && num_fixed_bytes.map_or(false, |fixed_bytes| offset != fixed_bytes) + { + Err(DecodeError::OffsetSkipsVariableBytes(offset)) + } else if offset > num_bytes { + Err(DecodeError::OffsetOutOfBounds(offset)) + } else if previous_offset.map_or(false, |prev| prev > offset) { + Err(DecodeError::OffsetsAreDecreasing(offset)) + } else { + Ok(offset) + } +} From c124eac286c6b04376fa17b8ebd7076ed746fad8 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Mon, 25 Nov 2024 15:23:50 +1100 Subject: [PATCH 02/36] Add encode impl --- consensus/types/src/lib.rs | 1 + consensus/types/src/transactions_opaque.rs | 55 ++++++++++++++++++++-- 2 files changed, 53 insertions(+), 3 deletions(-) diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index 08baa1dc33d..85928c3579d 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -248,6 +248,7 @@ pub use crate::sync_committee_subscription::SyncCommitteeSubscription; pub use crate::sync_duty::SyncDuty; pub use crate::sync_selection_proof::SyncSelectionProof; pub use crate::sync_subnet_id::SyncSubnetId; +pub use crate::transactions_opaque::TransactionsOpaque; pub use crate::validator::Validator; pub use crate::validator_registration_data::*; pub use crate::validator_subscription::ValidatorSubscription; diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 105edc1ce14..acefb027573 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -1,14 +1,47 @@ use crate::EthSpec; -use ssz::{read_offset, Decode, DecodeError, BYTES_PER_LENGTH_OFFSET}; +use ssz::{encode_length, read_offset, Decode, DecodeError, Encode, BYTES_PER_LENGTH_OFFSET}; use std::marker::PhantomData; -#[derive(Default)] -struct TransactionsOpaque { +#[derive(Default, Debug, Clone)] +pub struct TransactionsOpaque { offsets: Vec, bytes: Vec, _phantom: PhantomData, } +impl TransactionsOpaque { + pub fn iter<'a>(&'a self) -> TransactionsOpaqueIter<'a> { + TransactionsOpaqueIter { + offsets: &self.offsets, + bytes: &self.bytes, + } + } + + fn len_offset_bytes(&self) -> usize { + self.offsets.len().saturating_mul(BYTES_PER_LENGTH_OFFSET) + } +} + +impl Encode for TransactionsOpaque { + fn is_ssz_fixed_len() -> bool { + false + } + + fn ssz_append(&self, buf: &mut Vec) { + let len_offset_bytes = self.len_offset_bytes(); + buf.reserve(self.ssz_bytes_len()); + for offset in &self.offsets { + let offset = offset.saturating_add(len_offset_bytes); + buf.extend_from_slice(&encode_length(offset)); + } + buf.extend_from_slice(&self.bytes); + } + + fn ssz_bytes_len(&self) -> usize { + self.len_offset_bytes().saturating_add(self.bytes.len()) + } +} + impl Decode for TransactionsOpaque { fn is_ssz_fixed_len() -> bool { false @@ -93,6 +126,22 @@ impl Decode for TransactionsOpaque { } } +pub struct TransactionsOpaqueIter<'a> { + offsets: &'a [usize], + bytes: &'a [u8], +} + +impl<'a> Iterator for TransactionsOpaqueIter<'a> { + type Item = &'a [u8]; + + 
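+    // Each call consumes one offset; the following offset (or the end of `bytes` for the
+    // final transaction) bounds the returned slice.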
fn next(&mut self) -> Option { + let (offset, offsets) = self.offsets.split_first()?; + let next_offset = offsets.first().copied().unwrap_or(self.bytes.len()); + self.offsets = offsets; + self.bytes.get(*offset..next_offset) + } +} + /// TODO: export from ssz crate. pub fn sanitize_offset( offset: usize, From 603b4b200dc33b52a2993eed9a36b5b75295673d Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Mon, 25 Nov 2024 15:32:53 +1100 Subject: [PATCH 03/36] Add todo impls --- consensus/types/src/transactions_opaque.rs | 51 ++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index acefb027573..7b05d06ab08 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -1,6 +1,11 @@ +use crate::test_utils::TestRandom; use crate::EthSpec; +use arbitrary::Arbitrary; +use rand::RngCore; +use serde::{de, ser::Serializer, Deserialize, Deserializer, Serialize}; use ssz::{encode_length, read_offset, Decode, DecodeError, Encode, BYTES_PER_LENGTH_OFFSET}; use std::marker::PhantomData; +use tree_hash::TreeHash; #[derive(Default, Debug, Clone)] pub struct TransactionsOpaque { @@ -142,6 +147,52 @@ impl<'a> Iterator for TransactionsOpaqueIter<'a> { } } +/// Serialization for http requests. +impl Serialize for TransactionsOpaque { + fn serialize(&self, serializer: S) -> Result { + todo!("impl serde serialize") + } +} + +impl<'de, E> Deserialize<'de> for TransactionsOpaque { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + todo!("impl serde deserialize") + } +} + +impl TreeHash for TransactionsOpaque { + fn tree_hash_type() -> tree_hash::TreeHashType { + todo!("impl tree hash") + } + + fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding { + todo!("impl tree hash") + } + + fn tree_hash_packing_factor() -> usize { + todo!("impl tree hash") + } + + fn tree_hash_root(&self) -> tree_hash::Hash256 { + todo!("impl tree hash") + } +} + +impl TestRandom for TransactionsOpaque { + fn random_for_test(rng: &mut impl RngCore) -> Self { + todo!("impl test random") + } +} + +impl Arbitrary<'_> for TransactionsOpaque { + fn arbitrary(_u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result { + todo!("impl arbitrary") + } +} + /// TODO: export from ssz crate. 
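 /// Checks that `offset` is not inside the fixed-length portion, that the first offset
 /// lands exactly at the end of that portion, that it does not exceed `num_bytes`, and
 /// that offsets never decrease.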
pub fn sanitize_offset( offset: usize, From 6a81218ea22375461f0d252fdb9621698890b2d3 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Mon, 25 Nov 2024 16:12:04 +1100 Subject: [PATCH 04/36] Fix compile errors --- beacon_node/beacon_chain/src/beacon_chain.rs | 2 +- beacon_node/beacon_chain/src/test_utils.rs | 4 +- beacon_node/execution_layer/src/block_hash.rs | 2 +- .../src/engine_api/json_structures.rs | 2 - .../test_utils/execution_block_generator.rs | 10 ++- .../execution_layer/src/versioned_hashes.rs | 8 +- consensus/types/src/execution_payload.rs | 6 +- consensus/types/src/transactions_opaque.rs | 75 ++++++++++++++++--- 8 files changed, 79 insertions(+), 30 deletions(-) diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index a78ae266e5a..2592864194d 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -1215,7 +1215,7 @@ impl BeaconChain { debug!( self.log, "Reconstructed txn"; - "bytes" => format!("0x{}", hex::encode(&**txn)), + "bytes" => format!("0x{}", hex::encode(txn)), ); } diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 093ee0c44b4..83f5fd52f10 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -2838,7 +2838,7 @@ pub fn generate_rand_block_and_blobs( execution_layer::test_utils::generate_blobs::(num_blobs).unwrap(); payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { + for tx in &transactions { payload.execution_payload.transactions.push(tx).unwrap(); } message.body.blob_kzg_commitments = bundle.commitments.clone(); @@ -2857,7 +2857,7 @@ pub fn generate_rand_block_and_blobs( let (bundle, transactions) = execution_layer::test_utils::generate_blobs::(num_blobs).unwrap(); payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { + for tx in &transactions { payload.execution_payload.transactions.push(tx).unwrap(); } message.body.blob_kzg_commitments = bundle.commitments.clone(); diff --git a/beacon_node/execution_layer/src/block_hash.rs b/beacon_node/execution_layer/src/block_hash.rs index cdc172cff47..b51f35ceb99 100644 --- a/beacon_node/execution_layer/src/block_hash.rs +++ b/beacon_node/execution_layer/src/block_hash.rs @@ -22,7 +22,7 @@ pub fn calculate_execution_block_hash( // We're currently using a deprecated Parity library for this. We should move to a // better alternative when one appears, possibly following Reth. let rlp_transactions_root = ordered_trie_root::( - payload.transactions().iter().map(|txn_bytes| &**txn_bytes), + payload.transactions().iter().map(|txn_bytes| txn_bytes), ); // Calculate withdrawals root (post-Capella). 
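The call-site changes above all rely on the same property of the new type: iterating a `Transactions` value (directly or by reference) yields `&[u8]` slices, so the old double-deref of `Transaction` wrappers is no longer needed. A short sketch under that assumption (`txs` here is a hypothetical `&Transactions<E>`):

    for tx in txs.iter() {
        // `tx` is already a `&[u8]`; it can be hex-encoded or RLP-hashed directly.
        let _hex = hex::encode(tx);
    }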
diff --git a/beacon_node/execution_layer/src/engine_api/json_structures.rs b/beacon_node/execution_layer/src/engine_api/json_structures.rs index efd68f1023d..22d65ad63b7 100644 --- a/beacon_node/execution_layer/src/engine_api/json_structures.rs +++ b/beacon_node/execution_layer/src/engine_api/json_structures.rs @@ -96,7 +96,6 @@ pub struct JsonExecutionPayload { pub base_fee_per_gas: Uint256, pub block_hash: ExecutionBlockHash, - #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] pub transactions: Transactions, #[superstruct(only(V2, V3, V4))] pub withdrawals: VariableList, @@ -792,7 +791,6 @@ impl From for JsonForkchoiceUpdatedV1Response { #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(bound = "E: EthSpec")] pub struct JsonExecutionPayloadBodyV1 { - #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] pub transactions: Transactions, pub withdrawals: Option>, } diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 4deb91e0567..bfe746cbea5 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -662,7 +662,7 @@ impl ExecutionBlockGenerator { let mut rng = self.rng.lock(); let num_blobs = rng.gen::() % (E::max_blobs_per_block() + 1); let (bundle, transactions) = generate_blobs(num_blobs)?; - for tx in Vec::from(transactions) { + for tx in &transactions { execution_payload .transactions_mut() .push(tx) @@ -707,13 +707,15 @@ pub fn generate_blobs( let (kzg_commitment, kzg_proof, blob) = load_test_blobs_bundle::()?; let mut bundle = BlobsBundle::::default(); - let mut transactions = vec![]; + let mut transactions = Transactions::default(); for blob_index in 0..n_blobs { let tx = static_valid_tx::() .map_err(|e| format!("error creating valid tx SSZ bytes: {:?}", e))?; - transactions.push(tx); + transactions + .push(&tx) + .map_err(|e| format!("invalid tx: {e:?}"))?; bundle .blobs .push(blob.clone()) @@ -728,7 +730,7 @@ pub fn generate_blobs( .map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?; } - Ok((bundle, transactions.into())) + Ok((bundle, transactions)) } pub fn static_valid_tx() -> Result, String> { diff --git a/beacon_node/execution_layer/src/versioned_hashes.rs b/beacon_node/execution_layer/src/versioned_hashes.rs index 97c3100de99..b032d4f5e2d 100644 --- a/beacon_node/execution_layer/src/versioned_hashes.rs +++ b/beacon_node/execution_layer/src/versioned_hashes.rs @@ -1,6 +1,6 @@ use alloy_consensus::TxEnvelope; use alloy_rlp::Decodable; -use types::{EthSpec, ExecutionPayloadRef, Hash256, Unsigned, VersionedHash}; +use types::{EthSpec, ExecutionPayloadRef, Hash256, VersionedHash}; #[derive(Debug)] pub enum Error { @@ -59,10 +59,8 @@ pub fn extract_versioned_hashes_from_transactions( Ok(versioned_hashes) } -pub fn beacon_tx_to_tx_envelope( - tx: &types::Transaction, -) -> Result { - let tx_bytes = Vec::from(tx.clone()); +pub fn beacon_tx_to_tx_envelope(tx: &[u8]) -> Result { + let tx_bytes = Vec::from(tx); TxEnvelope::decode(&mut tx_bytes.as_slice()) .map_err(|e| Error::DecodingTransaction(e.to_string())) } diff --git a/consensus/types/src/execution_payload.rs b/consensus/types/src/execution_payload.rs index 9f16b676a6a..537e0ede70d 100644 --- a/consensus/types/src/execution_payload.rs +++ b/consensus/types/src/execution_payload.rs @@ -7,10 +7,7 @@ use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; pub 
type Transaction = VariableList; -pub type Transactions = VariableList< - Transaction<::MaxBytesPerTransaction>, - ::MaxTransactionsPerPayload, ->; +pub type Transactions = TransactionsOpaque; pub type Withdrawals = VariableList::MaxWithdrawalsPerPayload>; @@ -80,7 +77,6 @@ pub struct ExecutionPayload { pub base_fee_per_gas: Uint256, #[superstruct(getter(copy))] pub block_hash: ExecutionBlockHash, - #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] pub transactions: Transactions, #[superstruct(only(Capella, Deneb, Electra))] pub withdrawals: Withdrawals, diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 7b05d06ab08..b1a7bd4aa50 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -1,32 +1,75 @@ use crate::test_utils::TestRandom; use crate::EthSpec; use arbitrary::Arbitrary; +use derivative::Derivative; use rand::RngCore; -use serde::{de, ser::Serializer, Deserialize, Deserializer, Serialize}; +use serde::{ser::Serializer, Deserialize, Deserializer, Serialize}; use ssz::{encode_length, read_offset, Decode, DecodeError, Encode, BYTES_PER_LENGTH_OFFSET}; +use std::iter::IntoIterator; use std::marker::PhantomData; use tree_hash::TreeHash; -#[derive(Default, Debug, Clone)] +#[derive(Debug)] +pub enum Error { + TooManyTransactions, + TransactionTooBig, +} + +#[derive(Debug, Clone, Derivative)] +#[derivative(Default, PartialEq, Hash(bound = "E: EthSpec"))] pub struct TransactionsOpaque { offsets: Vec, bytes: Vec, _phantom: PhantomData, } -impl TransactionsOpaque { - pub fn iter<'a>(&'a self) -> TransactionsOpaqueIter<'a> { - TransactionsOpaqueIter { - offsets: &self.offsets, - bytes: &self.bytes, +impl TransactionsOpaque { + pub fn empty() -> Self { + Self::default() + } + + pub fn push(&mut self, item: &[u8]) -> Result<(), Error> { + let max_tx_count = ::max_transactions_per_payload(); + let max_tx_bytes = ::max_bytes_per_transaction(); + + if item.len() > max_tx_bytes { + Err(Error::TransactionTooBig) + } else if self.offsets.len() >= max_tx_count { + Err(Error::TooManyTransactions) + } else { + self.offsets.push(self.bytes.len()); + self.bytes.extend_from_slice(item); + Ok(()) } } + pub fn iter(&self) -> impl Iterator { + self.into_iter() + } + + pub fn len(&self) -> usize { + self.offsets.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + fn len_offset_bytes(&self) -> usize { self.offsets.len().saturating_mul(BYTES_PER_LENGTH_OFFSET) } } +impl From>> for TransactionsOpaque { + fn from(vecs: Vec>) -> Self { + let mut txs = Self::default(); + for vec in vecs { + txs.push(&vec).unwrap(); + } + txs + } +} + impl Encode for TransactionsOpaque { fn is_ssz_fixed_len() -> bool { false @@ -131,6 +174,18 @@ impl Decode for TransactionsOpaque { } } +impl<'a, E> IntoIterator for &'a TransactionsOpaque { + type Item = &'a [u8]; + type IntoIter = TransactionsOpaqueIter<'a>; + + fn into_iter(self) -> TransactionsOpaqueIter<'a> { + TransactionsOpaqueIter { + offsets: &self.offsets, + bytes: &self.bytes, + } + } +} + pub struct TransactionsOpaqueIter<'a> { offsets: &'a [usize], bytes: &'a [u8], @@ -149,13 +204,13 @@ impl<'a> Iterator for TransactionsOpaqueIter<'a> { /// Serialization for http requests. 
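 /// (The JSON representation is a list of 0x-prefixed hex strings, one per transaction.)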
impl Serialize for TransactionsOpaque { - fn serialize(&self, serializer: S) -> Result { + fn serialize(&self, _serializer: S) -> Result { todo!("impl serde serialize") } } impl<'de, E> Deserialize<'de> for TransactionsOpaque { - fn deserialize(deserializer: D) -> Result + fn deserialize(_deserializer: D) -> Result where D: Deserializer<'de>, { @@ -182,7 +237,7 @@ impl TreeHash for TransactionsOpaque { } impl TestRandom for TransactionsOpaque { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(_rng: &mut impl RngCore) -> Self { todo!("impl test random") } } From 8cd04c60e1b4288a468ea254e04a29da8934fa69 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Mon, 25 Nov 2024 16:20:52 +1100 Subject: [PATCH 05/36] Fix decode impl --- consensus/types/src/transactions_opaque.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index b1a7bd4aa50..6ff13fc3f1a 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -95,10 +95,6 @@ impl Decode for TransactionsOpaque { false } - fn ssz_fixed_len() -> usize { - panic!("TransactionsOpaque is not fixed length"); - } - fn from_ssz_bytes(bytes: &[u8]) -> Result { if bytes.is_empty() { return Ok(Self::default()); From 89431fd3d9526534afc9ad3a439964dd7cfa64d8 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 26 Nov 2024 06:52:39 +1100 Subject: [PATCH 06/36] Use updated ssz_types repo --- Cargo.lock | 3 +-- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b7ba237ac76..4ec2e1bf005 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8136,8 +8136,7 @@ dependencies = [ [[package]] name = "ssz_types" version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e0719d2b86ac738a55ae71a8429f52aa2741da988f1fd0975b4cc610fd1e08" +source = "git+https://github.com/paulhauner/ssz_types.git?rev=bac5bdc5ed81029bf910b24da59e8696080e3ed8#bac5bdc5ed81029bf910b24da59e8696080e3ed8" dependencies = [ "arbitrary", "derivative", diff --git a/Cargo.toml b/Cargo.toml index 8cf4abb33eb..02b9bde0a1e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -176,7 +176,7 @@ slog-term = "2" sloggers = { version = "2", features = ["json"] } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = "0.8" +ssz_types = { rev = "bac5bdc5ed81029bf910b24da59e8696080e3ed8", git = "https://github.com/paulhauner/ssz_types.git" } strum = { version = "0.24", features = ["derive"] } superstruct = "0.8" syn = "1" From 3964cba93750870a530bf6321bebb2286743c98c Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 26 Nov 2024 06:52:51 +1100 Subject: [PATCH 07/36] Avoid sanitize_offset fn --- consensus/types/src/transactions_opaque.rs | 25 ++-------------------- 1 file changed, 2 insertions(+), 23 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 6ff13fc3f1a..b4577f1b3a5 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -100,9 +100,10 @@ impl Decode for TransactionsOpaque { return Ok(Self::default()); } + // - `offset_bytes`: first section of bytes with pointers to items. + // - `value_bytes`: the list items pointed to by `offset_bytes`. 
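+        //
+        // The first offset doubles as the length of the offset section, which is why it
+        // must be a non-zero multiple of `BYTES_PER_LENGTH_OFFSET`.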
let (offset_bytes, value_bytes) = { let first_offset = read_offset(bytes)?; - sanitize_offset(first_offset, None, bytes.len(), Some(first_offset))?; if first_offset % BYTES_PER_LENGTH_OFFSET != 0 || first_offset < BYTES_PER_LENGTH_OFFSET { @@ -243,25 +244,3 @@ impl Arbitrary<'_> for TransactionsOpaque { todo!("impl arbitrary") } } - -/// TODO: export from ssz crate. -pub fn sanitize_offset( - offset: usize, - previous_offset: Option, - num_bytes: usize, - num_fixed_bytes: Option, -) -> Result { - if num_fixed_bytes.map_or(false, |fixed_bytes| offset < fixed_bytes) { - Err(DecodeError::OffsetIntoFixedPortion(offset)) - } else if previous_offset.is_none() - && num_fixed_bytes.map_or(false, |fixed_bytes| offset != fixed_bytes) - { - Err(DecodeError::OffsetSkipsVariableBytes(offset)) - } else if offset > num_bytes { - Err(DecodeError::OffsetOutOfBounds(offset)) - } else if previous_offset.map_or(false, |prev| prev > offset) { - Err(DecodeError::OffsetsAreDecreasing(offset)) - } else { - Ok(offset) - } -} From 6eaace549753252318862fafb0b2614d0e3fb738 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 26 Nov 2024 12:06:57 +1100 Subject: [PATCH 08/36] Use ssz-types with bigger smallvec --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4ec2e1bf005..3897cc7d579 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8136,7 +8136,7 @@ dependencies = [ [[package]] name = "ssz_types" version = "0.8.0" -source = "git+https://github.com/paulhauner/ssz_types.git?rev=bac5bdc5ed81029bf910b24da59e8696080e3ed8#bac5bdc5ed81029bf910b24da59e8696080e3ed8" +source = "git+https://github.com/paulhauner/ssz_types.git?rev=26716095d94e02a2cbd72c94a8b3b8549ecf14d1#26716095d94e02a2cbd72c94a8b3b8549ecf14d1" dependencies = [ "arbitrary", "derivative", diff --git a/Cargo.toml b/Cargo.toml index 02b9bde0a1e..8d787d06d3d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -176,7 +176,7 @@ slog-term = "2" sloggers = { version = "2", features = ["json"] } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = { rev = "bac5bdc5ed81029bf910b24da59e8696080e3ed8", git = "https://github.com/paulhauner/ssz_types.git" } +ssz_types = { rev = "26716095d94e02a2cbd72c94a8b3b8549ecf14d1", git = "https://github.com/paulhauner/ssz_types.git" } strum = { version = "0.24", features = ["derive"] } superstruct = "0.8" syn = "1" From fe7e3f5c3b177d3995ba007277a00fe18975c297 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 27 Nov 2024 06:59:18 +1100 Subject: [PATCH 09/36] Impl tree hash --- consensus/types/src/transactions_opaque.rs | 35 ++++++++++++++++++---- 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index b4577f1b3a5..71aa945abc9 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -7,7 +7,7 @@ use serde::{ser::Serializer, Deserialize, Deserializer, Serialize}; use ssz::{encode_length, read_offset, Decode, DecodeError, Encode, BYTES_PER_LENGTH_OFFSET}; use std::iter::IntoIterator; use std::marker::PhantomData; -use tree_hash::TreeHash; +use tree_hash::{mix_in_length, MerkleHasher, TreeHash}; #[derive(Debug)] pub enum Error { @@ -215,21 +215,44 @@ impl<'de, E> Deserialize<'de> for TransactionsOpaque { } } -impl TreeHash for TransactionsOpaque { +impl TreeHash for TransactionsOpaque { fn tree_hash_type() -> tree_hash::TreeHashType { - todo!("impl tree hash") + 
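+        // Transactions are encoded as an SSZ list (of byte lists), so they hash as a list.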
tree_hash::TreeHashType::List } fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding { - todo!("impl tree hash") + panic!("transactions should never be packed") } fn tree_hash_packing_factor() -> usize { - todo!("impl tree hash") + panic!("transactions should never be packed") } fn tree_hash_root(&self) -> tree_hash::Hash256 { - todo!("impl tree hash") + let mut hasher = MerkleHasher::with_leaves(::max_transactions_per_payload()); + + for tx in self.iter() { + // Produce a "leaf" hash of the transaction. + let leaf = { + let mut leaf_hasher = + MerkleHasher::with_leaves(::max_bytes_per_transaction()); + leaf_hasher + .write(tx) + .expect("tx too large for hasher write, logic error"); + leaf_hasher + .finish() + .expect("tx too large for hasher finish, logic error") + }; + // Add the leaf hash to the main tree. + hasher + .write(leaf.as_slice()) + .expect("cannot add leaf to transactions hash tree, logic error"); + } + + let root = hasher + .finish() + .expect("cannot finish transactions hash tree, logic error"); + mix_in_length(&root, self.len()) } } From b5b70b1ac962fd32d97ed1e29db075d44ea1b74a Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 27 Nov 2024 07:07:24 +1100 Subject: [PATCH 10/36] Tidy, add comments --- consensus/types/src/transactions_opaque.rs | 24 ++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 71aa945abc9..ad0972ab242 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -15,6 +15,13 @@ pub enum Error { TransactionTooBig, } +/// The list of transactions in an execution payload. +/// +/// This data-structure represents the transactions very closely to how they're +/// encoded as SSZ. This makes for fast and low-allocation-count `ssz::Decode`. +/// +/// The impact on iterating/accessing transactions in this data structure is +/// minimal or negligible compared to a `Vec>`. #[derive(Debug, Clone, Derivative)] #[derivative(Default, PartialEq, Hash(bound = "E: EthSpec"))] pub struct TransactionsOpaque { @@ -24,10 +31,18 @@ pub struct TransactionsOpaque { } impl TransactionsOpaque { + /// Creates an empty list. pub fn empty() -> Self { Self::default() } + /// Adds an `item` (i.e. transaction) to the list. + /// + /// ## Errors + /// + /// - If the `item` is longer than `EthSpec::max_bytes_per_transaction()`. + /// - If the operation would make this list longer than + /// `EthSpec::max_transactions_per_payload()`. pub fn push(&mut self, item: &[u8]) -> Result<(), Error> { let max_tx_count = ::max_transactions_per_payload(); let max_tx_bytes = ::max_bytes_per_transaction(); @@ -43,27 +58,32 @@ impl TransactionsOpaque { } } + /// Iterates all transactions in `self`. pub fn iter(&self) -> impl Iterator { self.into_iter() } + /// The number of transactions in `self``. pub fn len(&self) -> usize { self.offsets.len() } + /// True if there are no transactions in `self`. pub fn is_empty(&self) -> bool { self.len() == 0 } + /// The length of the offset/fixed-length section of the SSZ bytes, when + /// serialized. 
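+    /// This is `BYTES_PER_LENGTH_OFFSET` (4) bytes per transaction.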
fn len_offset_bytes(&self) -> usize { self.offsets.len().saturating_mul(BYTES_PER_LENGTH_OFFSET) } } impl From>> for TransactionsOpaque { - fn from(vecs: Vec>) -> Self { + fn from(v: Vec>) -> Self { let mut txs = Self::default(); - for vec in vecs { + for vec in v { txs.push(&vec).unwrap(); } txs From 1e211981a1460f1a3fc01e7e5edf1beb0864dc29 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 3 Dec 2024 10:30:43 +1100 Subject: [PATCH 11/36] Implement tests for tree_hash --- consensus/types/src/transactions_opaque.rs | 157 ++++++++++++++++++++- 1 file changed, 150 insertions(+), 7 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index ad0972ab242..35b6924837e 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -1,5 +1,5 @@ -use crate::test_utils::TestRandom; use crate::EthSpec; +use crate::{test_utils::TestRandom, MainnetEthSpec}; use arbitrary::Arbitrary; use derivative::Derivative; use rand::RngCore; @@ -11,7 +11,9 @@ use tree_hash::{mix_in_length, MerkleHasher, TreeHash}; #[derive(Debug)] pub enum Error { + /// Exceeds `EthSpec::max_transactions_per_payload()` TooManyTransactions, + /// Exceeds `EthSpec::max_bytes_per_transaction()` TransactionTooBig, } @@ -249,19 +251,25 @@ impl TreeHash for TransactionsOpaque { } fn tree_hash_root(&self) -> tree_hash::Hash256 { - let mut hasher = MerkleHasher::with_leaves(::max_transactions_per_payload()); + let max_tx_count = ::max_transactions_per_payload(); + let max_tx_len = ::max_bytes_per_transaction(); + let bytes_per_leaf = 32; + let tx_leaf_count = (max_tx_len + bytes_per_leaf - 1) / bytes_per_leaf; + + let mut hasher = MerkleHasher::with_leaves(max_tx_count); for tx in self.iter() { - // Produce a "leaf" hash of the transaction. + // Produce a "leaf" hash of the transaction. This is the merkle root + // of the transaction. let leaf = { - let mut leaf_hasher = - MerkleHasher::with_leaves(::max_bytes_per_transaction()); + let mut leaf_hasher = MerkleHasher::with_leaves(tx_leaf_count); leaf_hasher .write(tx) .expect("tx too large for hasher write, logic error"); - leaf_hasher + let leaf = leaf_hasher .finish() - .expect("tx too large for hasher finish, logic error") + .expect("tx too large for hasher finish, logic error"); + mix_in_length(&leaf, tx.len()) }; // Add the leaf hash to the main tree. 
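             // (`leaf` is the transaction's own hash tree root: its bytes packed into
             // 32-byte chunks, merkleized, then mixed with the byte length.)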
hasher @@ -287,3 +295,138 @@ impl Arbitrary<'_> for TransactionsOpaque { todo!("impl arbitrary") } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::VariableList; + + type E = MainnetEthSpec; + pub type ReferenceTransaction = VariableList; + pub type ReferenceTransactions = VariableList< + ReferenceTransaction<::MaxBytesPerTransaction>, + ::MaxTransactionsPerPayload, + >; + + struct TestVector { + name: &'static str, + vector: Vec>, + } + + struct TestVectors { + vectors: Vec, + } + + impl Default for TestVectors { + fn default() -> Self { + let vectors = vec![ + TestVector { + name: "empty", + vector: vec![], + }, + TestVector { + name: "single_item_single_element", + vector: vec![vec![0]], + }, + TestVector { + name: "two_items_single_element", + vector: vec![vec![0], vec![1]], + }, + TestVector { + name: "three_items_single_element", + vector: vec![vec![0], vec![1], vec![1]], + }, + TestVector { + name: "single_item_multiple_element", + vector: vec![vec![0, 1, 2]], + }, + TestVector { + name: "two_items_multiple_element", + vector: vec![vec![0, 1, 2], vec![3, 4, 5]], + }, + TestVector { + name: "three_items_multiple_element", + vector: vec![vec![0, 1, 2], vec![3, 4], vec![5, 6, 7, 8]], + }, + TestVector { + name: "empty_list_at_start", + vector: vec![vec![], vec![3, 4], vec![5, 6, 7, 8]], + }, + TestVector { + name: "empty_list_at_middle", + vector: vec![vec![0, 1, 2], vec![], vec![5, 6, 7, 8]], + }, + TestVector { + name: "empty_list_at_end", + vector: vec![vec![0, 1, 2], vec![3, 4, 5], vec![]], + }, + TestVector { + name: "two_empty_lists", + vector: vec![vec![], vec![]], + }, + TestVector { + name: "three_empty_lists", + vector: vec![vec![], vec![], vec![]], + }, + ]; + + Self { vectors } + } + } + + impl TestVectors { + fn iter( + &self, + ) -> impl Iterator< + Item = ( + &'static str, + TransactionsOpaque, + ReferenceTransactions, + ), + > + '_ { + self.vectors.iter().map(|vector| { + let name = vector.name; + let transactions = TransactionsOpaque::from(vector.vector.clone()); + + // Build a equivalent object using + // `VariableList>`. We can use this for + // reference testing + let mut reference = ReferenceTransactions::default(); + for tx in &vector.vector { + reference.push(tx.clone().into()).unwrap(); + } + + // Perform basic sanity checking against the reference. 
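+                // (Equal length, element-wise equality, and both iterators exhausted.)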
+ assert_eq!(transactions.len(), reference.len()); + let mut transactions_iter = transactions.iter(); + let mut reference_iter = reference.iter(); + for _ in 0..transactions.len() { + assert_eq!( + transactions_iter.next().expect("not enough transactions"), + reference_iter + .next() + .expect("not enough reference txs") + .as_ref(), + "transaction not equal" + ); + } + assert!(transactions_iter.next().is_none(), "excess transactions"); + assert!(reference_iter.next().is_none(), "excess reference txs"); + drop((transactions_iter, reference_iter)); + + (name, transactions, reference) + }) + } + } + + #[test] + fn tree_hash() { + for (test, transactions, reference) in TestVectors::default().iter() { + assert_eq!( + transactions.tree_hash_root(), + reference.tree_hash_root(), + "{test}: root != reference" + ) + } + } +} From b51ccb145cf1fc947a7dcb3e3d27caa142db68ef Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 3 Dec 2024 10:34:18 +1100 Subject: [PATCH 12/36] Add ssz tests --- consensus/types/src/transactions_opaque.rs | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 35b6924837e..e77130c81dc 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -419,13 +419,29 @@ mod tests { } } + #[test] + fn ssz_round_trip() { + for (test, transactions, reference) in TestVectors::default().iter() { + assert_eq!( + transactions.as_ssz_bytes(), + reference.as_ssz_bytes(), + "{test} - serialization" + ); + assert_eq!( + transactions, + TransactionsOpaque::from_ssz_bytes(&reference.as_ssz_bytes()).unwrap(), + "{test} - deserialization" + ) + } + } + #[test] fn tree_hash() { for (test, transactions, reference) in TestVectors::default().iter() { assert_eq!( transactions.tree_hash_root(), reference.tree_hash_root(), - "{test}: root != reference" + "{test}" ) } } From c75f203ad84bc256f471c52af6658e30ea9d3242 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 3 Dec 2024 15:23:03 +1100 Subject: [PATCH 13/36] Add malicious ssz tests --- consensus/types/src/transactions_opaque.rs | 119 ++++++++++++++++++--- 1 file changed, 107 insertions(+), 12 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index e77130c81dc..73e459aca99 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -1,5 +1,5 @@ +use crate::test_utils::TestRandom; use crate::EthSpec; -use crate::{test_utils::TestRandom, MainnetEthSpec}; use arbitrary::Arbitrary; use derivative::Derivative; use rand::RngCore; @@ -159,11 +159,20 @@ impl Decode for TransactionsOpaque { .checked_sub(offset_bytes.len()) .ok_or(DecodeError::OffsetIntoFixedPortion(offset))?; - let next_offset = offset_iter - .peek() - .copied() - .map(read_offset) - .unwrap_or(Ok(value_bytes.len()))?; + // Disallow an offset that points outside of the value bytes. + if offset > value_bytes.len() { + return Err(DecodeError::OffsetOutOfBounds(offset)); + } + + // Read the next offset (if any) to determine the length of this + // transaction. + let next_offset = if let Some(next_offset) = offset_iter.peek() { + read_offset(next_offset)? + .checked_sub(offset_bytes.len()) + .ok_or(DecodeError::OffsetIntoFixedPortion(offset))? + } else { + value_bytes.len() + }; // Disallow any offset that is lower than the previous. 
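             // (`checked_sub` returns `None` exactly when `next_offset < offset`.)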
let tx_len = next_offset @@ -177,11 +186,6 @@ impl Decode for TransactionsOpaque { ))); } - // Disallow an offset that points outside of the value bytes. - if offset > value_bytes.len() { - return Err(DecodeError::OffsetOutOfBounds(offset)); - } - offsets.push(offset); } @@ -299,7 +303,7 @@ impl Arbitrary<'_> for TransactionsOpaque { #[cfg(test)] mod tests { use super::*; - use crate::VariableList; + use crate::{ssz_tagged_signed_beacon_block::encode, MainnetEthSpec, VariableList}; type E = MainnetEthSpec; pub type ReferenceTransaction = VariableList; @@ -435,6 +439,97 @@ mod tests { } } + fn err_from_bytes(bytes: &[u8]) -> DecodeError { + TransactionsOpaque::::from_ssz_bytes(bytes).unwrap_err() + } + + /// Helper to build invalid SSZ bytes. + #[derive(Default)] + struct InvalidSszBuilder { + ssz: Vec, + } + + impl InvalidSszBuilder { + // Append a 4-byte offset to self. + pub fn append_offset(mut self, index: usize) -> Self { + self.ssz.extend_from_slice(&encode_length(index)); + self + } + + // Append some misc bytes to self. + pub fn append_value(mut self, value: &[u8]) -> Self { + self.ssz.extend_from_slice(value); + self + } + + pub fn ssz(&self) -> &[u8] { + &self.ssz + } + } + + #[test] + fn ssz_malicious() { + // Highest offset that's still a divisor of 4. + let max_offset = u32::MAX as usize - 3; + + assert_eq!( + err_from_bytes(&[0]), + DecodeError::InvalidLengthPrefix { + len: 1, + expected: 4 + } + ); + assert_eq!( + err_from_bytes( + InvalidSszBuilder::default() + // This offset points to itself. Illegal. + .append_offset(0) + .ssz() + ), + DecodeError::InvalidListFixedBytesLen(0) + ); + assert_eq!( + err_from_bytes( + InvalidSszBuilder::default() + .append_offset(8) + // This offset points back to the first offset. Illegal. + .append_offset(0) + .ssz() + ), + DecodeError::OffsetIntoFixedPortion(0) + ); + assert_eq!( + err_from_bytes( + InvalidSszBuilder::default() + // This offset is far bigger than the SSZ buffer. Illegal. + .append_offset(max_offset) + .ssz() + ), + DecodeError::OffsetOutOfBounds(max_offset) + ); + assert!(matches!( + err_from_bytes( + InvalidSszBuilder::default() + .append_offset(8) + // This infers a really huge transaction. Illegal. + .append_offset(max_offset) + .append_value(&[0]) + .ssz() + ), + DecodeError::BytesInvalid(_) + )); + assert_eq!( + err_from_bytes( + InvalidSszBuilder::default() + .append_offset(8) + // This points outside of the given bytes. Illegal. + .append_offset(9) + .ssz() + ), + DecodeError::OffsetOutOfBounds(1) + ); + } + #[test] fn tree_hash() { for (test, transactions, reference) in TestVectors::default().iter() { From 7fabbc00c5d0e1d9b7f78e9a65f555a90cd3d2ca Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 4 Dec 2024 07:40:59 +1100 Subject: [PATCH 14/36] Add random vectors to tests --- consensus/types/src/transactions_opaque.rs | 67 +++++++++++++++------- 1 file changed, 47 insertions(+), 20 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 73e459aca99..33d5503628b 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -9,6 +9,11 @@ use std::iter::IntoIterator; use std::marker::PhantomData; use tree_hash::{mix_in_length, MerkleHasher, TreeHash}; +/// Max number of transactions in a `TestRandom` instance. +const TEST_RANDOM_MAX_TX_COUNT: usize = 128; +/// Max length of a transaction in a `TestRandom` instance. 
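+/// (Far below `EthSpec::max_bytes_per_transaction()`.)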
+const TEST_RANDOM_MAX_TX_BYTES: usize = 1_024; + #[derive(Debug)] pub enum Error { /// Exceeds `EthSpec::max_transactions_per_payload()` @@ -288,9 +293,17 @@ impl TreeHash for TransactionsOpaque { } } -impl TestRandom for TransactionsOpaque { - fn random_for_test(_rng: &mut impl RngCore) -> Self { - todo!("impl test random") +impl TestRandom for TransactionsOpaque { + fn random_for_test(rng: &mut impl RngCore) -> Self { + let mut txs = Self::default(); + let num_txs = rng.next_u32() as usize % TEST_RANDOM_MAX_TX_COUNT; + for _ in 0..num_txs { + let tx_len = rng.next_u32() as usize % TEST_RANDOM_MAX_TX_BYTES; + let mut tx = vec![0; tx_len]; + rng.fill_bytes(&mut tx[..]); + txs.push(&tx).unwrap(); + } + txs } } @@ -303,7 +316,10 @@ impl Arbitrary<'_> for TransactionsOpaque { #[cfg(test)] mod tests { use super::*; - use crate::{ssz_tagged_signed_beacon_block::encode, MainnetEthSpec, VariableList}; + use crate::{ + test_utils::{SeedableRng, XorShiftRng}, + MainnetEthSpec, VariableList, + }; type E = MainnetEthSpec; pub type ReferenceTransaction = VariableList; @@ -312,8 +328,10 @@ mod tests { ::MaxTransactionsPerPayload, >; + const NUM_RANDOM_VECTORS: usize = 256; + struct TestVector { - name: &'static str, + name: String, vector: Vec>, } @@ -323,57 +341,66 @@ mod tests { impl Default for TestVectors { fn default() -> Self { - let vectors = vec![ + let mut vectors = vec![ TestVector { - name: "empty", + name: "empty".into(), vector: vec![], }, TestVector { - name: "single_item_single_element", + name: "single_item_single_element".into(), vector: vec![vec![0]], }, TestVector { - name: "two_items_single_element", + name: "two_items_single_element".into(), vector: vec![vec![0], vec![1]], }, TestVector { - name: "three_items_single_element", + name: "three_items_single_element".into(), vector: vec![vec![0], vec![1], vec![1]], }, TestVector { - name: "single_item_multiple_element", + name: "single_item_multiple_element".into(), vector: vec![vec![0, 1, 2]], }, TestVector { - name: "two_items_multiple_element", + name: "two_items_multiple_element".into(), vector: vec![vec![0, 1, 2], vec![3, 4, 5]], }, TestVector { - name: "three_items_multiple_element", + name: "three_items_multiple_element".into(), vector: vec![vec![0, 1, 2], vec![3, 4], vec![5, 6, 7, 8]], }, TestVector { - name: "empty_list_at_start", + name: "empty_list_at_start".into(), vector: vec![vec![], vec![3, 4], vec![5, 6, 7, 8]], }, TestVector { - name: "empty_list_at_middle", + name: "empty_list_at_middle".into(), vector: vec![vec![0, 1, 2], vec![], vec![5, 6, 7, 8]], }, TestVector { - name: "empty_list_at_end", + name: "empty_list_at_end".into(), vector: vec![vec![0, 1, 2], vec![3, 4, 5], vec![]], }, TestVector { - name: "two_empty_lists", + name: "two_empty_lists".into(), vector: vec![vec![], vec![]], }, TestVector { - name: "three_empty_lists", + name: "three_empty_lists".into(), vector: vec![vec![], vec![], vec![]], }, ]; + let mut rng = XorShiftRng::from_seed([42; 16]); + for i in 0..NUM_RANDOM_VECTORS { + let vector = TransactionsOpaque::::random_for_test(&mut rng); + vectors.push(TestVector { + name: format!("random_vector_{i}"), + vector: vector.iter().map(|slice| slice.to_vec()).collect(), + }) + } + Self { vectors } } } @@ -383,13 +410,13 @@ mod tests { &self, ) -> impl Iterator< Item = ( - &'static str, + String, TransactionsOpaque, ReferenceTransactions, ), > + '_ { self.vectors.iter().map(|vector| { - let name = vector.name; + let name = vector.name.clone(); let transactions = 
TransactionsOpaque::from(vector.vector.clone()); // Build a equivalent object using From 1313255198f67afac6b31648f19d5d91cd6987a2 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 4 Dec 2024 08:18:58 +1100 Subject: [PATCH 15/36] Custom Checkpoint SSZ impls --- consensus/types/src/checkpoint.rs | 60 +++++++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 3 deletions(-) diff --git a/consensus/types/src/checkpoint.rs b/consensus/types/src/checkpoint.rs index 044fc57f22a..1d0ff48ff6a 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/checkpoint.rs @@ -1,7 +1,7 @@ use crate::test_utils::TestRandom; use crate::{Epoch, Hash256}; use serde::{Deserialize, Serialize}; -use ssz_derive::{Decode, Encode}; +use ssz::{Decode, DecodeError, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; @@ -19,8 +19,6 @@ use tree_hash_derive::TreeHash; Hash, Serialize, Deserialize, - Encode, - Decode, TreeHash, TestRandom, )] @@ -29,6 +27,62 @@ pub struct Checkpoint { pub root: Hash256, } +/// Use a custom implementation of SSZ to avoid the overhead of the derive macro. +impl Encode for Checkpoint { + fn is_ssz_fixed_len() -> bool { + true + } + + fn ssz_append(&self, buf: &mut Vec) { + self.epoch.ssz_append(buf); + self.root.ssz_append(buf); + } + + fn ssz_bytes_len(&self) -> usize { + self.epoch.ssz_bytes_len() + self.root.ssz_bytes_len() + } +} + +/// Use a custom implementation of SSZ to avoid the overhead of the derive macro. +impl Decode for Checkpoint { + fn is_ssz_fixed_len() -> bool { + true + } + + fn ssz_fixed_len() -> usize { + ::ssz_fixed_len() + ::ssz_fixed_len() + } + + fn from_ssz_bytes(bytes: &[u8]) -> Result { + let expected = ::ssz_fixed_len(); + + let (epoch, root) = bytes + .split_at_checked(::ssz_fixed_len()) + .ok_or(DecodeError::InvalidByteLength { + len: bytes.len(), + expected, + })?; + + if root.len() != ::ssz_fixed_len() { + return Err(DecodeError::InvalidByteLength { + len: bytes.len(), + expected, + }); + } + + let epoch = { + let mut array = [0; 8]; + array.copy_from_slice(&epoch); + u64::from_le_bytes(array) + }; + + Ok(Self { + epoch: Epoch::new(epoch), + root: Hash256::from_slice(root), + }) + } +} + #[cfg(test)] mod tests { use super::*; From 31ea1d503a344c80c15247b61056e71808beb9ad Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Thu, 5 Dec 2024 06:06:46 +1100 Subject: [PATCH 16/36] Implement serde --- consensus/types/src/transactions_opaque.rs | 156 ++++++++++++++++----- 1 file changed, 121 insertions(+), 35 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 33d5503628b..55092779614 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -3,7 +3,11 @@ use crate::EthSpec; use arbitrary::Arbitrary; use derivative::Derivative; use rand::RngCore; -use serde::{ser::Serializer, Deserialize, Deserializer, Serialize}; +use serde::{ + ser::{SerializeSeq, Serializer}, + Deserialize, Deserializer, Serialize, +}; +use serde_utils::hex; use ssz::{encode_length, read_offset, Decode, DecodeError, Encode, BYTES_PER_LENGTH_OFFSET}; use std::iter::IntoIterator; use std::marker::PhantomData; @@ -24,47 +28,25 @@ pub enum Error { /// The list of transactions in an execution payload. /// -/// This data-structure represents the transactions very closely to how they're +/// This data-structure represents the transactions similarly to how they're /// encoded as SSZ. This makes for fast and low-allocation-count `ssz::Decode`. 
-/// -/// The impact on iterating/accessing transactions in this data structure is -/// minimal or negligible compared to a `Vec>`. #[derive(Debug, Clone, Derivative)] #[derivative(Default, PartialEq, Hash(bound = "E: EthSpec"))] pub struct TransactionsOpaque { + /// Points to the first byte of each transaction in `bytes`. offsets: Vec, + /// All transactions, concatenated together. bytes: Vec, + /// `EthSpec` to capture maximum allowed lengths. _phantom: PhantomData, } -impl TransactionsOpaque { +impl TransactionsOpaque { /// Creates an empty list. pub fn empty() -> Self { Self::default() } - /// Adds an `item` (i.e. transaction) to the list. - /// - /// ## Errors - /// - /// - If the `item` is longer than `EthSpec::max_bytes_per_transaction()`. - /// - If the operation would make this list longer than - /// `EthSpec::max_transactions_per_payload()`. - pub fn push(&mut self, item: &[u8]) -> Result<(), Error> { - let max_tx_count = ::max_transactions_per_payload(); - let max_tx_bytes = ::max_bytes_per_transaction(); - - if item.len() > max_tx_bytes { - Err(Error::TransactionTooBig) - } else if self.offsets.len() >= max_tx_count { - Err(Error::TooManyTransactions) - } else { - self.offsets.push(self.bytes.len()); - self.bytes.extend_from_slice(item); - Ok(()) - } - } - /// Iterates all transactions in `self`. pub fn iter(&self) -> impl Iterator { self.into_iter() @@ -87,6 +69,30 @@ impl TransactionsOpaque { } } +impl TransactionsOpaque { + /// Adds an `item` (i.e. transaction) to the list. + /// + /// ## Errors + /// + /// - If the `item` is longer than `EthSpec::max_bytes_per_transaction()`. + /// - If the operation would make this list longer than + /// `EthSpec::max_transactions_per_payload()`. + pub fn push(&mut self, item: &[u8]) -> Result<(), Error> { + let max_tx_count = ::max_transactions_per_payload(); + let max_tx_bytes = ::max_bytes_per_transaction(); + + if item.len() > max_tx_bytes { + Err(Error::TransactionTooBig) + } else if self.offsets.len() >= max_tx_count { + Err(Error::TooManyTransactions) + } else { + self.offsets.push(self.bytes.len()); + self.bytes.extend_from_slice(item); + Ok(()) + } + } +} + impl From>> for TransactionsOpaque { fn from(v: Vec>) -> Self { let mut txs = Self::default(); @@ -230,19 +236,54 @@ impl<'a> Iterator for TransactionsOpaqueIter<'a> { } } -/// Serialization for http requests. +#[derive(Default)] +pub struct Visitor { + _phantom: PhantomData, +} + +impl<'a, E> serde::de::Visitor<'a> for Visitor +where + E: EthSpec, +{ + type Value = TransactionsOpaque; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(formatter, "a list of 0x-prefixed hex bytes") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: serde::de::SeqAccess<'a>, + { + let mut txs: TransactionsOpaque = <_>::default(); + + while let Some(hex_str) = seq.next_element::<&str>()? 
{ + let bytes = hex::decode(hex_str).map_err(serde::de::Error::custom)?; + txs.push(&bytes).map_err(|e| { + serde::de::Error::custom(format!("failed to deserialize transaction: {:?}.", e)) + })?; + } + + Ok(txs) + } +} + impl Serialize for TransactionsOpaque { - fn serialize(&self, _serializer: S) -> Result { - todo!("impl serde serialize") + fn serialize(&self, serializer: S) -> Result { + let mut seq = serializer.serialize_seq(Some(self.len()))?; + for bytes in self { + seq.serialize_element(&hex::encode(&bytes))?; + } + seq.end() } } -impl<'de, E> Deserialize<'de> for TransactionsOpaque { - fn deserialize(_deserializer: D) -> Result +impl<'de, E: EthSpec> Deserialize<'de> for TransactionsOpaque { + fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - todo!("impl serde deserialize") + deserializer.deserialize_seq(Visitor::default()) } } @@ -451,7 +492,7 @@ mod tests { } #[test] - fn ssz_round_trip() { + fn ssz() { for (test, transactions, reference) in TestVectors::default().iter() { assert_eq!( transactions.as_ssz_bytes(), @@ -567,4 +608,49 @@ mod tests { ) } } + + #[derive(Serialize, Deserialize)] + #[serde(transparent)] + struct SerdeWrapper { + #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] + reference: ReferenceTransactions, + } + + #[test] + fn json() { + for (test, transactions, reference) in TestVectors::default().iter() { + let reference = SerdeWrapper { reference }; + + assert_eq!( + serde_json::to_string(&transactions).unwrap(), + serde_json::to_string(&reference).unwrap(), + "{test} - to json" + ); + + assert_eq!( + transactions, + serde_json::from_str(&serde_json::to_string(&reference).unwrap()).unwrap(), + "{test} - deserialize" + ); + } + } + + #[test] + fn yaml() { + for (test, transactions, reference) in TestVectors::default().iter() { + let reference = SerdeWrapper { reference }; + + assert_eq!( + serde_yaml::to_string(&transactions).unwrap(), + serde_yaml::to_string(&reference).unwrap(), + "{test} - to json" + ); + + assert_eq!( + transactions, + serde_yaml::from_str(&serde_yaml::to_string(&reference).unwrap()).unwrap(), + "{test} - deserialize" + ); + } + } } From dd7b97e7bd543a7e5a1054a34eae46e4f0a537a7 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Thu, 5 Dec 2024 06:15:49 +1100 Subject: [PATCH 17/36] Impl arbitrary --- consensus/types/src/transactions_opaque.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions_opaque.rs index 55092779614..ff57f8af79a 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions_opaque.rs @@ -348,9 +348,17 @@ impl TestRandom for TransactionsOpaque { } } -impl Arbitrary<'_> for TransactionsOpaque { - fn arbitrary(_u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result { - todo!("impl arbitrary") +impl Arbitrary<'_> for TransactionsOpaque { + fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result { + let mut txs = Self::default(); + let num_txs = usize::arbitrary(u).unwrap() % TEST_RANDOM_MAX_TX_COUNT; + for _ in 0..num_txs { + let tx_len = usize::arbitrary(u).unwrap() % TEST_RANDOM_MAX_TX_BYTES; + let mut tx = vec![0; tx_len]; + u.fill_buffer(&mut tx[..]).unwrap(); + txs.push(&tx).unwrap(); + } + Ok(txs) } } From 56ad1ce5d0c036829736cdad267d9fbcbfad6d61 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Thu, 5 Dec 2024 07:53:33 +1100 Subject: [PATCH 18/36] Rename struct, fix tests and lints --- 
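A minimal usage sketch of the transactions API that the call sites below migrate to (this assumes the `Transactions` and `MainnetEthSpec` exports from the `types` crate, as used elsewhere in this series; it is an illustration, not part of the patch):

    use types::{MainnetEthSpec, Transactions};

    let mut txs = Transactions::<MainnetEthSpec>::default();
    txs.push(&[0xde, 0xad, 0xbe, 0xef]).unwrap();
    // `iter()` yields plain `&[u8]` slices; no `VariableList` wrapping is involved.
    for tx in txs.iter() {
        assert_eq!(tx, [0xde, 0xad, 0xbe, 0xef].as_slice());
    }
    assert_eq!(txs.len(), 1);
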
beacon_node/execution_layer/src/block_hash.rs | 4 +- .../execution_layer/src/engine_api/http.rs | 9 +-- .../test_utils/execution_block_generator.rs | 12 ++-- .../execution_layer/src/versioned_hashes.rs | 12 +--- .../lighthouse_network/src/rpc/codec.rs | 17 ++++-- .../lighthouse_network/tests/rpc_tests.rs | 19 +++--- consensus/types/src/checkpoint.rs | 4 +- consensus/types/src/execution_payload.rs | 3 - consensus/types/src/lib.rs | 6 +- ...transactions_opaque.rs => transactions.rs} | 58 +++++++++---------- 10 files changed, 66 insertions(+), 78 deletions(-) rename consensus/types/src/{transactions_opaque.rs => transactions.rs} (93%) diff --git a/beacon_node/execution_layer/src/block_hash.rs b/beacon_node/execution_layer/src/block_hash.rs index b51f35ceb99..51efd407a31 100644 --- a/beacon_node/execution_layer/src/block_hash.rs +++ b/beacon_node/execution_layer/src/block_hash.rs @@ -21,9 +21,7 @@ pub fn calculate_execution_block_hash( // Calculate the transactions root. // We're currently using a deprecated Parity library for this. We should move to a // better alternative when one appears, possibly following Reth. - let rlp_transactions_root = ordered_trie_root::( - payload.transactions().iter().map(|txn_bytes| txn_bytes), - ); + let rlp_transactions_root = ordered_trie_root::(payload.transactions().iter()); // Calculate withdrawals root (post-Capella). let rlp_withdrawals_root = if let Ok(withdrawals) = payload.withdrawals() { diff --git a/beacon_node/execution_layer/src/engine_api/http.rs b/beacon_node/execution_layer/src/engine_api/http.rs index d4734be448d..135ef1e7d21 100644 --- a/beacon_node/execution_layer/src/engine_api/http.rs +++ b/beacon_node/execution_layer/src/engine_api/http.rs @@ -1472,14 +1472,11 @@ mod test { /// Example: if `spec == &[1, 1]`, then two one-byte transactions will be created. 
fn generate_transactions(spec: &[usize]) -> Transactions { - let mut txs = VariableList::default(); + let mut txs = Transactions::default(); for &num_bytes in spec { - let mut tx = VariableList::default(); - for _ in 0..num_bytes { - tx.push(0).unwrap(); - } - txs.push(tx).unwrap(); + let tx = vec![0; num_bytes]; + txs.push(&tx).unwrap(); } txs diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index bfe746cbea5..09171afac0f 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -12,7 +12,6 @@ use parking_lot::Mutex; use rand::{rngs::StdRng, Rng, SeedableRng}; use serde::{Deserialize, Serialize}; use ssz::Decode; -use ssz_types::VariableList; use std::collections::HashMap; use std::sync::Arc; use tree_hash::TreeHash; @@ -20,8 +19,7 @@ use tree_hash_derive::TreeHash; use types::{ Blob, ChainSpec, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadElectra, - ExecutionPayloadHeader, FixedBytesExtended, ForkName, Hash256, Transaction, Transactions, - Uint256, + ExecutionPayloadHeader, FixedBytesExtended, ForkName, Hash256, Transactions, Uint256, }; use super::DEFAULT_TERMINAL_BLOCK; @@ -710,8 +708,7 @@ pub fn generate_blobs( let mut transactions = Transactions::default(); for blob_index in 0..n_blobs { - let tx = static_valid_tx::() - .map_err(|e| format!("error creating valid tx SSZ bytes: {:?}", e))?; + let tx = static_valid_tx::(); transactions .push(&tx) @@ -733,7 +730,7 @@ pub fn generate_blobs( Ok((bundle, transactions)) } -pub fn static_valid_tx() -> Result, String> { +pub fn static_valid_tx() -> Vec { // This is a real transaction hex encoded, but we don't care about the contents of the transaction. 
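     // (It is decoded into an ethers transaction below and then RLP-encoded into the
     // raw bytes that are returned.)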
let transaction: EthersTransaction = serde_json::from_str( r#"{ @@ -754,8 +751,7 @@ pub fn static_valid_tx() -> Result PayloadId { diff --git a/beacon_node/execution_layer/src/versioned_hashes.rs b/beacon_node/execution_layer/src/versioned_hashes.rs index b032d4f5e2d..8ad2e60438e 100644 --- a/beacon_node/execution_layer/src/versioned_hashes.rs +++ b/beacon_node/execution_layer/src/versioned_hashes.rs @@ -76,7 +76,7 @@ mod test { #[test] fn test_decode_static_transaction() { - let valid_tx = static_valid_tx::().expect("should give me known valid transaction"); + let valid_tx = static_valid_tx::(); let tx_envelope = beacon_tx_to_tx_envelope(&valid_tx).expect("should decode tx"); let TxEnvelope::Legacy(signed_tx) = tx_envelope else { panic!("should decode to legacy transaction"); @@ -96,15 +96,9 @@ mod test { #[test] fn test_extract_versioned_hashes() { - use serde::Deserialize; + use types::Transactions; - #[derive(Deserialize)] - #[serde(transparent)] - struct TestTransactions( - #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] types::Transactions, - ); - - let TestTransactions(raw_transactions): TestTransactions = serde_json::from_str(r#"[ + let raw_transactions: Transactions = serde_json::from_str(r#"[ "0x03f901388501a1f0ff430f843b9aca00843b9aca0082520894e7249813d8ccf6fa95a2203f46a64166073d58878080c002f8c6a0012e98362c814f1724262c0d211a1463418a5f6382a8d457b37a2698afbe7b5ea00100ef985761395dfa8ed5ce91f3f2180b612401909e4cb8f33b90c8a454d9baa0013d45411623b90d90f916e4025ada74b453dd4ca093c017c838367c9de0f801a001753e2af0b1e70e7ef80541355b2a035cc9b2c177418bb2a4402a9b346cf84da0011789b520a8068094a92aa0b04db8d8ef1c6c9818947c5210821732b8744049a0011c4c4f95597305daa5f62bf5f690e37fa11f5de05a95d05cac4e2119e394db80a0ccd86a742af0e042d08cbb35d910ddc24bbc6538f9e53be6620d4b6e1bb77662a01a8bacbc614940ac2f5c23ffc00a122c9f085046883de65c88ab0edb859acb99", "0x02f9017a8501a1f0ff4382363485012a05f2008512a05f2000830249f094c1b0bc605e2c808aa0867bfc98e51a1fe3e9867f80b901040cc7326300000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000009445a285baa43e00000000000000000000000000c500931f24edb821cef6e28f7adb33b38578c82000000000000000000000000fc7360b3b28cf4204268a8354dbec60720d155d2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000009a054a063f0fe7b9c68de8df91aaa5e96c15ab540000000000000000000000000c8d41b8fcc066cdabaf074d78e5153e8ce018a9c080a008e14475c1173cd9f5740c24c08b793f9e16c36c08fa73769db95050e31e3396a019767dcdda26c4a774ca28c9df15d0c20e43bd07bd33ee0f84d6096cb5a1ebed" ]"#).expect("should get raw transactions"); diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index 5d86936d41d..f0a0ef43ba0 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -933,6 +933,7 @@ mod tests { use super::*; use crate::rpc::protocol::*; use crate::types::{EnrAttestationBitfield, EnrSyncCommitteeBitfield}; + use types::Transactions; use types::{ blob_sidecar::BlobIdentifier, BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, DataColumnIdentifier, EmptyBlock, Epoch, FixedBytesExtended, @@ -987,6 +988,14 @@ mod tests { Arc::new(DataColumnSidecar::empty()) } + fn transactions() -> Transactions { + let mut transactions = Transactions::default(); + for _ in 0..100000 { + transactions.push(&[0; 1024]).unwrap() + } + transactions + } + /// 
Bellatrix block with length < max_rpc_size. fn bellatrix_block_small( fork_context: &ForkContext, @@ -994,10 +1003,8 @@ mod tests { ) -> SignedBeaconBlock { let mut block: BeaconBlockBellatrix<_, FullPayload> = BeaconBlockBellatrix::empty(&Spec::default_spec()); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat(tx).take(5000).collect::>()); - block.body.execution_payload.execution_payload.transactions = txs; + block.body.execution_payload.execution_payload.transactions = transactions(); let block = BeaconBlock::Bellatrix(block); assert!(block.ssz_bytes_len() <= max_rpc_size(fork_context, spec.max_chunk_size as usize)); @@ -1013,10 +1020,8 @@ mod tests { ) -> SignedBeaconBlock { let mut block: BeaconBlockBellatrix<_, FullPayload> = BeaconBlockBellatrix::empty(&Spec::default_spec()); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat(tx).take(100000).collect::>()); - block.body.execution_payload.execution_payload.transactions = txs; + block.body.execution_payload.execution_payload.transactions = transactions(); let block = BeaconBlock::Bellatrix(block); assert!(block.ssz_bytes_len() > max_rpc_size(fork_context, spec.max_chunk_size as usize)); diff --git a/beacon_node/lighthouse_network/tests/rpc_tests.rs b/beacon_node/lighthouse_network/tests/rpc_tests.rs index f721c8477cf..fcd986306e6 100644 --- a/beacon_node/lighthouse_network/tests/rpc_tests.rs +++ b/beacon_node/lighthouse_network/tests/rpc_tests.rs @@ -8,7 +8,6 @@ use lighthouse_network::service::api_types::AppRequestId; use lighthouse_network::{rpc::max_rpc_size, NetworkEvent, ReportSource, Response}; use slog::{debug, warn, Level}; use ssz::Encode; -use ssz_types::VariableList; use std::sync::Arc; use std::time::Duration; use tokio::runtime::Runtime; @@ -16,18 +15,24 @@ use tokio::time::sleep; use types::{ BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BlobSidecar, ChainSpec, EmptyBlock, Epoch, EthSpec, FixedBytesExtended, ForkContext, ForkName, Hash256, MinimalEthSpec, - Signature, SignedBeaconBlock, Slot, + Signature, SignedBeaconBlock, Slot, Transactions, }; type E = MinimalEthSpec; +fn transactions(n: usize) -> Transactions { + let mut transactions = Transactions::default(); + for _ in 0..n { + transactions.push(&[0; 1024]).unwrap() + } + transactions +} + /// Bellatrix block with length < max_rpc_size. fn bellatrix_block_small(fork_context: &ForkContext, spec: &ChainSpec) -> BeaconBlock { let mut block = BeaconBlockBellatrix::::empty(spec); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat(tx).take(5000).collect::>()); - block.body.execution_payload.execution_payload.transactions = txs; + block.body.execution_payload.execution_payload.transactions = transactions(5000); let block = BeaconBlock::Bellatrix(block); assert!(block.ssz_bytes_len() <= max_rpc_size(fork_context, spec.max_chunk_size as usize)); @@ -39,10 +44,8 @@ fn bellatrix_block_small(fork_context: &ForkContext, spec: &ChainSpec) -> Beacon /// Hence, we generate a bellatrix block just greater than `MAX_RPC_SIZE` to test rejection on the rpc layer. 
fn bellatrix_block_large(fork_context: &ForkContext, spec: &ChainSpec) -> BeaconBlock { let mut block = BeaconBlockBellatrix::::empty(spec); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat(tx).take(100000).collect::>()); - block.body.execution_payload.execution_payload.transactions = txs; + block.body.execution_payload.execution_payload.transactions = transactions(100_000); let block = BeaconBlock::Bellatrix(block); assert!(block.ssz_bytes_len() > max_rpc_size(fork_context, spec.max_chunk_size as usize)); diff --git a/consensus/types/src/checkpoint.rs b/consensus/types/src/checkpoint.rs index 1d0ff48ff6a..74c2775ba06 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/checkpoint.rs @@ -38,6 +38,7 @@ impl Encode for Checkpoint { self.root.ssz_append(buf); } + #[allow(clippy::arithmetic_side_effects)] fn ssz_bytes_len(&self) -> usize { self.epoch.ssz_bytes_len() + self.root.ssz_bytes_len() } @@ -49,6 +50,7 @@ impl Decode for Checkpoint { true } + #[allow(clippy::arithmetic_side_effects)] fn ssz_fixed_len() -> usize { ::ssz_fixed_len() + ::ssz_fixed_len() } @@ -72,7 +74,7 @@ impl Decode for Checkpoint { let epoch = { let mut array = [0; 8]; - array.copy_from_slice(&epoch); + array.copy_from_slice(epoch); u64::from_le_bytes(array) }; diff --git a/consensus/types/src/execution_payload.rs b/consensus/types/src/execution_payload.rs index 537e0ede70d..4308f171c31 100644 --- a/consensus/types/src/execution_payload.rs +++ b/consensus/types/src/execution_payload.rs @@ -6,9 +6,6 @@ use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; -pub type Transaction = VariableList; -pub type Transactions = TransactionsOpaque; - pub type Withdrawals = VariableList::MaxWithdrawalsPerPayload>; #[superstruct( diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index 85928c3579d..0151a78bed7 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -71,7 +71,7 @@ pub mod signed_voluntary_exit; pub mod signing_data; pub mod sync_committee_subscription; pub mod sync_duty; -pub mod transactions_opaque; +pub mod transactions; pub mod validator; pub mod validator_subscription; pub mod voluntary_exit; @@ -164,7 +164,7 @@ pub use crate::execution_block_hash::ExecutionBlockHash; pub use crate::execution_block_header::{EncodableExecutionBlockHeader, ExecutionBlockHeader}; pub use crate::execution_payload::{ ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, - ExecutionPayloadElectra, ExecutionPayloadRef, Transaction, Transactions, Withdrawals, + ExecutionPayloadElectra, ExecutionPayloadRef, Withdrawals, }; pub use crate::execution_payload_header::{ ExecutionPayloadHeader, ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, @@ -248,7 +248,7 @@ pub use crate::sync_committee_subscription::SyncCommitteeSubscription; pub use crate::sync_duty::SyncDuty; pub use crate::sync_selection_proof::SyncSelectionProof; pub use crate::sync_subnet_id::SyncSubnetId; -pub use crate::transactions_opaque::TransactionsOpaque; +pub use crate::transactions::Transactions; pub use crate::validator::Validator; pub use crate::validator_registration_data::*; pub use crate::validator_subscription::ValidatorSubscription; diff --git a/consensus/types/src/transactions_opaque.rs b/consensus/types/src/transactions.rs similarity index 93% rename from consensus/types/src/transactions_opaque.rs rename to consensus/types/src/transactions.rs index 
ff57f8af79a..4e7e712c43e 100644 --- a/consensus/types/src/transactions_opaque.rs +++ b/consensus/types/src/transactions.rs @@ -32,7 +32,7 @@ pub enum Error { /// encoded as SSZ. This makes for fast and low-allocation-count `ssz::Decode`. #[derive(Debug, Clone, Derivative)] #[derivative(Default, PartialEq, Hash(bound = "E: EthSpec"))] -pub struct TransactionsOpaque { +pub struct Transactions { /// Points to the first byte of each transaction in `bytes`. offsets: Vec, /// All transactions, concatenated together. @@ -41,7 +41,7 @@ pub struct TransactionsOpaque { _phantom: PhantomData, } -impl TransactionsOpaque { +impl Transactions { /// Creates an empty list. pub fn empty() -> Self { Self::default() @@ -69,7 +69,7 @@ impl TransactionsOpaque { } } -impl TransactionsOpaque { +impl Transactions { /// Adds an `item` (i.e. transaction) to the list. /// /// ## Errors @@ -93,7 +93,7 @@ impl TransactionsOpaque { } } -impl From>> for TransactionsOpaque { +impl From>> for Transactions { fn from(v: Vec>) -> Self { let mut txs = Self::default(); for vec in v { @@ -103,7 +103,7 @@ impl From>> for TransactionsOpaque { } } -impl Encode for TransactionsOpaque { +impl Encode for Transactions { fn is_ssz_fixed_len() -> bool { false } @@ -123,7 +123,7 @@ impl Encode for TransactionsOpaque { } } -impl Decode for TransactionsOpaque { +impl Decode for Transactions { fn is_ssz_fixed_len() -> bool { false } @@ -208,24 +208,24 @@ impl Decode for TransactionsOpaque { } } -impl<'a, E> IntoIterator for &'a TransactionsOpaque { +impl<'a, E> IntoIterator for &'a Transactions { type Item = &'a [u8]; - type IntoIter = TransactionsOpaqueIter<'a>; + type IntoIter = TransactionsIter<'a>; - fn into_iter(self) -> TransactionsOpaqueIter<'a> { - TransactionsOpaqueIter { + fn into_iter(self) -> TransactionsIter<'a> { + TransactionsIter { offsets: &self.offsets, bytes: &self.bytes, } } } -pub struct TransactionsOpaqueIter<'a> { +pub struct TransactionsIter<'a> { offsets: &'a [usize], bytes: &'a [u8], } -impl<'a> Iterator for TransactionsOpaqueIter<'a> { +impl<'a> Iterator for TransactionsIter<'a> { type Item = &'a [u8]; fn next(&mut self) -> Option { @@ -245,7 +245,7 @@ impl<'a, E> serde::de::Visitor<'a> for Visitor where E: EthSpec, { - type Value = TransactionsOpaque; + type Value = Transactions; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { write!(formatter, "a list of 0x-prefixed hex bytes") @@ -255,7 +255,7 @@ where where A: serde::de::SeqAccess<'a>, { - let mut txs: TransactionsOpaque = <_>::default(); + let mut txs: Transactions = <_>::default(); while let Some(hex_str) = seq.next_element::<&str>()? 
{ let bytes = hex::decode(hex_str).map_err(serde::de::Error::custom)?; @@ -268,17 +268,17 @@ where } } -impl Serialize for TransactionsOpaque { +impl Serialize for Transactions { fn serialize(&self, serializer: S) -> Result { let mut seq = serializer.serialize_seq(Some(self.len()))?; for bytes in self { - seq.serialize_element(&hex::encode(&bytes))?; + seq.serialize_element(&hex::encode(bytes))?; } seq.end() } } -impl<'de, E: EthSpec> Deserialize<'de> for TransactionsOpaque { +impl<'de, E: EthSpec> Deserialize<'de> for Transactions { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, @@ -287,7 +287,7 @@ impl<'de, E: EthSpec> Deserialize<'de> for TransactionsOpaque { } } -impl TreeHash for TransactionsOpaque { +impl TreeHash for Transactions { fn tree_hash_type() -> tree_hash::TreeHashType { tree_hash::TreeHashType::List } @@ -300,6 +300,7 @@ impl TreeHash for TransactionsOpaque { panic!("transactions should never be packed") } + #[allow(clippy::arithmetic_side_effects)] fn tree_hash_root(&self) -> tree_hash::Hash256 { let max_tx_count = ::max_transactions_per_payload(); let max_tx_len = ::max_bytes_per_transaction(); @@ -334,7 +335,7 @@ impl TreeHash for TransactionsOpaque { } } -impl TestRandom for TransactionsOpaque { +impl TestRandom for Transactions { fn random_for_test(rng: &mut impl RngCore) -> Self { let mut txs = Self::default(); let num_txs = rng.next_u32() as usize % TEST_RANDOM_MAX_TX_COUNT; @@ -348,7 +349,7 @@ impl TestRandom for TransactionsOpaque { } } -impl Arbitrary<'_> for TransactionsOpaque { +impl Arbitrary<'_> for Transactions { fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result { let mut txs = Self::default(); let num_txs = usize::arbitrary(u).unwrap() % TEST_RANDOM_MAX_TX_COUNT; @@ -443,7 +444,7 @@ mod tests { let mut rng = XorShiftRng::from_seed([42; 16]); for i in 0..NUM_RANDOM_VECTORS { - let vector = TransactionsOpaque::::random_for_test(&mut rng); + let vector = Transactions::::random_for_test(&mut rng); vectors.push(TestVector { name: format!("random_vector_{i}"), vector: vector.iter().map(|slice| slice.to_vec()).collect(), @@ -457,16 +458,11 @@ mod tests { impl TestVectors { fn iter( &self, - ) -> impl Iterator< - Item = ( - String, - TransactionsOpaque, - ReferenceTransactions, - ), - > + '_ { + ) -> impl Iterator, ReferenceTransactions)> + '_ + { self.vectors.iter().map(|vector| { let name = vector.name.clone(); - let transactions = TransactionsOpaque::from(vector.vector.clone()); + let transactions = Transactions::from(vector.vector.clone()); // Build a equivalent object using // `VariableList>`. We can use this for @@ -509,14 +505,14 @@ mod tests { ); assert_eq!( transactions, - TransactionsOpaque::from_ssz_bytes(&reference.as_ssz_bytes()).unwrap(), + Transactions::from_ssz_bytes(&reference.as_ssz_bytes()).unwrap(), "{test} - deserialization" ) } } fn err_from_bytes(bytes: &[u8]) -> DecodeError { - TransactionsOpaque::::from_ssz_bytes(bytes).unwrap_err() + Transactions::::from_ssz_bytes(bytes).unwrap_err() } /// Helper to build invalid SSZ bytes. 
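The rename above leaves the type's public surface unchanged apart from the name: callers still build a list with `push`, iterate borrowed byte slices, and round-trip through SSZ. Below is a minimal usage sketch of that flow, not part of the patch series itself; it assumes the `MinimalEthSpec` preset used elsewhere in these patches, infers a `push(&[u8])` signature from the call sites in the diffs, and uses an illustrative test name.

use ssz::{Decode, Encode};
use types::{MinimalEthSpec, Transactions};

#[test]
fn transactions_round_trip_sketch() {
    // Build a small list by pushing raw transaction bytes, as the updated
    // helpers in http.rs and the RPC codec tests now do.
    let mut txs = Transactions::<MinimalEthSpec>::default();
    txs.push(&[0xde, 0xad, 0xbe, 0xef]).unwrap();
    txs.push(&[0u8; 32]).unwrap();

    // Iteration yields `&[u8]` slices into the shared byte buffer, so no
    // per-transaction allocation is needed.
    for (i, tx) in txs.iter().enumerate() {
        println!("tx {i}: {} bytes", tx.len());
    }

    // Encode/decode round-trip through the offset-based SSZ layout.
    let bytes = txs.as_ssz_bytes();
    let decoded = Transactions::<MinimalEthSpec>::from_ssz_bytes(&bytes).unwrap();
    assert_eq!(decoded.len(), 2);
}

Storing a `Vec` of offsets plus one flat byte buffer is what keeps encoding and decoding low on allocations, which is the stated motivation for replacing the nested `VariableList` representation.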
From e0fd18986d9a1b7ba9b7575ade549933e6fd926a Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Thu, 5 Dec 2024 08:08:55 +1100 Subject: [PATCH 19/36] Fix clippy lint --- consensus/types/src/transactions.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/consensus/types/src/transactions.rs b/consensus/types/src/transactions.rs index 4e7e712c43e..b4e280b1279 100644 --- a/consensus/types/src/transactions.rs +++ b/consensus/types/src/transactions.rs @@ -305,7 +305,7 @@ impl TreeHash for Transactions { let max_tx_count = ::max_transactions_per_payload(); let max_tx_len = ::max_bytes_per_transaction(); let bytes_per_leaf = 32; - let tx_leaf_count = (max_tx_len + bytes_per_leaf - 1) / bytes_per_leaf; + let tx_leaf_count = max_tx_len.div_ceil(bytes_per_leaf); let mut hasher = MerkleHasher::with_leaves(max_tx_count); From 3ca39f2790444aaeaf75b4d0b3902a45babfa16a Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Fri, 6 Dec 2024 06:12:10 +1100 Subject: [PATCH 20/36] Bump some crate versions --- Cargo.lock | 52 ++++++++++++++++++++++++++++++++++++++++++++-------- Cargo.toml | 35 +++++++++++++++++++++++++---------- 2 files changed, 69 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c8d3c1c1a4d..0c897337568 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2328,6 +2328,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "educe" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" +dependencies = [ + "enum-ordinalize", + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "ef_tests" version = "0.2.0" @@ -2445,6 +2457,26 @@ dependencies = [ "syn 2.0.89", ] +[[package]] +name = "enum-ordinalize" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea0dcfa4e54eeb516fe454635a95753ddd39acda650ce703031c6973e315dd5" +dependencies = [ + "enum-ordinalize-derive", +] + +[[package]] +name = "enum-ordinalize-derive" +version = "4.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "env_logger" version = "0.8.4" @@ -2808,20 +2840,25 @@ dependencies = [ [[package]] name = "ethereum_ssz" -version = "0.7.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e999563461faea0ab9bc0024e5e66adcee35881f3d5062f52f31a4070fe1522" +checksum = "036c84bd29bff35e29bbee3c8fc0e2fb95db12b6f2f3cae82a827fbc97256f3a" dependencies = [ "alloy-primitives", + "arbitrary", + "ethereum_serde_utils", "itertools 0.13.0", + "serde", + "serde_derive", "smallvec", + "typenum", ] [[package]] name = "ethereum_ssz_derive" -version = "0.7.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3deae99c8e74829a00ba7a92d49055732b3c1f093f2ccfa3cbc621679b6fa91" +checksum = "9dc8e67e1f770f5aa4c2c2069aaaf9daee7ac21bed357a71b911b37a58966cfb" dependencies = [ "darling 0.20.10", "proc-macro2", @@ -5630,12 +5667,11 @@ dependencies = [ [[package]] name = "milhouse" version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f68e33f98199224d1073f7c1468ea6abfea30736306fb79c7181a881e97ea32f" +source = "git+https://github.com/paulhauner/milhouse?rev=b6bccd3df29c7056bec56987f428df5f7a0f8b04#b6bccd3df29c7056bec56987f428df5f7a0f8b04" dependencies = [ 
"alloy-primitives", "arbitrary", - "derivative", + "educe", "ethereum_hashing", "ethereum_ssz", "ethereum_ssz_derive", @@ -8307,7 +8343,7 @@ dependencies = [ [[package]] name = "ssz_types" version = "0.8.0" -source = "git+https://github.com/paulhauner/ssz_types.git?rev=26716095d94e02a2cbd72c94a8b3b8549ecf14d1#26716095d94e02a2cbd72c94a8b3b8549ecf14d1" +source = "git+https://github.com/sigp/ssz_types.git?rev=2b01793356bb1b5946df8f9d317b2a8d8feb602b#2b01793356bb1b5946df8f9d317b2a8d8feb602b" dependencies = [ "arbitrary", "derivative", diff --git a/Cargo.toml b/Cargo.toml index 5fe3038a95e..0da20c93743 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -126,8 +126,8 @@ delay_map = "0.4" derivative = "2" dirs = "3" either = "1.9" - # TODO: rust_eth_kzg is pinned for now while a perf regression is investigated - # The crate_crypto_* dependencies can be removed from this file completely once we update +# TODO: rust_eth_kzg is pinned for now while a perf regression is investigated +# The crate_crypto_* dependencies can be removed from this file completely once we update rust_eth_kzg = "=0.5.1" crate_crypto_internal_eth_kzg_bls12_381 = "=0.5.1" crate_crypto_internal_eth_kzg_erasure_codes = "=0.5.1" @@ -138,8 +138,8 @@ discv5 = { version = "0.9", features = ["libp2p"] } env_logger = "0.9" ethereum_hashing = "0.7.0" ethereum_serde_utils = "0.7" -ethereum_ssz = "0.7" -ethereum_ssz_derive = "0.7" +ethereum_ssz = "0.8" +ethereum_ssz_derive = "0.8" ethers-core = "1" ethers-providers = { version = "1", default-features = false } exit-future = "0.2" @@ -155,7 +155,7 @@ libsecp256k1 = "0.7" log = "0.4" lru = "0.12" maplit = "1" -milhouse = "0.3" +milhouse = { rev = "b6bccd3df29c7056bec56987f428df5f7a0f8b04", git = "https://github.com/paulhauner/milhouse" } num_cpus = "1" parking_lot = "0.12" paste = "1" @@ -167,7 +167,13 @@ r2d2 = "0.8" rand = "0.8" rayon = "1.7" regex = "1" -reqwest = { version = "0.11", default-features = false, features = ["blocking", "json", "stream", "rustls-tls", "native-tls-vendored"] } +reqwest = { version = "0.11", default-features = false, features = [ + "blocking", + "json", + "stream", + "rustls-tls", + "native-tls-vendored", +] } ring = "0.16" rpds = "0.11" rusqlite = { version = "0.28", features = ["bundled"] } @@ -176,19 +182,28 @@ serde_json = "1" serde_repr = "0.1" serde_yaml = "0.9" sha2 = "0.9" -slog = { version = "2", features = ["max_level_debug", "release_max_level_debug", "nested-values"] } +slog = { version = "2", features = [ + "max_level_debug", + "release_max_level_debug", + "nested-values", +] } slog-async = "2" slog-term = "2" sloggers = { version = "2", features = ["json"] } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = { rev = "26716095d94e02a2cbd72c94a8b3b8549ecf14d1", git = "https://github.com/paulhauner/ssz_types.git" } +ssz_types = { rev = "2b01793356bb1b5946df8f9d317b2a8d8feb602b", git = "https://github.com/sigp/ssz_types.git" } strum = { version = "0.24", features = ["derive"] } superstruct = "0.8" syn = "1" sysinfo = "0.26" tempfile = "3" -tokio = { version = "1", features = ["rt-multi-thread", "sync", "signal", "macros"] } +tokio = { version = "1", features = [ + "rt-multi-thread", + "sync", + "signal", + "macros", +] } tokio-stream = { version = "0.1", features = ["sync"] } tokio-util = { version = "0.7", features = ["codec", "compat", "time"] } tracing = "0.1.40" @@ -267,7 +282,7 @@ validator_dir = { path = "common/validator_dir" } validator_http_api = { path = "validator_client/http_api" } validator_http_metrics = { 
path = "validator_client/http_metrics" } validator_metrics = { path = "validator_client/validator_metrics" } -validator_store= { path = "validator_client/validator_store" } +validator_store = { path = "validator_client/validator_store" } warp_utils = { path = "common/warp_utils" } xdelta3 = { git = "http://github.com/sigp/xdelta3-rs", rev = "50d63cdf1878e5cf3538e9aae5eed34a22c64e4a" } zstd = "0.13" From 432755c5bd7d2abdd7b31e06ad0b31d7b8ee1cdd Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 7 Jan 2025 08:48:35 +1100 Subject: [PATCH 21/36] Bump ssz_types version --- Cargo.lock | 59 +++++++++++++++++++++++++++++++++--------------------- Cargo.toml | 6 +++--- 2 files changed, 39 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 995ebcebdcd..45263d544c0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -827,7 +827,7 @@ dependencies = [ "tempfile", "tokio", "tokio-stream", - "tree_hash", + "tree_hash 0.9.0", "tree_hash_derive", "types", ] @@ -1035,7 +1035,7 @@ dependencies = [ "rand", "safe_arith", "serde", - "tree_hash", + "tree_hash 0.9.0", "zeroize", ] @@ -1969,7 +1969,7 @@ dependencies = [ "reqwest", "serde_json", "sha2 0.9.9", - "tree_hash", + "tree_hash 0.9.0", "types", ] @@ -2368,7 +2368,7 @@ dependencies = [ "snap", "state_processing", "swap_or_not_shuffle", - "tree_hash", + "tree_hash 0.9.0", "tree_hash_derive", "types", ] @@ -2571,7 +2571,7 @@ dependencies = [ "superstruct", "task_executor", "tokio", - "tree_hash", + "tree_hash 0.9.0", "types", ] @@ -3094,7 +3094,7 @@ dependencies = [ "tempfile", "tokio", "tokio-stream", - "tree_hash", + "tree_hash 0.9.0", "tree_hash_derive", "triehash", "types", @@ -3487,7 +3487,7 @@ dependencies = [ "slog", "state_processing", "tokio", - "tree_hash", + "tree_hash 0.9.0", "types", ] @@ -4012,7 +4012,7 @@ dependencies = [ "task_executor", "tokio", "tokio-stream", - "tree_hash", + "tree_hash 0.9.0", "types", "warp", "warp_utils", @@ -4706,7 +4706,7 @@ dependencies = [ "rust_eth_kzg", "serde", "serde_json", - "tree_hash", + "tree_hash 0.9.0", ] [[package]] @@ -4755,7 +4755,7 @@ dependencies = [ "snap", "state_processing", "store", - "tree_hash", + "tree_hash 0.9.0", "types", "validator_dir", ] @@ -5692,7 +5692,7 @@ dependencies = [ "rayon", "serde", "smallvec", - "tree_hash", + "tree_hash 0.8.0", "triomphe", "typenum", "vec_map", @@ -8144,7 +8144,7 @@ dependencies = [ "ssz_types", "strum", "tempfile", - "tree_hash", + "tree_hash 0.9.0", "tree_hash_derive", "types", ] @@ -8370,18 +8370,18 @@ dependencies = [ [[package]] name = "ssz_types" -version = "0.8.0" -source = "git+https://github.com/sigp/ssz_types.git?rev=2b01793356bb1b5946df8f9d317b2a8d8feb602b#2b01793356bb1b5946df8f9d317b2a8d8feb602b" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22bc24c8a61256950632fb6b68ea09f6b5c988070924c6292eb5933635202e00" dependencies = [ "arbitrary", - "derivative", "ethereum_serde_utils", "ethereum_ssz", "itertools 0.13.0", "serde", "serde_derive", "smallvec", - "tree_hash", + "tree_hash 0.9.0", "typenum", ] @@ -8415,7 +8415,7 @@ dependencies = [ "ssz_types", "test_random_derive", "tokio", - "tree_hash", + "tree_hash 0.9.0", "types", ] @@ -9284,11 +9284,24 @@ dependencies = [ "smallvec", ] +[[package]] +name = "tree_hash" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cc60ae4c4236ee721305d0f0b5aa3e8ef5b66f3fa61d17072430bc246d6694a" +dependencies = [ + "alloy-primitives", + "ethereum_hashing", + "ethereum_ssz", + "smallvec", + "typenum", +] + 
[[package]] name = "tree_hash_derive" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0857056ca4eb5de8c417309be42bcff6017b47e86fbaddde609b4633f66061e" +checksum = "5c8ee219344a44410237d7b849d150a445a621d0cae92d6fbab10d84d4428870" dependencies = [ "darling 0.20.10", "proc-macro2", @@ -9377,7 +9390,7 @@ dependencies = [ "tempfile", "test_random_derive", "tokio", - "tree_hash", + "tree_hash 0.9.0", "tree_hash_derive", ] @@ -9603,7 +9616,7 @@ dependencies = [ "lockfile", "rand", "tempfile", - "tree_hash", + "tree_hash 0.9.0", "types", ] @@ -9690,7 +9703,7 @@ dependencies = [ "serde_json", "tempfile", "tokio", - "tree_hash", + "tree_hash 0.9.0", "types", "validator_http_api", "zeroize", @@ -9719,7 +9732,7 @@ dependencies = [ "slog", "slot_clock", "tokio", - "tree_hash", + "tree_hash 0.9.0", "types", "validator_metrics", "validator_store", diff --git a/Cargo.toml b/Cargo.toml index 57c8bb0751f..d068e8f54c1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -192,7 +192,7 @@ slog-term = "2" sloggers = { version = "2", features = ["json"] } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = { rev = "2b01793356bb1b5946df8f9d317b2a8d8feb602b", git = "https://github.com/sigp/ssz_types.git" } +ssz_types = "0.10" strum = { version = "0.24", features = ["derive"] } superstruct = "0.8" syn = "1" @@ -211,8 +211,8 @@ tracing-appender = "0.2" tracing-core = "0.1" tracing-log = "0.2" tracing-subscriber = { version = "0.3", features = ["env-filter"] } -tree_hash = "0.8" -tree_hash_derive = "0.8" +tree_hash = "0.9" +tree_hash_derive = "0.9" url = "2" uuid = { version = "0.8", features = ["serde", "v4"] } warp = { version = "0.3.7", default-features = false, features = ["tls"] } From 9dffa74ae563e641b04ba83aeff64503820c1dee Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 7 Jan 2025 09:08:27 +1100 Subject: [PATCH 22/36] Change milhouse commit --- Cargo.lock | 51 ++++++++++++++++++++------------------------------- Cargo.toml | 2 +- 2 files changed, 21 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 45263d544c0..dae7834f3b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -827,7 +827,7 @@ dependencies = [ "tempfile", "tokio", "tokio-stream", - "tree_hash 0.9.0", + "tree_hash", "tree_hash_derive", "types", ] @@ -1035,7 +1035,7 @@ dependencies = [ "rand", "safe_arith", "serde", - "tree_hash 0.9.0", + "tree_hash", "zeroize", ] @@ -1969,7 +1969,7 @@ dependencies = [ "reqwest", "serde_json", "sha2 0.9.9", - "tree_hash 0.9.0", + "tree_hash", "types", ] @@ -2368,7 +2368,7 @@ dependencies = [ "snap", "state_processing", "swap_or_not_shuffle", - "tree_hash 0.9.0", + "tree_hash", "tree_hash_derive", "types", ] @@ -2571,7 +2571,7 @@ dependencies = [ "superstruct", "task_executor", "tokio", - "tree_hash 0.9.0", + "tree_hash", "types", ] @@ -3094,7 +3094,7 @@ dependencies = [ "tempfile", "tokio", "tokio-stream", - "tree_hash 0.9.0", + "tree_hash", "tree_hash_derive", "triehash", "types", @@ -3487,7 +3487,7 @@ dependencies = [ "slog", "state_processing", "tokio", - "tree_hash 0.9.0", + "tree_hash", "types", ] @@ -4012,7 +4012,7 @@ dependencies = [ "task_executor", "tokio", "tokio-stream", - "tree_hash 0.9.0", + "tree_hash", "types", "warp", "warp_utils", @@ -4706,7 +4706,7 @@ dependencies = [ "rust_eth_kzg", "serde", "serde_json", - "tree_hash 0.9.0", + "tree_hash", ] [[package]] @@ -4755,7 +4755,7 @@ dependencies = [ "snap", "state_processing", "store", - "tree_hash 0.9.0", + "tree_hash", "types", 
"validator_dir", ] @@ -5678,8 +5678,8 @@ dependencies = [ [[package]] name = "milhouse" -version = "0.3.0" -source = "git+https://github.com/paulhauner/milhouse?rev=b6bccd3df29c7056bec56987f428df5f7a0f8b04#b6bccd3df29c7056bec56987f428df5f7a0f8b04" +version = "0.4.0" +source = "git+https://github.com/paulhauner/milhouse?rev=1df8db756200fa1ad77af17dbc036b85a444fd1a#1df8db756200fa1ad77af17dbc036b85a444fd1a" dependencies = [ "alloy-primitives", "arbitrary", @@ -5692,7 +5692,7 @@ dependencies = [ "rayon", "serde", "smallvec", - "tree_hash 0.8.0", + "tree_hash", "triomphe", "typenum", "vec_map", @@ -8144,7 +8144,7 @@ dependencies = [ "ssz_types", "strum", "tempfile", - "tree_hash 0.9.0", + "tree_hash", "tree_hash_derive", "types", ] @@ -8381,7 +8381,7 @@ dependencies = [ "serde", "serde_derive", "smallvec", - "tree_hash 0.9.0", + "tree_hash", "typenum", ] @@ -8415,7 +8415,7 @@ dependencies = [ "ssz_types", "test_random_derive", "tokio", - "tree_hash 0.9.0", + "tree_hash", "types", ] @@ -9273,17 +9273,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "tree_hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373495c23db675a5192de8b610395e1bec324d596f9e6111192ce903dc11403a" -dependencies = [ - "alloy-primitives", - "ethereum_hashing", - "smallvec", -] - [[package]] name = "tree_hash" version = "0.9.0" @@ -9390,7 +9379,7 @@ dependencies = [ "tempfile", "test_random_derive", "tokio", - "tree_hash 0.9.0", + "tree_hash", "tree_hash_derive", ] @@ -9616,7 +9605,7 @@ dependencies = [ "lockfile", "rand", "tempfile", - "tree_hash 0.9.0", + "tree_hash", "types", ] @@ -9703,7 +9692,7 @@ dependencies = [ "serde_json", "tempfile", "tokio", - "tree_hash 0.9.0", + "tree_hash", "types", "validator_http_api", "zeroize", @@ -9732,7 +9721,7 @@ dependencies = [ "slog", "slot_clock", "tokio", - "tree_hash 0.9.0", + "tree_hash", "types", "validator_metrics", "validator_store", diff --git a/Cargo.toml b/Cargo.toml index d068e8f54c1..929e7764a2d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -155,7 +155,7 @@ libsecp256k1 = "0.7" log = "0.4" lru = "0.12" maplit = "1" -milhouse = { rev = "b6bccd3df29c7056bec56987f428df5f7a0f8b04", git = "https://github.com/paulhauner/milhouse" } +milhouse = { rev = "1df8db756200fa1ad77af17dbc036b85a444fd1a", git = "https://github.com/paulhauner/milhouse" } num_cpus = "1" parking_lot = "0.12" paste = "1" From 07785d97155ac602666cbc7938a6880c3ab84a4a Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 7 Jan 2025 10:50:13 +1100 Subject: [PATCH 23/36] Bump ssz version --- Cargo.lock | 8 ++++---- Cargo.toml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dae7834f3b8..b44e0bb0083 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2839,9 +2839,9 @@ dependencies = [ [[package]] name = "ethereum_ssz" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "036c84bd29bff35e29bbee3c8fc0e2fb95db12b6f2f3cae82a827fbc97256f3a" +checksum = "862e41ea8eea7508f70cfd8cd560f0c34bb0af37c719a8e06c2672f0f031d8e5" dependencies = [ "alloy-primitives", "arbitrary", @@ -2855,9 +2855,9 @@ dependencies = [ [[package]] name = "ethereum_ssz_derive" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dc8e67e1f770f5aa4c2c2069aaaf9daee7ac21bed357a71b911b37a58966cfb" +checksum = "d31ecf6640112f61dc34b4d8359c081102969af0edd18381fed2052f6db6a410" dependencies = [ "darling 0.20.10", "proc-macro2", 
diff --git a/Cargo.toml b/Cargo.toml index 929e7764a2d..060ec771306 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -138,8 +138,8 @@ discv5 = { version = "0.9", features = ["libp2p"] } env_logger = "0.9" ethereum_hashing = "0.7.0" ethereum_serde_utils = "0.7" -ethereum_ssz = "0.8" -ethereum_ssz_derive = "0.8" +ethereum_ssz = "0.8.2" +ethereum_ssz_derive = "0.8.2" ethers-core = "1" ethers-providers = { version = "1", default-features = false } exit-future = "0.2" From c67f9b1991e52ac04f698c80aaffe94e8e4271c1 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 7 Jan 2025 10:50:25 +1100 Subject: [PATCH 24/36] Fix bitfield compile errors --- .../src/per_epoch_processing/errors.rs | 9 ++++++++- consensus/types/src/attestation.rs | 13 +++++++------ consensus/types/src/sync_aggregate.rs | 3 ++- consensus/types/src/sync_committee_contribution.rs | 3 ++- 4 files changed, 19 insertions(+), 9 deletions(-) diff --git a/consensus/state_processing/src/per_epoch_processing/errors.rs b/consensus/state_processing/src/per_epoch_processing/errors.rs index f45c55a7acf..7485e365ec2 100644 --- a/consensus/state_processing/src/per_epoch_processing/errors.rs +++ b/consensus/state_processing/src/per_epoch_processing/errors.rs @@ -19,9 +19,10 @@ pub enum EpochProcessingError { BeaconStateError(BeaconStateError), InclusionError(InclusionError), SszTypesError(ssz_types::Error), + BitfieldError(ssz::BitfieldError), ArithError(safe_arith::ArithError), InconsistentStateFork(InconsistentFork), - InvalidJustificationBit(ssz_types::Error), + InvalidJustificationBit(ssz::BitfieldError), InvalidFlagIndex(usize), MilhouseError(milhouse::Error), EpochCache(EpochCacheError), @@ -49,6 +50,12 @@ impl From for EpochProcessingError { } } +impl From for EpochProcessingError { + fn from(e: ssz::BitfieldError) -> EpochProcessingError { + EpochProcessingError::BitfieldError(e) + } +} + impl From for EpochProcessingError { fn from(e: safe_arith::ArithError) -> EpochProcessingError { EpochProcessingError::ArithError(e) diff --git a/consensus/types/src/attestation.rs b/consensus/types/src/attestation.rs index 190964736fe..73450750a57 100644 --- a/consensus/types/src/attestation.rs +++ b/consensus/types/src/attestation.rs @@ -19,6 +19,7 @@ use super::{ #[derive(Debug, PartialEq)] pub enum Error { SszTypesError(ssz_types::Error), + BitfieldError(ssz::BitfieldError), AlreadySigned(usize), SubnetCountIsZero(ArithError), IncorrectStateVariant, @@ -225,7 +226,7 @@ impl Attestation { } } - pub fn get_aggregation_bit(&self, index: usize) -> Result { + pub fn get_aggregation_bit(&self, index: usize) -> Result { match self { Attestation::Base(att) => att.aggregation_bits.get(index), Attestation::Electra(att) => att.aggregation_bits.get(index), @@ -337,13 +338,13 @@ impl AttestationElectra { if self .aggregation_bits .get(committee_position) - .map_err(Error::SszTypesError)? + .map_err(Error::BitfieldError)? { Err(Error::AlreadySigned(committee_position)) } else { self.aggregation_bits .set(committee_position, true) - .map_err(Error::SszTypesError)?; + .map_err(Error::BitfieldError)?; self.signature.add_assign(signature); @@ -395,13 +396,13 @@ impl AttestationBase { if self .aggregation_bits .get(committee_position) - .map_err(Error::SszTypesError)? + .map_err(Error::BitfieldError)? 
{ Err(Error::AlreadySigned(committee_position)) } else { self.aggregation_bits .set(committee_position, true) - .map_err(Error::SszTypesError)?; + .map_err(Error::BitfieldError)?; self.signature.add_assign(signature); @@ -411,7 +412,7 @@ impl AttestationBase { pub fn extend_aggregation_bits( &self, - ) -> Result, ssz_types::Error> { + ) -> Result, ssz::BitfieldError> { self.aggregation_bits.resize::() } } diff --git a/consensus/types/src/sync_aggregate.rs b/consensus/types/src/sync_aggregate.rs index 43f72a39240..12b91501ae0 100644 --- a/consensus/types/src/sync_aggregate.rs +++ b/consensus/types/src/sync_aggregate.rs @@ -11,6 +11,7 @@ use tree_hash_derive::TreeHash; #[derive(Debug, PartialEq)] pub enum Error { SszTypesError(ssz_types::Error), + BitfieldError(ssz::BitfieldError), ArithError(ArithError), } @@ -68,7 +69,7 @@ impl SyncAggregate { sync_aggregate .sync_committee_bits .set(participant_index, true) - .map_err(Error::SszTypesError)?; + .map_err(Error::BitfieldError)?; } } sync_aggregate diff --git a/consensus/types/src/sync_committee_contribution.rs b/consensus/types/src/sync_committee_contribution.rs index c348c3e8be3..e6870b9442f 100644 --- a/consensus/types/src/sync_committee_contribution.rs +++ b/consensus/types/src/sync_committee_contribution.rs @@ -10,6 +10,7 @@ use tree_hash_derive::TreeHash; #[derive(Debug, PartialEq)] pub enum Error { SszTypesError(ssz_types::Error), + BitfieldError(ssz::BitfieldError), AlreadySigned(usize), SubnetCountIsZero(ArithError), } @@ -53,7 +54,7 @@ impl SyncCommitteeContribution { ) -> Result { let mut bits = BitVector::new(); bits.set(validator_sync_committee_index, true) - .map_err(Error::SszTypesError)?; + .map_err(Error::BitfieldError)?; Ok(Self { slot: message.slot, beacon_block_root: message.beacon_block_root, From af132401255af985458eda7edd503616cb80d5ac Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Thu, 16 Jan 2025 13:21:25 +1100 Subject: [PATCH 25/36] Bump to milhouse 0.5 --- Cargo.lock | 5 +++-- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b44e0bb0083..736db44e6d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5678,8 +5678,9 @@ dependencies = [ [[package]] name = "milhouse" -version = "0.4.0" -source = "git+https://github.com/paulhauner/milhouse?rev=1df8db756200fa1ad77af17dbc036b85a444fd1a#1df8db756200fa1ad77af17dbc036b85a444fd1a" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb1ada1f56cc1c79f40517fdcbf57e19f60424a3a1ce372c3fe9b22e4fdd83eb" dependencies = [ "alloy-primitives", "arbitrary", diff --git a/Cargo.toml b/Cargo.toml index 060ec771306..d02a3d4435c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -155,7 +155,7 @@ libsecp256k1 = "0.7" log = "0.4" lru = "0.12" maplit = "1" -milhouse = { rev = "1df8db756200fa1ad77af17dbc036b85a444fd1a", git = "https://github.com/paulhauner/milhouse" } +milhouse = "0.5" num_cpus = "1" parking_lot = "0.12" paste = "1" From 6fe58c919075bbcfad712287eaa602f76dbfd47f Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Mon, 20 Jan 2025 16:59:03 +1100 Subject: [PATCH 26/36] Fix test_utils --- beacon_node/beacon_chain/src/test_utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 8ae5e3bcabc..5e95475b70a 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -3104,7 +3104,7 @@ pub fn generate_rand_block_and_blobs( let (bundle, 
transactions) = execution_layer::test_utils::generate_blobs::(num_blobs).unwrap(); payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { + for tx in &transactions { payload.execution_payload.transactions.push(tx).unwrap(); } message.body.blob_kzg_commitments = bundle.commitments.clone(); From 01e1c1757b1735b48e883acc7857f2f628f7e20d Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 4 Feb 2025 17:45:57 +1100 Subject: [PATCH 27/36] Add ssz_fixed_len impl --- consensus/types/src/checkpoint.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/consensus/types/src/checkpoint.rs b/consensus/types/src/checkpoint.rs index 74c2775ba06..59f58c6aab8 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/checkpoint.rs @@ -33,6 +33,10 @@ impl Encode for Checkpoint { true } + fn ssz_fixed_len() -> usize { + ::ssz_fixed_len().saturating_add(::ssz_fixed_len()) + } + fn ssz_append(&self, buf: &mut Vec) { self.epoch.ssz_append(buf); self.root.ssz_append(buf); From 2d39282ac35d60fd15c3c3bf799272c2edcdb3c4 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 4 Feb 2025 17:46:08 +1100 Subject: [PATCH 28/36] Avoid clippy ignores with saturating math --- consensus/types/src/checkpoint.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/consensus/types/src/checkpoint.rs b/consensus/types/src/checkpoint.rs index 59f58c6aab8..4491566e944 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/checkpoint.rs @@ -42,9 +42,10 @@ impl Encode for Checkpoint { self.root.ssz_append(buf); } - #[allow(clippy::arithmetic_side_effects)] fn ssz_bytes_len(&self) -> usize { - self.epoch.ssz_bytes_len() + self.root.ssz_bytes_len() + self.epoch + .ssz_bytes_len() + .saturating_add(self.root.ssz_bytes_len()) } } @@ -54,9 +55,8 @@ impl Decode for Checkpoint { true } - #[allow(clippy::arithmetic_side_effects)] fn ssz_fixed_len() -> usize { - ::ssz_fixed_len() + ::ssz_fixed_len() + ::ssz_fixed_len().saturating_add(::ssz_fixed_len()) } fn from_ssz_bytes(bytes: &[u8]) -> Result { From 10cf10f2282dddb8c89ec73860995086ffddace5 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Tue, 4 Feb 2025 17:59:49 +1100 Subject: [PATCH 29/36] Use `String` for decoding --- consensus/types/src/transactions.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/consensus/types/src/transactions.rs b/consensus/types/src/transactions.rs index b4e280b1279..aeb3ee70125 100644 --- a/consensus/types/src/transactions.rs +++ b/consensus/types/src/transactions.rs @@ -257,8 +257,8 @@ where { let mut txs: Transactions = <_>::default(); - while let Some(hex_str) = seq.next_element::<&str>()? { - let bytes = hex::decode(hex_str).map_err(serde::de::Error::custom)?; + while let Some(hex_str) = seq.next_element::()? { + let bytes = hex::decode(&hex_str).map_err(serde::de::Error::custom)?; txs.push(&bytes).map_err(|e| { serde::de::Error::custom(format!("failed to deserialize transaction: {:?}.", e)) })?; From 9900ad870ad5fa8acdc931962deaf3169a112c92 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 5 Feb 2025 06:27:29 +1100 Subject: [PATCH 30/36] Revert "Avoid clippy ignores with saturating math" This reverts commit 2d39282ac35d60fd15c3c3bf799272c2edcdb3c4. 
--- consensus/types/src/checkpoint.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/consensus/types/src/checkpoint.rs b/consensus/types/src/checkpoint.rs index 4491566e944..59f58c6aab8 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/checkpoint.rs @@ -42,10 +42,9 @@ impl Encode for Checkpoint { self.root.ssz_append(buf); } + #[allow(clippy::arithmetic_side_effects)] fn ssz_bytes_len(&self) -> usize { - self.epoch - .ssz_bytes_len() - .saturating_add(self.root.ssz_bytes_len()) + self.epoch.ssz_bytes_len() + self.root.ssz_bytes_len() } } @@ -55,8 +54,9 @@ impl Decode for Checkpoint { true } + #[allow(clippy::arithmetic_side_effects)] fn ssz_fixed_len() -> usize { - ::ssz_fixed_len().saturating_add(::ssz_fixed_len()) + ::ssz_fixed_len() + ::ssz_fixed_len() } fn from_ssz_bytes(bytes: &[u8]) -> Result { From 88c85dabe1e11ee26c43aa28a679cadd5b1b406b Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 5 Feb 2025 06:29:38 +1100 Subject: [PATCH 31/36] Use unchecked math --- consensus/types/src/checkpoint.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/consensus/types/src/checkpoint.rs b/consensus/types/src/checkpoint.rs index 59f58c6aab8..0caaedce5fa 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/checkpoint.rs @@ -33,8 +33,9 @@ impl Encode for Checkpoint { true } + #[allow(clippy::arithmetic_side_effects)] fn ssz_fixed_len() -> usize { - ::ssz_fixed_len().saturating_add(::ssz_fixed_len()) + ::ssz_fixed_len() + ::ssz_fixed_len() } fn ssz_append(&self, buf: &mut Vec) { From 91ecdde04d24c195ee25285b336925b33c2496a1 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 5 Feb 2025 09:08:13 +1100 Subject: [PATCH 32/36] Add test for ssz_bytes_len --- consensus/types/src/transactions.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/consensus/types/src/transactions.rs b/consensus/types/src/transactions.rs index aeb3ee70125..2cbec9de901 100644 --- a/consensus/types/src/transactions.rs +++ b/consensus/types/src/transactions.rs @@ -498,6 +498,11 @@ mod tests { #[test] fn ssz() { for (test, transactions, reference) in TestVectors::default().iter() { + assert_eq!( + transactions.ssz_bytes_len(), + reference.ssz_bytes_len(), + "{test} - ssz_bytes_len" + ); assert_eq!( transactions.as_ssz_bytes(), reference.as_ssz_bytes(), From ebfdc0bfab89320237168167c669f8e94df6bdaf Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 5 Feb 2025 09:28:41 +1100 Subject: [PATCH 33/36] Fix tests --- beacon_node/lighthouse_network/src/rpc/codec.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index 2b85167d2d0..e02b4818003 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -1002,9 +1002,9 @@ mod tests { }) } - fn transactions() -> Transactions { + fn transactions(count: usize) -> Transactions { let mut transactions = Transactions::default(); - for _ in 0..100000 { + for _ in 0..count { transactions.push(&[0; 1024]).unwrap() } transactions @@ -1018,7 +1018,7 @@ mod tests { let mut block: BeaconBlockBellatrix<_, FullPayload> = BeaconBlockBellatrix::empty(&Spec::default_spec()); - block.body.execution_payload.execution_payload.transactions = transactions(); + block.body.execution_payload.execution_payload.transactions = transactions(5000); let block = BeaconBlock::Bellatrix(block); assert!(block.ssz_bytes_len() <= 
max_rpc_size(fork_context, spec.max_chunk_size as usize)); @@ -1035,7 +1035,7 @@ mod tests { let mut block: BeaconBlockBellatrix<_, FullPayload> = BeaconBlockBellatrix::empty(&Spec::default_spec()); - block.body.execution_payload.execution_payload.transactions = transactions(); + block.body.execution_payload.execution_payload.transactions = transactions(100000); let block = BeaconBlock::Bellatrix(block); assert!(block.ssz_bytes_len() > max_rpc_size(fork_context, spec.max_chunk_size as usize)); From 505429b1f1727d1403e13e1ae7a3a8a64daf5e12 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 5 Feb 2025 13:36:07 +1100 Subject: [PATCH 34/36] Bump size tests --- consensus/types/src/attestation.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/consensus/types/src/attestation.rs b/consensus/types/src/attestation.rs index 3d463dea3ab..da27c53d295 100644 --- a/consensus/types/src/attestation.rs +++ b/consensus/types/src/attestation.rs @@ -641,12 +641,12 @@ mod tests { let attestation_data = size_of::(); let signature = size_of::(); - assert_eq!(aggregation_bits, 56); + assert_eq!(aggregation_bits, 152); assert_eq!(attestation_data, 128); assert_eq!(signature, 288 + 16); let attestation_expected = aggregation_bits + attestation_data + signature; - assert_eq!(attestation_expected, 488); + assert_eq!(attestation_expected, 584); assert_eq!( size_of::>(), attestation_expected @@ -664,13 +664,13 @@ mod tests { size_of::::MaxCommitteesPerSlot>>(); let signature = size_of::(); - assert_eq!(aggregation_bits, 56); - assert_eq!(committee_bits, 56); + assert_eq!(aggregation_bits, 152); + assert_eq!(committee_bits, 152); assert_eq!(attestation_data, 128); assert_eq!(signature, 288 + 16); let attestation_expected = aggregation_bits + committee_bits + attestation_data + signature; - assert_eq!(attestation_expected, 544); + assert_eq!(attestation_expected, 736); assert_eq!( size_of::>(), attestation_expected From 200ff5342997acf08a55084b533c7806fa8f9d24 Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Wed, 5 Feb 2025 16:45:57 +1100 Subject: [PATCH 35/36] Wrap `MetaData` in an Arc --- .../lighthouse_network/src/rpc/codec.rs | 21 +++++++++++-------- .../lighthouse_network/src/rpc/methods.rs | 2 +- .../lighthouse_network/src/service/mod.rs | 4 ++-- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index e02b4818003..07631970bc5 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -765,8 +765,8 @@ fn handle_rpc_response( SupportedProtocol::PingV1 => Ok(Some(RpcSuccessResponse::Pong(Ping { data: u64::from_ssz_bytes(decoded_buffer)?, }))), - SupportedProtocol::MetaDataV1 => Ok(Some(RpcSuccessResponse::MetaData(MetaData::V1( - MetaDataV1::from_ssz_bytes(decoded_buffer)?, + SupportedProtocol::MetaDataV1 => Ok(Some(RpcSuccessResponse::MetaData(Arc::new( + MetaData::V1(MetaDataV1::from_ssz_bytes(decoded_buffer)?), )))), SupportedProtocol::LightClientBootstrapV1 => match fork_name { Some(fork_name) => Ok(Some(RpcSuccessResponse::LightClientBootstrap(Arc::new( @@ -826,11 +826,11 @@ fn handle_rpc_response( )), }, // MetaData V2/V3 responses have no context bytes, so behave similarly to V1 responses - SupportedProtocol::MetaDataV3 => Ok(Some(RpcSuccessResponse::MetaData(MetaData::V3( - MetaDataV3::from_ssz_bytes(decoded_buffer)?, + SupportedProtocol::MetaDataV3 => Ok(Some(RpcSuccessResponse::MetaData(Arc::new( + 
MetaData::V3(MetaDataV3::from_ssz_bytes(decoded_buffer)?), )))), - SupportedProtocol::MetaDataV2 => Ok(Some(RpcSuccessResponse::MetaData(MetaData::V2( - MetaDataV2::from_ssz_bytes(decoded_buffer)?, + SupportedProtocol::MetaDataV2 => Ok(Some(RpcSuccessResponse::MetaData(Arc::new( + MetaData::V2(MetaDataV2::from_ssz_bytes(decoded_buffer)?), )))), SupportedProtocol::BlocksByRangeV2 => match fork_name { Some(ForkName::Altair) => Ok(Some(RpcSuccessResponse::BlocksByRange(Arc::new( @@ -1110,28 +1110,31 @@ mod tests { Ping { data: 1 } } - fn metadata() -> MetaData { + fn metadata() -> Arc> { MetaData::V1(MetaDataV1 { seq_number: 1, attnets: EnrAttestationBitfield::::default(), }) + .into() } - fn metadata_v2() -> MetaData { + fn metadata_v2() -> Arc> { MetaData::V2(MetaDataV2 { seq_number: 1, attnets: EnrAttestationBitfield::::default(), syncnets: EnrSyncCommitteeBitfield::::default(), }) + .into() } - fn metadata_v3() -> MetaData { + fn metadata_v3() -> Arc> { MetaData::V3(MetaDataV3 { seq_number: 1, attnets: EnrAttestationBitfield::::default(), syncnets: EnrSyncCommitteeBitfield::::default(), custody_group_count: 1, }) + .into() } /// Encodes the given protocol response as bytes. diff --git a/beacon_node/lighthouse_network/src/rpc/methods.rs b/beacon_node/lighthouse_network/src/rpc/methods.rs index ad6bea455ec..0f7daa9feb4 100644 --- a/beacon_node/lighthouse_network/src/rpc/methods.rs +++ b/beacon_node/lighthouse_network/src/rpc/methods.rs @@ -557,7 +557,7 @@ pub enum RpcSuccessResponse { Pong(Ping), /// A response to a META_DATA request. - MetaData(MetaData), + MetaData(Arc>), } /// Indicates which response is being terminated by a stream termination response. diff --git a/beacon_node/lighthouse_network/src/service/mod.rs b/beacon_node/lighthouse_network/src/service/mod.rs index 8586fd9cd36..9c3534aef90 100644 --- a/beacon_node/lighthouse_network/src/service/mod.rs +++ b/beacon_node/lighthouse_network/src/service/mod.rs @@ -1186,7 +1186,7 @@ impl Network { ) { let metadata = self.network_globals.local_metadata.read().clone(); // The encoder is responsible for sending the negotiated version of the metadata - let event = RpcResponse::Success(RpcSuccessResponse::MetaData(metadata)); + let event = RpcResponse::Success(RpcSuccessResponse::MetaData(Arc::new(metadata))); self.eth2_rpc_mut() .send_response(peer_id, id, request_id, event); } @@ -1601,7 +1601,7 @@ impl Network { } RpcSuccessResponse::MetaData(meta_data) => { self.peer_manager_mut() - .meta_data_response(&peer_id, meta_data); + .meta_data_response(&peer_id, meta_data.as_ref().clone()); None } /* Network propagated protocols */ From adf95fa7878668c5904d43f5b3b1f2ad61cf642c Mon Sep 17 00:00:00 2001 From: Paul Hauner Date: Thu, 6 Feb 2025 05:05:02 +1100 Subject: [PATCH 36/36] Fix stack size clippy lint --- beacon_node/beacon_chain/tests/rewards.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/beacon_node/beacon_chain/tests/rewards.rs b/beacon_node/beacon_chain/tests/rewards.rs index be7045c54a9..7ce73b6d9c9 100644 --- a/beacon_node/beacon_chain/tests/rewards.rs +++ b/beacon_node/beacon_chain/tests/rewards.rs @@ -296,8 +296,7 @@ async fn test_rewards_base_multi_inclusion() { .extend_slots(E::slots_per_epoch() as usize * 2 - 4) .await; - // pin to reduce stack size for clippy - Box::pin(check_all_base_rewards(&harness, initial_balances)).await; + check_all_base_rewards(&harness, initial_balances).await; } #[tokio::test] @@ -583,7 +582,8 @@ async fn check_all_base_rewards( harness: &BeaconChainHarness>, balances: Vec, 
) { - check_all_base_rewards_for_subset(harness, balances, vec![]).await; + // The box reduces the size on the stack for a clippy lint. + Box::pin(check_all_base_rewards_for_subset(harness, balances, vec![])).await; } async fn check_all_base_rewards_for_subset(