perf(katana): optimisations + benchmark setup #2900

Merged · 6 commits · Feb 7, 2025
3 changes: 3 additions & 0 deletions Cargo.lock

(Generated file; diff not rendered.)

1 change: 1 addition & 0 deletions Cargo.toml
@@ -259,6 +259,7 @@ alloy-primitives = { version = "0.8.3", default-features = false }
alloy-sol-types = { version = "0.8.3", default-features = false }

criterion = "0.5.1"
pprof = { version = "0.13.0", features = [ "criterion", "flamegraph" ] }

# Slot integration. Dojo doesn't need to manually include `account_sdk` as a dependency, as `slot` already re-exports it.
slot = { git = "https://github.com/cartridge-gg/slot", rev = "1298a30" }
9 changes: 8 additions & 1 deletion crates/katana/core/Cargo.toml
@@ -11,7 +11,7 @@ katana-chain-spec.workspace = true
katana-db.workspace = true
katana-executor = { workspace = true, features = [ "blockifier" ] }
katana-pool.workspace = true
katana-primitives.workspace = true
katana-primitives = { workspace = true, features = [ "arbitrary" ]}
katana-provider.workspace = true
katana-tasks.workspace = true
katana-trie.workspace = true
@@ -26,6 +26,7 @@ metrics.workspace = true
num-traits.workspace = true
parking_lot.workspace = true
reqwest.workspace = true
rayon.workspace = true
serde.workspace = true
serde_json.workspace = true
starknet.workspace = true
@@ -48,10 +49,16 @@ alloy-transport = { workspace = true, default-features = false }
[dev-dependencies]
arbitrary.workspace = true
assert_matches.workspace = true
criterion.workspace = true
hex.workspace = true
rand.workspace = true
rstest.workspace = true
tempfile.workspace = true
pprof.workspace = true

[features]
starknet-messaging = [ "dep:starknet-crypto" ]

[[bench]]
name = "commit"
harness = false
197 changes: 197 additions & 0 deletions crates/katana/core/benches/commit.rs
@@ -0,0 +1,197 @@
use std::collections::BTreeMap;
use std::time::Duration;

use arbitrary::{Arbitrary, Unstructured};
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use katana_core::backend::UncommittedBlock;
use katana_primitives::block::PartialHeader;
use katana_primitives::receipt::ReceiptWithTxHash;
use katana_primitives::state::StateUpdates;
use katana_primitives::transaction::TxWithHash;
use katana_primitives::{ContractAddress, Felt};
use katana_provider::providers::db::DbProvider;
use pprof::criterion::{Output, PProfProfiler};

struct BlockConfig {
nb_of_txs: usize,
nb_of_receipts: usize,
nb_of_nonces: usize,
nb_of_storage_keys: usize,
nb_of_storage_values: usize,
nb_of_classes: usize,
nb_of_contracts: usize,
}

const SMALL_BLOCK_CONFIG: BlockConfig = BlockConfig {
nb_of_txs: 1,
nb_of_receipts: 1,
nb_of_nonces: 1,
nb_of_storage_keys: 1,
nb_of_storage_values: 1,
nb_of_classes: 1,
nb_of_contracts: 1,
};

const BIG_BLOCK_CONFIG: BlockConfig = BlockConfig {
nb_of_txs: 20,
nb_of_receipts: 20,
nb_of_nonces: 100,
nb_of_storage_keys: 100,
nb_of_storage_values: 100,
nb_of_classes: 100,
nb_of_contracts: 100,
};

fn commit(block: UncommittedBlock<'_, DbProvider>) {
let _ = block.commit();
}

fn commit_parallel(block: UncommittedBlock<'_, DbProvider>) {
let _ = block.commit_parallel();
}

#[inline(always)]
fn random_array(size: usize) -> Vec<u8> {
(0..size).map(|_| rand::random::<u8>()).collect()
}

#[inline(always)]
fn random_felt() -> Felt {
Felt::arbitrary(&mut Unstructured::new(&random_array(Felt::size_hint(0).0))).unwrap()
}

#[inline(always)]
fn random_tx_with_hash() -> TxWithHash {
TxWithHash::arbitrary(&mut Unstructured::new(&random_array(TxWithHash::size_hint(0).0)))
.unwrap()
}

#[inline(always)]
fn random_receipt_with_hash() -> ReceiptWithTxHash {
ReceiptWithTxHash::arbitrary(&mut Unstructured::new(&random_array(
ReceiptWithTxHash::size_hint(0).0,
)))
.unwrap()
}

#[inline(always)]
fn random_felt_to_felt_map(size: usize) -> BTreeMap<Felt, Felt> {
(0..size).map(|_| (random_felt(), random_felt())).collect()
}

#[inline(always)]
fn random_address_to_felt_map(size: usize) -> BTreeMap<ContractAddress, Felt> {
(0..size).map(|_| (ContractAddress::new(random_felt()), random_felt())).collect()
}

#[inline(always)]
fn random_header() -> PartialHeader {
PartialHeader::arbitrary(&mut Unstructured::new(&random_array(PartialHeader::size_hint(0).0)))
.unwrap()
}

fn build_block(
config: BlockConfig,
) -> (PartialHeader, Vec<TxWithHash>, Vec<ReceiptWithTxHash>, StateUpdates) {
let transactions: Vec<TxWithHash> =
(0..config.nb_of_txs).map(|_| random_tx_with_hash()).collect();

let receipts: Vec<ReceiptWithTxHash> =
(0..config.nb_of_receipts).map(|_| random_receipt_with_hash()).collect();

let nonce_updates: BTreeMap<ContractAddress, Felt> = (0..config.nb_of_nonces)
.map(|_| (ContractAddress::new(random_felt()), random_felt()))
.collect();

let storage_updates: BTreeMap<ContractAddress, BTreeMap<Felt, Felt>> = (0..config
.nb_of_storage_keys)
.map(|_| {
(
ContractAddress::new(random_felt()),
random_felt_to_felt_map(config.nb_of_storage_values),
)
})
.collect();

let declared_classes: BTreeMap<Felt, Felt> = random_felt_to_felt_map(config.nb_of_classes);
let deployed_contracts: BTreeMap<ContractAddress, Felt> =
random_address_to_felt_map(config.nb_of_contracts);

let state_updates = StateUpdates {
nonce_updates,
storage_updates,
declared_classes,
deployed_contracts,
..Default::default()
};

let header = random_header();

(header, transactions, receipts, state_updates)
}

fn commit_small(c: &mut Criterion) {
let mut c = c.benchmark_group("Commit.Small");
c.warm_up_time(Duration::from_secs(1));

let (header, small_transactions, small_receipts, small_state_updates) =
build_block(SMALL_BLOCK_CONFIG);

let block = UncommittedBlock::new(
header,
small_transactions,
small_receipts.as_slice(),
&small_state_updates,
DbProvider::new_ephemeral(),
);

c.bench_function("Serial", |b| {
b.iter_batched(|| block.clone(), |input| commit(black_box(input)), BatchSize::SmallInput);
});

c.bench_function("Parallel", |b| {
b.iter_batched(
|| block.clone(),
|input| commit_parallel(black_box(input)),
BatchSize::SmallInput,
);
});
}

fn commit_big(c: &mut Criterion) {
let mut c = c.benchmark_group("Commit.Big");
c.warm_up_time(Duration::from_secs(1));

let (header, big_transactions, big_receipts, big_state_updates) = build_block(BIG_BLOCK_CONFIG);
let block = UncommittedBlock::new(
header,
big_transactions,
big_receipts.as_slice(),
&big_state_updates,
DbProvider::new_ephemeral(),
);

c.bench_function("Serial", |b| {
b.iter_batched(|| block.clone(), |input| commit(black_box(input)), BatchSize::SmallInput);
});

c.bench_function("Parallel", |b| {
b.iter_batched(
|| block.clone(),
|input| commit_parallel(black_box(input)),
BatchSize::SmallInput,
);
});
}

fn commit_benchmark(c: &mut Criterion) {
commit_small(c);
commit_big(c);
}

criterion_group! {
name = benches;
config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
targets = commit_benchmark
}
criterion_main!(benches);
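
Note on the generator helpers above: random_felt, random_tx_with_hash, random_receipt_with_hash and random_header all follow the same pattern of filling a buffer of size_hint(0).0 random bytes and letting `arbitrary` decode a value from it. A minimal sketch of that pattern collapsed into a single generic helper, assuming only the `arbitrary` and `rand` crates the benchmark already uses (the helper itself is not part of this PR):

use arbitrary::{Arbitrary, Unstructured};

// Hypothetical generic form of the per-type helpers in commit.rs: build a value
// of any type implementing `Arbitrary` from `num_bytes` random bytes.
fn random_arbitrary<T>(num_bytes: usize) -> T
where
    T: for<'a> Arbitrary<'a>,
{
    let bytes: Vec<u8> = (0..num_bytes).map(|_| rand::random::<u8>()).collect();
    T::arbitrary(&mut Unstructured::new(&bytes)).unwrap()
}

// Usage mirroring `random_felt()` above:
// let felt: Felt = random_arbitrary(Felt::size_hint(0).0);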
75 changes: 74 additions & 1 deletion crates/katana/core/src/backend/mod.rs
@@ -21,6 +21,7 @@
use katana_provider::traits::trie::TrieWriter;
use katana_trie::compute_merkle_root;
use parking_lot::RwLock;
use rayon::prelude::*;
use starknet::macros::short_string;
use starknet_types_core::hash::{self, StarkHash};
use tracing::info;
@@ -363,6 +364,7 @@
let transaction_count = self.transactions.len() as u32;
let state_diff_length = self.state_updates.len() as u32;

// optimisation 1
let state_root = self.compute_new_state_root();
let transactions_commitment = self.compute_transaction_commitment();
let events_commitment = self.compute_event_commitment();
@@ -393,6 +395,51 @@
SealedBlock { hash, header, body: self.transactions }
}

pub fn commit_parallel(self) -> SealedBlock {
// get the hash of the latest committed block
let parent_hash = self.header.parent_hash;
let events_count = self.receipts.iter().map(|r| r.events().len() as u32).sum::<u32>();
let transaction_count = self.transactions.len() as u32;
let state_diff_length = self.state_updates.len() as u32;

let mut state_root = Felt::default();
let mut transactions_commitment = Felt::default();
let mut events_commitment = Felt::default();
let mut receipts_commitment = Felt::default();
let mut state_diff_commitment = Felt::default();

rayon::scope(|s| {
s.spawn(|_| state_root = self.compute_new_state_root());
s.spawn(|_| transactions_commitment = self.compute_transaction_commitment());
s.spawn(|_| events_commitment = self.compute_event_commitment_parallel());
s.spawn(|_| receipts_commitment = self.compute_receipt_commitment_parallel());
s.spawn(|_| state_diff_commitment = self.compute_state_diff_commitment());
});

let header = Header {
state_root,
parent_hash,
events_count,
state_diff_length,
transaction_count,
events_commitment,
receipts_commitment,
state_diff_commitment,
transactions_commitment,
number: self.header.number,
timestamp: self.header.timestamp,
l1_da_mode: self.header.l1_da_mode,
l1_gas_prices: self.header.l1_gas_prices,
l1_data_gas_prices: self.header.l1_data_gas_prices,
sequencer_address: self.header.sequencer_address,
protocol_version: self.header.protocol_version,
};

let hash = header.compute_hash();

SealedBlock { hash, header, body: self.transactions }
}


fn compute_transaction_commitment(&self) -> Felt {
let tx_hashes = self.transactions.iter().map(|t| t.hash).collect::<Vec<TxHash>>();
compute_merkle_root::<hash::Poseidon>(&tx_hashes).unwrap()
@@ -403,6 +450,12 @@
compute_merkle_root::<hash::Poseidon>(&receipt_hashes).unwrap()
}

fn compute_receipt_commitment_parallel(&self) -> Felt {
let receipt_hashes =
self.receipts.par_iter().map(|r| r.compute_hash()).collect::<Vec<Felt>>();
compute_merkle_root::<hash::Poseidon>(&receipt_hashes).unwrap()
}

fn compute_state_diff_commitment(&self) -> Felt {
compute_state_diff_hash(self.state_updates.clone())
}
@@ -418,7 +471,6 @@
// the iterator will yield all events from all the receipts, each one paired with the
// transaction hash that emitted it: (tx hash, event).
let events = self.receipts.iter().flat_map(|r| r.events().iter().map(|e| (r.tx_hash, e)));

let mut hashes = Vec::new();
for (tx, event) in events {
let event_hash = event_hash(tx, event);
@@ -429,6 +481,27 @@
compute_merkle_root::<hash::Poseidon>(&hashes).unwrap()
}

fn compute_event_commitment_parallel(&self) -> Felt {
// h(emitter_address, tx_hash, h(keys), h(data))
fn event_hash(tx: TxHash, event: &Event) -> Felt {
let keys_hash = hash::Poseidon::hash_array(&event.keys);
let data_hash = hash::Poseidon::hash_array(&event.data);
hash::Poseidon::hash_array(&[tx, event.from_address.into(), keys_hash, data_hash])
}

// the iterator will yield all events from all the receipts, each one paired with the
// transaction hash that emitted it: (tx hash, event).
let events = self.receipts.iter().flat_map(|r| r.events().iter().map(|e| (r.tx_hash, e)));
let hashes = events
.par_bridge()
.into_par_iter()
.map(|(tx, event)| event_hash(tx, event))
.collect::<Vec<_>>();

// compute events commitment
compute_merkle_root::<hash::Poseidon>(&hashes).unwrap()
}

// state_commitment = hPos("STARKNET_STATE_V0", contract_trie_root, class_trie_root)
fn compute_new_state_root(&self) -> Felt {
let class_trie_root = self
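
Note on commit_parallel above: rayon::scope only returns once every spawned task has finished, and each spawned closure mutably borrows a distinct local, which is what makes the five commitment computations safe to run concurrently and read back afterwards. A standalone sketch of that pattern, illustrative only and not katana code:

use rayon::scope;

// Each spawned task writes to its own local; `scope` blocks until both tasks
// finish, so the locals can be read safely afterwards.
fn fan_out(values: &[u64]) -> (u64, u64) {
    let mut sum = 0u64;
    let mut max = 0u64;

    scope(|s| {
        s.spawn(|_| sum = values.iter().sum());
        s.spawn(|_| max = values.iter().copied().max().unwrap_or(0));
    });

    (sum, max)
}

fn main() {
    assert_eq!(fan_out(&[1, 2, 3]), (6, 3));
}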
2 changes: 1 addition & 1 deletion crates/katana/executor/Cargo.toml
@@ -36,7 +36,7 @@ tokio.workspace = true

criterion.workspace = true
oneshot = { version = "0.1.8", default-features = false, features = [ "std" ] }
pprof = { version = "0.13.0", features = [ "criterion", "flamegraph" ] }
pprof.workspace = true
rayon.workspace = true

[features]
1 change: 1 addition & 0 deletions crates/katana/primitives/src/block.rs
@@ -42,6 +42,7 @@

/// Represents a partial block header.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "arbitrary", derive(::arbitrary::Arbitrary))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct PartialHeader {
pub parent_hash: BlockHash,
1 change: 1 addition & 0 deletions crates/katana/primitives/src/receipt.rs
@@ -178,6 +178,7 @@
}

#[derive(Debug, Clone, AsRef, Deref, PartialEq, Eq)]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct ReceiptWithTxHash {
/// The hash of the transaction.
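
Note on the two derive additions above (block.rs and receipt.rs): the Arbitrary derive is gated behind an `arbitrary` feature, which crates/katana/core/Cargo.toml enables via `katana-primitives = { workspace = true, features = [ "arbitrary" ] }` so the commit benchmark can generate random headers and receipts. A small self-contained sketch of that gating pattern on a hypothetical type, not katana code:

// The Arbitrary impl only exists when the crate is built with `--features arbitrary`.
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(Debug, Clone)]
pub struct Sample {
    pub value: u64,
    pub tag: u8,
}

#[cfg(feature = "arbitrary")]
pub fn sample_from_bytes(bytes: &[u8]) -> Sample {
    use arbitrary::{Arbitrary, Unstructured};
    // Decode a Sample from the provided entropy, as the benchmark does for
    // PartialHeader and ReceiptWithTxHash.
    Sample::arbitrary(&mut Unstructured::new(bytes)).unwrap()
}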