From f3746f6569925029eb6cd15f94f2ff960c69422a Mon Sep 17 00:00:00 2001
From: yihuang
Date: Thu, 17 Oct 2024 16:25:29 +0800
Subject: [PATCH] Problem: gen-tx don't run in parallel for single node
 (#1645)

* Problem: gen-tx don't run in parallel for single node

Solution:
- use multiprocessing library to do parallel tx gen

* Update CHANGELOG.md

Signed-off-by: yihuang

* cleanup

---------

Signed-off-by: yihuang
---
 CHANGELOG.md                                  |  3 +-
 testground/benchmark/benchmark/transaction.py | 48 +++++++++++++++----
 testground/benchmark/benchmark/utils.py       |  8 ++++
 3 files changed, 49 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 224e469c4f..a28118470e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,8 @@
 
 ### Improvements
 
-* (testground)[#1644 ](https://github.com/crypto-org-chain/cronos/pull/1644) load generator retry with backoff on error.
+* [#1645](https://github.com/crypto-org-chain/cronos/pull/1645) Gen test tx in parallel even in single node.
+* (testground)[#1644](https://github.com/crypto-org-chain/cronos/pull/1644) load generator retry with backoff on error.
 
 *Oct 14, 2024*
 
diff --git a/testground/benchmark/benchmark/transaction.py b/testground/benchmark/benchmark/transaction.py
index a0ff05b00f..c54d944e92 100644
--- a/testground/benchmark/benchmark/transaction.py
+++ b/testground/benchmark/benchmark/transaction.py
@@ -1,4 +1,8 @@
 import asyncio
+import itertools
+import multiprocessing
+import os
+from collections import namedtuple
 from pathlib import Path
 
 import aiohttp
@@ -7,7 +11,7 @@
 import ujson
 
 from .erc20 import CONTRACT_ADDRESS
-from .utils import gen_account
+from .utils import gen_account, split
 
 GAS_PRICE = 1000000000
 CHAIN_ID = 777
@@ -48,17 +52,43 @@ def erc20_transfer_tx(nonce: int):
     }
 
 
+Job = namedtuple(
+    "Job", ["chunk", "global_seq", "num_accounts", "num_txs", "tx_type", "create_tx"]
+)
+
+
+def _do_job(job: Job):
+    accounts = [gen_account(job.global_seq, i + 1) for i in range(*job.chunk)]
+    acct_txs = []
+    total = 0
+    for acct in accounts:
+        txs = []
+        for i in range(job.num_txs):
+            txs.append(acct.sign_transaction(job.create_tx(i)).rawTransaction.hex())
+            total += 1
+            if total % 1000 == 0:
+                print("generated", total, "txs for node", job.global_seq)
+        acct_txs.append(txs)
+    return acct_txs
+
+
 def gen(global_seq, num_accounts, num_txs, tx_type: str) -> [str]:
-    accounts = [gen_account(global_seq, i + 1) for i in range(num_accounts)]
-    txs = []
+    chunks = split(num_accounts, os.cpu_count())
     create_tx = TX_TYPES[tx_type]
-    for i in range(num_txs):
-        for acct in accounts:
-            txs.append(acct.sign_transaction(create_tx(i)).rawTransaction.hex())
-            if len(txs) % 1000 == 0:
-                print("generated", len(txs), "txs for node", global_seq)
+    jobs = [
+        Job(chunk, global_seq, num_accounts, num_txs, tx_type, create_tx)
+        for chunk in chunks
+    ]
+
+    with multiprocessing.Pool() as pool:
+        acct_txs = pool.map(_do_job, jobs)
+
+    # mix the account txs together, ordered by nonce.
+    all_txs = []
+    for txs in itertools.zip_longest(*itertools.chain(*acct_txs)):
+        all_txs += txs
 
-    return txs
+    return all_txs
 
 
 def save(txs: [str], datadir: Path, global_seq: int):
diff --git a/testground/benchmark/benchmark/utils.py b/testground/benchmark/benchmark/utils.py
index 42a6947176..421c7d1ccd 100644
--- a/testground/benchmark/benchmark/utils.py
+++ b/testground/benchmark/benchmark/utils.py
@@ -171,3 +171,11 @@ def block(height):
 
 def block_txs(height):
     return block(height)["result"]["block"]["data"]["txs"]
+
+
+def split(a: int, n: int):
+    """
+    Split range(0, a) into n parts
+    """
+    k, m = divmod(a, n)
+    return [(i * k + min(i, m), (i + 1) * k + min(i + 1, m)) for i in range(n)]
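
A minimal sketch of how the chunking and the final interleave compose. The values
below (5 accounts, 2 chunks, fake "a0"-style tx labels) are illustrative stand-ins,
not real signed transactions from the benchmark:

    import itertools

    def split(a: int, n: int):
        # the same chunking helper the patch adds to utils.py
        k, m = divmod(a, n)
        return [(i * k + min(i, m), (i + 1) * k + min(i + 1, m)) for i in range(n)]

    # each (start, end) chunk becomes one Job handled by one worker process
    print(split(5, 2))  # [(0, 3), (3, 5)]

    # pool.map returns one entry per job: a list of per-account tx lists
    acct_txs = [
        [["a0", "a1"], ["b0", "b1"]],  # job covering accounts a and b
        [["c0", "c1"]],                # job covering account c
    ]

    # flatten jobs into accounts, then interleave so the combined stream keeps
    # nonce order: every account's nonce-0 tx first, then the nonce-1 txs, ...
    all_txs = []
    for txs in itertools.zip_longest(*itertools.chain(*acct_txs)):
        all_txs += txs
    print(all_txs)  # ['a0', 'b0', 'c0', 'a1', 'b1', 'c1']

zip_longest would pad with None only if accounts had uneven tx counts; the patch
avoids that case because _do_job generates exactly num_txs transactions per account.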