Make parallelize lazy
felixhekhorn committed Nov 5, 2024
1 parent faa7184 commit d535d73
Showing 1 changed file with 22 additions and 16 deletions.
38 changes: 22 additions & 16 deletions paper/run.py
@@ -5,7 +5,6 @@
 import pathlib
 import time
 from dataclasses import dataclass
-from multiprocessing import Pool
 from typing import Optional
 
 import lhapdf
@@ -115,7 +114,7 @@ def grid_path(m2: float, nf: int) -> str:
 
 
 def compute(
-    ndata: int, m2: float, nl: int, pdf: str, processes: int = -1, quick: bool = False
+    ndata: int, m2: float, nl: int, pdf: str, processes: int = 1, quick: bool = False
 ) -> None:
     """Compute grids."""
     # determine energy range
@@ -124,24 +123,31 @@ def compute(
         ndata = len(sh_range)
     else:
         sh_range = np.geomspace(SH_MIN, SH_MAX, ndata)
-    # parallelize
+    # parallelize if requested
    if processes <= 0:
        processes = max(os.cpu_count() + processes, 1)
     start = time.perf_counter()
     print(f"Computing with m2={m2}, nl={abs(nl)}, pdf={pdf} using {processes} threads")
-    with Pool(processes) as p:
-        p.starmap(
-            compute_subgrid,
-            zip(
-                range(ndata),
-                [ndata] * ndata,
-                [m2] * ndata,
-                [nl] * ndata,
-                sh_range,
-                [pdf] * ndata,
-                [quick] * ndata,
-            ),
-        )
+    args = zip(
+        range(ndata),
+        [ndata] * ndata,
+        [m2] * ndata,
+        [nl] * ndata,
+        sh_range,
+        [pdf] * ndata,
+        [quick] * ndata,
+    )
+    if processes > 1:
+        from multiprocessing import Pool  # pylint: disable=import-outside-toplevel
+
+        with Pool(processes) as p:
+            p.starmap(
+                compute_subgrid,
+                args,
+            )
+    else:
+        for arg in args:
+            compute_subgrid(*arg)
     delta = time.perf_counter() - start
     print("---")
     print(f"computed {ndata} grids in {delta/60:.2f} min")
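Note on the pattern: the commit defers the multiprocessing import so that Pool is only imported and started when more than one process is requested, while a single-process run just loops in-process. Building args once with zip is safe here because zip returns a lazy iterator and exactly one of the two branches consumes it. The following is a minimal, self-contained sketch of the same idea, not code from paper/run.py; work and run are placeholder names.

def work(index: int, value: float) -> float:
    """Stand-in for compute_subgrid: square one value."""
    return value**2


def run(values: list[float], processes: int = 1) -> None:
    """Map `work` over `values`, in parallel only if requested."""
    args = list(zip(range(len(values)), values))
    if processes > 1:
        # deferred import, as in the commit
        from multiprocessing import Pool

        with Pool(processes) as p:
            results = p.starmap(work, args)
    else:
        results = [work(*arg) for arg in args]
    print(results)


if __name__ == "__main__":
    run([1.0, 2.0, 3.0])  # serial path: no Pool is ever created

Deferring the import keeps multiprocessing, along with its process-spawning and pickling overhead, entirely out of single-process runs, which is presumably the point of making the parallelization lazy.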
