Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

handle constraints in qSimpleRegret #2141

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions botorch/acquisition/input_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@
)
from botorch.acquisition.multi_objective.utils import get_default_partitioning_alpha
from botorch.acquisition.objective import (
ConstrainedMCObjective,
IdentityMCObjective,
MCAcquisitionObjective,
PosteriorTransform,
Expand All @@ -90,6 +91,7 @@
from botorch.acquisition.utils import (
compute_best_feasible_objective,
expand_trace_observations,
get_infeasible_cost,
get_optimal_samples,
project_to_target_fidelity,
)
Expand Down Expand Up @@ -433,6 +435,8 @@ def construct_inputs_qSimpleRegret(
posterior_transform: Optional[PosteriorTransform] = None,
X_pending: Optional[Tensor] = None,
sampler: Optional[MCSampler] = None,
constraints: Optional[List[Callable[[Tensor], Tensor]]] = None,
X_baseline: Optional[Tensor] = None,
) -> Dict[str, Any]:
r"""Construct kwargs for qSimpleRegret.

Expand All @@ -446,10 +450,28 @@ def construct_inputs_qSimpleRegret(
but have not yet been evaluated.
sampler: The sampler used to draw base samples. If omitted, uses
the acquisition function's default sampler.
constraints: A list of constraint callables which map a Tensor of posterior
samples of dimension `sample_shape x batch-shape x q x m`-dim to a
`sample_shape x batch-shape x q`-dim Tensor. The associated constraints
are considered satisfied if the output is less than zero.
X_baseline: A `batch_shape x r x d`-dim Tensor of `r` design points
that have already been observed. These points are considered as
the potential best design point. If omitted, checks that all
training_data have the same input features and takes the first `X`.

Returns:
A dict mapping kwarg names of the constructor to values.
"""
if constraints is not None:
if X_baseline is None:
raise ValueError("Constraints require an X_baseline.")
objective = ConstrainedMCObjective(
objective=objective,
constraints=constraints,
infeasible_cost=get_infeasible_cost(
X=X_baseline, model=model, objective=objective
),
)
return {
"model": model,
"objective": objective,
Expand Down
52 changes: 52 additions & 0 deletions botorch/acquisition/monte_carlo.py
Original file line number Diff line number Diff line change
Expand Up @@ -729,13 +729,54 @@ class qSimpleRegret(SampleReducingMCAcquisitionFunction):

`qSR(X) = E(max Y), Y ~ f(X), X = (x_1,...,x_q)`

Constraints should be provided as a `ConstrainedMCObjective`.
Passing `constraints` as an argument is not supported. This is because
`SampleReducingMCAcquisitionFunction` computes the acquisition values on the sample
level and then weights the sample-level acquisition values by a soft feasibility
indicator. Hence, it expects non-log acquisition function values to be
non-negative. `qSimpleRegret` acquisition values can be negative, so we instead use
a `ConstrainedMCObjective`, which applies the constraints to the objectives
(i.e. before computing the acquisition function) and shifts negative objective
values by an infeasible cost to ensure non-negativity (before applying the
constraints and shifting the values back).

Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1024)
>>> qSR = qSimpleRegret(model, sampler)
>>> qsr = qSR(test_X)
"""

def __init__(
    self,
    model: Model,
    sampler: Optional[MCSampler] = None,
    objective: Optional[MCAcquisitionObjective] = None,
    posterior_transform: Optional[PosteriorTransform] = None,
    X_pending: Optional[Tensor] = None,
) -> None:
    r"""q-Simple Regret.

    NOTE(review): this override is a pure pass-through to the base class;
    presumably it exists to narrow the inherited signature so that
    `constraints` cannot be passed directly — confirm against the base
    `SampleReducingMCAcquisitionFunction` constructor.

    Args:
        model: A fitted model.
        sampler: The sampler used to draw base samples. See
            `MCAcquisitionFunction` for more details.
        objective: The MCAcquisitionObjective under which the samples are
            evaluated. Defaults to `IdentityMCObjective()`.
        posterior_transform: A PosteriorTransform (optional).
        X_pending: A `m x d`-dim Tensor of `m` design points that have
            been submitted for function evaluation but have not yet been
            evaluated. Concatenated into X upon forward call. Copied and
            set to have no gradient.
    """
    super().__init__(
        model=model,
        sampler=sampler,
        objective=objective,
        posterior_transform=posterior_transform,
        X_pending=X_pending,
    )

def _sample_forward(self, obj: Tensor) -> Tensor:
r"""Evaluate qSimpleRegret per sample on the candidate set `X`.

Expand All @@ -757,6 +798,17 @@ class qUpperConfidenceBound(SampleReducingMCAcquisitionFunction):
`qUCB = E(max(mu + |Y_tilde - mu|))`, where `Y_tilde ~ N(mu, beta pi/2 Sigma)`
and `f(X)` has distribution `N(mu, Sigma)`.

Constraints should be provided as a `ConstrainedMCObjective`.
Passing `constraints` as an argument is not supported. This is because
`SampleReducingMCAcquisitionFunction` computes the acquisition values on the sample
level and then weights the sample-level acquisition values by a soft feasibility
indicator. Hence, it expects non-log acquisition function values to be
non-negative. `qUpperConfidenceBound` acquisition values can be negative, so we
instead use a `ConstrainedMCObjective`, which applies the constraints to the
objectives (i.e. before computing the acquisition function) and shifts negative
objective values by an infeasible cost to ensure non-negativity (before applying
the constraints and shifting the values back).

Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1024)
Expand Down
30 changes: 30 additions & 0 deletions test/acquisition/test_input_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@
)
from botorch.acquisition.multi_objective.utils import get_default_partitioning_alpha
from botorch.acquisition.objective import (
ConstrainedMCObjective,
LinearMCObjective,
ScalarizedPosteriorTransform,
)
Expand Down Expand Up @@ -473,6 +474,35 @@ def test_construct_inputs_mc_base(self) -> None:
self.assertIsNone(kwargs["sampler"])
acqf = qSimpleRegret(**kwargs)
self.assertIs(acqf.model, mock_model)
# test constraints
constraints = [lambda Y: Y[..., 0]]
with self.assertRaisesRegex(ValueError, "Constraints require an X_baseline."):
c(
model=mock_model,
training_data=self.blockX_blockY,
objective=objective,
X_pending=X_pending,
constraints=constraints,
)
with mock.patch(
"botorch.acquisition.input_constructors.get_infeasible_cost",
return_value=2.0,
):
kwargs = c(
model=mock_model,
training_data=self.blockX_blockY,
objective=objective,
X_pending=X_pending,
constraints=constraints,
X_baseline=X_pending,
)
acqf = qSimpleRegret(**kwargs)
self.assertIsNone(acqf._constraints)
self.assertIsInstance(acqf.objective, ConstrainedMCObjective)
self.assertIs(acqf.objective.objective, objective)
self.assertIs(acqf.objective.constraints, constraints)
self.assertEqual(acqf.objective.infeasible_cost.item(), 2.0)

# TODO: Test passing through of sampler

def test_construct_inputs_qEI(self) -> None:
Expand Down
21 changes: 20 additions & 1 deletion test/acquisition/test_monte_carlo.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,11 @@ def _sample_forward(self, X):
pass


class NegativeReducingMCAcquisitionFunction(SampleReducingMCAcquisitionFunction):
    """Test helper whose per-sample acquisition values are uniformly -1.

    Used to exercise the constraint-weighting error path, which requires a
    sample-level acquisition function that produces negative values.
    """

    def _sample_forward(self, X):
        # Emit -1 everywhere, preserving X's shape, dtype, and device.
        return torch.zeros_like(X) - 1.0


def infeasible_con(samples: Tensor) -> Tensor:
    """Constraint callable that is violated for every sample.

    Returns a strictly positive value (1.0) for each sample, i.e. the
    constraint (satisfied iff value <= 0) is never met.

    Args:
        samples: A `sample_shape x batch_shape x q x m`-dim tensor.

    Returns:
        A `sample_shape x batch_shape x q`-dim tensor of ones.
    """
    # Drop the trailing outcome dimension and fill the result with ones,
    # matching the input's dtype and device.
    return torch.full_like(samples[..., 0], 1.0)

Expand Down Expand Up @@ -806,6 +811,18 @@ def test_q_simple_regret_batch(self):
acqf(X)
self.assertTrue(torch.equal(acqf.sampler.base_samples, bs))

def test_q_simple_regret_constraints(self):
    """Passing `constraints` directly to qSimpleRegret must raise a TypeError."""
    # Posterior samples are only needed to build the mock model; the
    # constructor is expected to fail before any evaluation happens.
    samples = torch.zeros(2, 2, 1, device=self.device, dtype=torch.double)
    samples[0, 0, 0] = 1.0
    model = MockModel(MockPosterior(samples=samples))
    expected_msg = (
        r"qSimpleRegret\.__init__\(\) got an unexpected keyword argument "
        r"'constraints'"
    )
    with self.assertRaisesRegex(TypeError, expected_msg):
        qSimpleRegret(model=model, constraints=[lambda Y: Y[..., 0]])

# TODO: Test different objectives (incl. constraints)


Expand Down Expand Up @@ -988,7 +1005,9 @@ def test_mc_acquisition_function_with_constraints(self):
# regret because the acquisition utility is negative.
samples = -torch.rand(n, q, m, device=self.device, dtype=dtype)
mm = MockModel(MockPosterior(samples=samples))
cacqf = qSimpleRegret(model=mm, constraints=[feasible_con])
cacqf = NegativeReducingMCAcquisitionFunction(
model=mm, constraints=[feasible_con]
)
with self.assertRaisesRegex(
ValueError,
"Constraint-weighting requires unconstrained "
Expand Down