Skip to content

Commit 003f13c

Browse files
Ethan Che and facebook-github-bot
authored and committed
Add input constructor for LogCEI (#2973)
Summary: Pull Request resolved: #2973 Similar to LogPOF, the input constructor converts a constraints_tuple (A,b) to a dictionary of input constraints. Also infers best_f from a training set, similar to LogEI. The input constructor does not throw an error if objective_index lies in the constraints, as this is already handled by the LogCEI constructor (this is included as a test case). Reviewed By: SebastianAment Differential Revision: D80183196 fbshipit-source-id: dc6490fdc23da340765efdd13c40f792d4ef90af
1 parent 91327b4 commit 003f13c

File tree

2 files changed

+160
-19
lines changed

2 files changed

+160
-19
lines changed

botorch/acquisition/input_constructors.py

Lines changed: 82 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
from botorch.acquisition.active_learning import qNegIntegratedPosteriorVariance
2121
from botorch.acquisition.analytic import (
2222
ExpectedImprovement,
23+
LogConstrainedExpectedImprovement,
2324
LogExpectedImprovement,
2425
LogNoisyExpectedImprovement,
2526
LogProbabilityOfFeasibility,
@@ -361,28 +362,63 @@ def construct_inputs_pof(
361362
Returns:
362363
A dict mapping kwarg names of the constructor to values.
363364
"""
364-
# Construct a dictionary of the form `{i: [lower, upper]}`,
365-
# where `i` is the output index, and `lower` and `upper` are
366-
# lower and upper bounds on that output (resp. interpreted
367-
# as -Inf / Inf if None).
368-
weights, bounds = constraints_tuple
369-
constraints_dict = {}
370-
for w, b in zip(weights, bounds):
371-
nonzero_w = w.nonzero()
372-
if nonzero_w.numel() != 1:
373-
raise BotorchError(
374-
"LogProbabilityOfFeasibility only support constraints on single"
375-
" outcomes."
376-
)
377-
i = nonzero_w.item()
378-
w_i = w[i]
379-
is_ub = torch.sign(w_i) == 1.0
380-
b = b.item()
381-
bounds = (None, b / w_i) if is_ub else (b / w_i, None)
382-
constraints_dict[i] = bounds
365+
# Construct a constraint dictionary from constraint_tuple
366+
constraints_dict = _construct_constraint_dict_from_tuple(
367+
constraints_tuple, LogProbabilityOfFeasibility
368+
)
369+
383370
return {"model": model, "constraints": constraints_dict}
384371

385372

373+
@acqf_input_constructor(LogConstrainedExpectedImprovement)
def construct_inputs_logcei(
    model: Model,
    training_data: MaybeDict[SupervisedDataset],
    objective_index: int,
    constraints_tuple: tuple[Tensor, Tensor],
    best_f: float | Tensor | None = None,
    maximize: bool = True,
) -> dict[str, Any]:
    r"""Construct kwargs for the log constrained expected improvement
    acquisition function.

    Args:
        model: The model to be used in the acquisition function.
        training_data: Dataset(s) used to train the model. Only used to
            derive a default `best_f` when none is given.
        objective_index: The index of the objective.
        constraints_tuple: A tuple `(A, b)` with `A` of shape `k x m` and `b`
            of shape `k x 1`, encoding the `k` outcome constraints
            `A f(x) <= b` over the `m` model outputs.
        best_f: Either a scalar or a `b`-dim Tensor (batch mode) representing
            the best feasible function value observed so far (assumed
            noiseless). Inferred from `training_data` if omitted.
        maximize: If True, consider the problem a maximization problem.

    Returns:
        A dict mapping kwarg names of the constructor to values.
    """
    # Fall back to the best observed training value when best_f is absent.
    # NOTE: no posterior_transform is passed — LogCEI does not use one.
    if best_f is None:
        best_f = get_best_f_analytic(training_data=training_data)

    return {
        "model": model,
        "best_f": best_f,
        "objective_index": objective_index,
        # Translate the (A, b) tuple into per-output (lower, upper) bounds.
        "constraints": _construct_constraint_dict_from_tuple(
            constraints_tuple, LogConstrainedExpectedImprovement
        ),
        "maximize": maximize,
    }
420+
421+
386422
@acqf_input_constructor(UpperConfidenceBound)
387423
def construct_inputs_ucb(
388424
model: Model,
@@ -1984,3 +2020,30 @@ def _get_ref_point(
19842020
ref_point = objective(objective_thresholds)
19852021

19862022
return ref_point
2023+
2024+
2025+
def _construct_constraint_dict_from_tuple(
    constraints_tuple: tuple, acqf_class: type[AcquisitionFunction]
) -> dict[str, Any]:
    """Convert an `(A, b)` constraint tuple into per-output bounds.

    Construct a dictionary of the form `{i: (lower, upper)}`, where `i` is
    the output index, and `lower` and `upper` are lower and upper bounds on
    that output (resp. interpreted as -Inf / Inf if None).

    Args:
        constraints_tuple: A tuple `(A, b)` of tensors encoding
            `A f(x) <= b`, where each row of `A` must have exactly one
            nonzero entry (each constraint acts on a single outcome).
        acqf_class: The acquisition function class; used only to name the
            offending class in the error message.

    Returns:
        A dict mapping each constrained output index to its
        `(lower, upper)` bounds tuple.

    Raises:
        BotorchError: If a row of `A` has more than one nonzero entry.
    """
    weights, rhs = constraints_tuple
    constraints_dict = {}
    for w, b in zip(weights, rhs):
        nonzero_w = w.nonzero()
        if nonzero_w.numel() != 1:
            # NOTE: message text is pinned by the unit tests; keep verbatim.
            raise BotorchError(
                f"{acqf_class.__name__} only support constraints on single outcomes."
            )
        i = nonzero_w.item()
        w_i = w[i]
        # Normalize by the weight; a positive weight yields an upper bound
        # on output i, a negative weight a lower bound.
        bound = b.item() / w_i
        constraints_dict[i] = (
            (None, bound) if torch.sign(w_i) == 1.0 else (bound, None)
        )
    return constraints_dict

test/acquisition/test_input_constructors.py

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
from botorch.acquisition.active_learning import qNegIntegratedPosteriorVariance
2525
from botorch.acquisition.analytic import (
2626
ExpectedImprovement,
27+
LogConstrainedExpectedImprovement,
2728
LogExpectedImprovement,
2829
LogNoisyExpectedImprovement,
2930
LogProbabilityOfFeasibility,
@@ -475,6 +476,72 @@ def test_construct_inputs_noisy_ei(self) -> None:
475476
with self.assertRaisesRegex(ValueError, "Field `X` must be shared"):
476477
c(model=mock_model, training_data=self.multiX_multiY)
477478

479+
def test_construct_inputs_LogCEI(self) -> None:
    """Test the input constructor for LogConstrainedExpectedImprovement."""
    input_constructor = get_acqf_input_constructor(
        LogConstrainedExpectedImprovement
    )
    mock_model = self.mock_model
    objective_index = 0
    best_f_expected = self.blockX_blockY[0].Y.squeeze().max()
    # Case 1: best_f inferred from training data; positive-weight
    # constraint tuple maps to an upper bound on output 1.
    kwargs = input_constructor(
        model=mock_model,
        objective_index=objective_index,
        training_data=self.blockX_blockY,
        constraints_tuple=[torch.tensor([[0.0, 1.0]]), torch.tensor([[2.0]])],
        maximize=False,
    )
    self.assertEqual(
        set(kwargs.keys()),
        {"model", "best_f", "objective_index", "constraints", "maximize"},
    )
    self.assertIs(kwargs["model"], mock_model)
    self.assertEqual(kwargs["objective_index"], objective_index)
    self.assertEqual(kwargs["constraints"], {1: (None, 2.0)})
    self.assertEqual(kwargs["best_f"], best_f_expected)
    self.assertFalse(kwargs["maximize"])
    # Case 2: an explicit best_f overrides the training-data default, and a
    # negative-weight constraint maps to a lower bound.
    kwargs = input_constructor(
        model=mock_model,
        objective_index=objective_index,
        training_data=self.blockX_blockY,
        best_f=0.1,
        constraints_tuple=[torch.tensor([[0.0, -1.0]]), torch.tensor([[-2.0]])],
    )
    self.assertIs(kwargs["model"], mock_model)
    self.assertEqual(kwargs["objective_index"], objective_index)
    self.assertEqual(kwargs["constraints"], {1: (2.0, None)})
    self.assertEqual(kwargs["best_f"], 0.1)
    self.assertTrue(kwargs["maximize"])
    # Case 3: a constraint row touching multiple outcomes raises.
    with self.assertRaisesRegex(
        BotorchError,
        "LogConstrainedExpectedImprovement only support constraints on single"
        " outcomes.",
    ):
        input_constructor(
            model=mock_model,
            objective_index=objective_index,
            training_data=self.blockX_blockY,
            constraints_tuple=[torch.tensor([[1.0, 1.0]]), torch.tensor([[2.0]])],
        )
    # Case 4: the constructor itself does not reject objective_index lying
    # in the constraints; the LogCEI __init__ raises the ValueError.
    with self.assertRaisesRegex(
        ValueError,
        "Output corresponding to objective should not be a constraint.",
    ):
        kwargs = input_constructor(
            model=mock_model,
            objective_index=1,
            training_data=self.blockX_blockY,
            constraints_tuple=[torch.tensor([[0.0, -1.0]]), torch.tensor([[-2.0]])],
        )
        LogConstrainedExpectedImprovement(**kwargs)
544+
478545
def test_construct_inputs_eubo(self) -> None:
479546
"""test input constructor for analytical EUBO and MC qEUBO"""
480547

@@ -1845,6 +1912,17 @@ def setUp(self, suppress_input_warnings: bool = True) -> None:
18451912
},
18461913
)
18471914

1915+
self.cases["LogCEI"] = (
1916+
[LogConstrainedExpectedImprovement],
1917+
{
1918+
"model": st_soo_model,
1919+
"objective_index": 0,
1920+
"training_data": self.blockX_blockY,
1921+
"constraints": {0: [-5, 5]},
1922+
**constraints_tuple_dict,
1923+
},
1924+
)
1925+
18481926
def constraint(X: Tensor) -> Tensor:
18491927
return X[..., 0].abs() - 5
18501928

0 commit comments

Comments
 (0)