Commit e299af0

esantorella authored and facebook-github-bot committed
Stop allowing some arguments to be ignored in acqf input constructors (#2356)
Summary:
Pull Request resolved: #2356

Stop silently ignoring arguments that can't be safely ignored, so that an exception will be raised naturally instead. Carve-outs had to be added for `AnalyticExpectedUtilityOfBestOption` because it is often used in a BOPE loop, where the same arguments are passed in the preference-learning and experiment candidate-generation stages.

Reviewed By: saitcakmak

Differential Revision: D57909958
1 parent da0e4d6 commit e299af0
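
In practical terms, an acquisition function's input constructor now raises when it is handed a keyword argument it cannot act on, rather than dropping it silently. Below is a minimal sketch of the new behavior using analytic `ExpectedImprovement` and toy data; the exact exception type and message are not specified by this commit message, so the sketch just reports whatever is raised.

import torch

from botorch.acquisition.analytic import ExpectedImprovement
from botorch.acquisition.input_constructors import get_acqf_input_constructor
from botorch.models import SingleTaskGP
from botorch.utils.datasets import SupervisedDataset

# Toy single-outcome data and model.
X = torch.rand(5, 2, dtype=torch.double)
Y = X.sum(dim=-1, keepdim=True)
model = SingleTaskGP(X, Y)
training_data = SupervisedDataset(
    X=X, Y=Y, feature_names=["x0", "x1"], outcome_names=["y"]
)

input_constructor = get_acqf_input_constructor(ExpectedImprovement)

# `X_pending` is not an argument analytic EI's constructor can use.
# Previously it was silently ignored; with this change the call raises.
try:
    input_constructor(model=model, training_data=training_data, X_pending=X[:1])
except Exception as err:
    print(f"rejected: {type(err).__name__}: {err}")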

File tree: 3 files changed (+16, −16 lines)

botorch/acquisition/input_constructors.py

Lines changed: 15 additions & 13 deletions
@@ -81,6 +81,7 @@
 from botorch.acquisition.objective import (
     ConstrainedMCObjective,
     IdentityMCObjective,
+    LearnedObjective,
     MCAcquisitionObjective,
     PosteriorTransform,
 )
@@ -98,7 +99,7 @@
 )
 from botorch.exceptions.errors import UnsupportedError
 from botorch.models.cost import AffineFidelityCostModel
-from botorch.models.deterministic import DeterministicModel, FixedSingleSampleModel
+from botorch.models.deterministic import FixedSingleSampleModel
 from botorch.models.gpytorch import GPyTorchModel
 from botorch.models.model import Model
 from botorch.optim.optimize import optimize_acqf
@@ -214,14 +215,15 @@ def allow_only_specific_variable_kwargs(f: Callable[..., T]) -> Callable[..., T]
     in the signature of `f`. Any other keyword arguments will raise an error.
     """
     allowed = {
+        # `training_data` and/or `X_baseline` are needed to compute baselines
+        # for some EI-type acquisition functions.
         "training_data",
-        "objective",
-        "posterior_transform",
         "X_baseline",
-        "X_pending",
+        # Objective thresholds are needed for defining hypervolumes in
+        # multi-objective optimization.
         "objective_thresholds",
-        "constraints",
-        "target_fidelities",
+        # Used in input constructors for some lookahead acquisition functions
+        # such as qKnowledgeGradient.
         "bounds",
     }
 
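
The `allowed` set above lists the only keyword names a wrapped input constructor may receive without declaring them in its own signature; anything else now raises instead of being dropped. As a rough illustration of the filtering pattern (a sketch, not the actual BoTorch implementation), the decorator behaves roughly like this:

import functools
import inspect
from typing import Any, Callable, Dict, TypeVar

T = TypeVar("T")

# Names that may always be passed, even when absent from the wrapped signature
# (taken from the `allowed` set in the diff above).
ALLOWED_EXTRA_KWARGS = {"training_data", "X_baseline", "objective_thresholds", "bounds"}


def allow_only_specific_variable_kwargs_sketch(f: Callable[..., T]) -> Callable[..., T]:
    """Illustrative stand-in: drop allowed extras the wrapped constructor does
    not accept, and raise on anything else."""
    accepted = set(inspect.signature(f).parameters)

    @functools.wraps(f)
    def wrapped(*args: Any, **kwargs: Any) -> T:
        unexpected = set(kwargs) - accepted - ALLOWED_EXTRA_KWARGS
        if unexpected:
            raise TypeError(f"Unexpected keyword arguments: {sorted(unexpected)}")
        # Silently discard allowed extras that `f` itself does not take.
        kwargs = {k: v for k, v in kwargs.items() if k in accepted}
        return f(*args, **kwargs)

    return wrapped


@allow_only_specific_variable_kwargs_sketch
def construct_inputs_example(model: Any, best_f: float = 0.0) -> Dict[str, Any]:
    return {"model": model, "best_f": best_f}


construct_inputs_example(model="m", training_data="allowed extra, dropped")
# construct_inputs_example(model="m", X_pending="boom")  # would raise TypeError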

@@ -860,7 +862,6 @@ def construct_inputs_EHVI(
     model: Model,
     training_data: MaybeDict[SupervisedDataset],
     objective_thresholds: Tensor,
-    objective: Optional[MCMultiOutputObjective] = None,
     posterior_transform: Optional[PosteriorTransform] = None,
     constraints: Optional[List[Callable[[Tensor], Tensor]]] = None,
     alpha: Optional[float] = None,
@@ -1327,12 +1328,7 @@ def construct_inputs_qMFMES(
     training_data: MaybeDict[SupervisedDataset],
     bounds: List[Tuple[float, float]],
     target_fidelities: Dict[int, Union[int, float]],
-    objective: Optional[MCAcquisitionObjective] = None,
-    posterior_transform: Optional[PosteriorTransform] = None,
     num_fantasies: int = 64,
-    X_baseline: Optional[Tensor] = None,
-    X_pending: Optional[Tensor] = None,
-    objective_thresholds: Optional[Tensor] = None,
     fidelity_weights: Optional[Dict[int, float]] = None,
     cost_intercept: float = 1.0,
     num_trace_observations: int = 0,
@@ -1364,6 +1360,8 @@ def construct_inputs_analytic_eubo(
     pref_model: Optional[Model] = None,
     previous_winner: Optional[Tensor] = None,
     sample_multiplier: Optional[float] = 1.0,
+    objective: Optional[LearnedObjective] = None,
+    posterior_transform: Optional[PosteriorTransform] = None,
 ) -> Dict[str, Any]:
     r"""Construct kwargs for the `AnalyticExpectedUtilityOfBestOption` constructor.
 
@@ -1384,6 +1382,11 @@
             BOPE; if None, we are doing PBO and model is the preference model.
         previous_winner: The previous winner of the best option.
         sample_multiplier: The scale factor for the single-sample model.
+        objective: Ignored. This argument is allowed to be passed then ignored
+            because of the way that EUBO is typically used in a BOPE loop.
+        posterior_transform: Ignored. This argument is allowed to be passed then
+            ignored because of the way that EUBO is typically used in a BOPE
+            loop.
 
     Returns:
         A dict mapping kwarg names of the constructor to values.
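
These two arguments are the carve-out mentioned in the summary: in a BOPE loop the same kwargs are commonly passed to both the preference-learning stage (EUBO) and the candidate-generation stage, so EUBO's constructor accepts `objective` and `posterior_transform` and simply discards them. A sketch of that calling pattern follows; `outcome_model`, `pref_model`, `learned_objective`, `train_data`, and `train_X` stand in for objects built earlier in such a loop and are not defined here.

from botorch.acquisition.input_constructors import get_acqf_input_constructor
from botorch.acquisition.logei import qLogNoisyExpectedImprovement
from botorch.acquisition.preference import AnalyticExpectedUtilityOfBestOption

# Keyword arguments shared across both stages of the BOPE loop.
shared_kwargs = {
    "objective": learned_objective,   # used for candidate generation, ignored by EUBO
    "posterior_transform": None,      # likewise accepted-then-ignored by EUBO
}

# Preference-learning stage: EUBO tolerates the extra kwargs.
eubo_inputs = get_acqf_input_constructor(AnalyticExpectedUtilityOfBestOption)(
    model=outcome_model, pref_model=pref_model, **shared_kwargs
)
eubo = AnalyticExpectedUtilityOfBestOption(**eubo_inputs)

# Candidate-generation stage: the same kwargs are genuinely used here.
qlognei_inputs = get_acqf_input_constructor(qLogNoisyExpectedImprovement)(
    model=outcome_model,
    training_data=train_data,
    X_baseline=train_X,
    **shared_kwargs,
)
qlognei = qLogNoisyExpectedImprovement(**qlognei_inputs)
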
@@ -1414,7 +1417,6 @@
 def construct_inputs_qeubo(
     model: Model,
     pref_model: Optional[Model] = None,
-    outcome_model: Optional[DeterministicModel] = None,
     sample_multiplier: Optional[float] = 1.0,
     sampler: Optional[MCSampler] = None,
     objective: Optional[MCAcquisitionObjective] = None,

test/acquisition/test_input_constructors.py

Lines changed: 0 additions & 2 deletions
@@ -1312,7 +1312,6 @@ def test_construct_inputs_mfmes(self) -> None:
         constructor_args = {
             "model": None,
             "training_data": self.blockX_blockY,
-            "objective": None,
             "bounds": self.bounds,
             "candidate_size": 17,
             "target_fidelities": target_fidelities,
@@ -1340,7 +1339,6 @@ def test_construct_inputs_jes(self) -> None:
         kwargs = func(
             model=model,
             training_data=self.blockX_blockY,
-            objective=LinearMCObjective(torch.rand(2)),
             bounds=self.bounds,
             num_optima=17,
             maximize=False,

tutorials/custom_acquisition.ipynb

Lines changed: 1 addition & 1 deletion
@@ -509,7 +509,7 @@
     "    model: Model,\n",
     "    beta: float,\n",
     "    weights: List[float],\n",
-    "    **kwargs: Any,\n",
+    "    posterior_transform: None,\n",
     ") -> Dict[str, Any]:\n",
     "    return {\n",
     "        \"model\": model,\n",
