
Commit cd36e52

bletham authored and facebook-github-bot committed
LogEI: select cache_root based on model support (#2820)
Summary:
Pull Request resolved: #2820

This diff has the input constructor for LogEI check whether the model supports cache_root, rather than defaulting it to True. Currently, any time you use LogEI with a multi-task model, it emits a log warning that cache_root is being disabled. This change removes that log message by making the default smarter.

Reviewed By: SebastianAment

Differential Revision: D72667330

fbshipit-source-id: 304a1c673e31c79e5cefdc0449e2ada103350895
1 parent 3c87451 commit cd36e52

File tree

1 file changed: +4 -1 lines changed

botorch/acquisition/input_constructors.py

Lines changed: 4 additions & 1 deletion
@@ -31,6 +31,7 @@
 from botorch.acquisition.bayesian_active_learning import (
     qBayesianActiveLearningByDisagreement,
 )
+from botorch.acquisition.cached_cholesky import supports_cache_root
 from botorch.acquisition.cost_aware import InverseCostWeightedUtility
 from botorch.acquisition.fixed_feature import FixedFeatureAcquisitionFunction
 from botorch.acquisition.joint_entropy_search import qJointEntropySearch
@@ -644,7 +645,7 @@ def construct_inputs_qLogNEI(
     sampler: MCSampler | None = None,
     X_baseline: Tensor | None = None,
     prune_baseline: bool | None = True,
-    cache_root: bool | None = True,
+    cache_root: bool | None = None,
     constraints: list[Callable[[Tensor], Tensor]] | None = None,
     eta: Tensor | float = 1e-3,
     fat: bool = True,
@@ -692,6 +693,8 @@ def construct_inputs_qLogNEI(
     Returns:
         A dict mapping kwarg names of the constructor to values.
     """
+    if cache_root is None:
+        cache_root = supports_cache_root(model)
     return {
         **construct_inputs_qNEI(
             model=model,
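
For illustration, a minimal sketch of how the new default resolves, using supports_cache_root directly. The model classes, data shapes, and expected outputs below are illustrative assumptions for this sketch, not part of the commit; only the supports_cache_root import path comes from the diff above.

# Sketch: how the constructor's new cache_root default is decided per model type.
import torch
from botorch.acquisition.cached_cholesky import supports_cache_root
from botorch.models import MultiTaskGP, SingleTaskGP

train_X = torch.rand(8, 2, dtype=torch.double)
train_Y = torch.rand(8, 1, dtype=torch.double)

# Single-output GP (assumed to support root caching): the constructor
# would resolve cache_root=None to True, as before.
stgp = SingleTaskGP(train_X, train_Y)
print(supports_cache_root(stgp))  # expected: True

# Multi-task GP: per the summary, this combination previously triggered a
# "cache_root is being disabled" log warning; with this change the default
# simply resolves to False and no warning is emitted.
task_col = (torch.arange(8, dtype=torch.double) % 2).unsqueeze(-1)
mtgp = MultiTaskGP(torch.cat([train_X, task_col], dim=-1), train_Y, task_feature=-1)
print(supports_cache_root(mtgp))  # expected: False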
