21
21
from bayes_opt import acquisition
22
22
from bayes_opt .constraint import ConstraintModel
23
23
from bayes_opt .domain_reduction import DomainTransformer
24
- from bayes_opt .event import DEFAULT_EVENTS , Events
25
- from bayes_opt .logger import _get_default_logger
24
+ from bayes_opt .logger import ScreenLogger
26
25
from bayes_opt .parameter import wrap_kernel
27
26
from bayes_opt .target_space import TargetSpace
28
27
from bayes_opt .util import ensure_rng
29
28
30
29
if TYPE_CHECKING :
31
- from collections .abc import Callable , Iterable , Mapping
30
+ from collections .abc import Callable , Mapping
32
31
33
32
from numpy .random import RandomState
34
33
from numpy .typing import NDArray
41
40
Float = np .floating [Any ]
42
41
43
42
44
- class Observable :
45
- """Inspired by https://www.protechtraining.com/blog/post/879#simple-observer."""
46
-
47
- def __init__ (self , events : Iterable [Any ]) -> None :
48
- # maps event names to subscribers
49
- # str -> dict
50
- self ._events = {event : dict () for event in events }
51
-
52
- def get_subscribers (self , event : Any ) -> Any :
53
- """Return the subscribers of an event."""
54
- return self ._events [event ]
55
-
56
- def subscribe (self , event : Any , subscriber : Any , callback : Callable [..., Any ] | None = None ) -> None :
57
- """Add subscriber to an event."""
58
- if callback is None :
59
- callback = subscriber .update
60
- self .get_subscribers (event )[subscriber ] = callback
61
-
62
- def unsubscribe (self , event : Any , subscriber : Any ) -> None :
63
- """Remove a subscriber for a particular event."""
64
- del self .get_subscribers (event )[subscriber ]
65
-
66
- def dispatch (self , event : Any ) -> None :
67
- """Trigger callbacks for subscribers of an event."""
68
- for callback in self .get_subscribers (event ).values ():
69
- callback (event , self )
70
-
71
-
72
- class BayesianOptimization (Observable ):
43
+ class BayesianOptimization :
73
44
"""Handle optimization of a target function over a specific target space.
74
45
75
46
This class takes the function to optimize as well as the parameters bounds
@@ -173,7 +144,9 @@ def __init__(
173
144
self ._bounds_transformer .initialize (self ._space )
174
145
175
146
self ._sorting_warning_already_shown = False # TODO: remove in future version
176
- super ().__init__ (events = DEFAULT_EVENTS )
147
+
148
+ # Initialize logger
149
+ self .logger = ScreenLogger (verbose = self ._verbose , is_constrained = self .is_constrained )
177
150
178
151
@property
179
152
def space (self ) -> TargetSpace :
@@ -236,7 +209,9 @@ def register(
236
209
warn (msg , stacklevel = 1 )
237
210
self ._sorting_warning_already_shown = True
238
211
self ._space .register (params , target , constraint_value )
239
- self .dispatch (Events .OPTIMIZATION_STEP )
212
+ self .logger .log_optimization_step (
213
+ self ._space .keys , self ._space .res ()[- 1 ], self ._space .params_config , self .max
214
+ )
240
215
241
216
def probe (self , params : ParamsType , lazy : bool = True ) -> None :
242
217
"""Evaluate the function at the given points.
@@ -268,7 +243,9 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
268
243
self ._queue .append (params )
269
244
else :
270
245
self ._space .probe (params )
271
- self .dispatch (Events .OPTIMIZATION_STEP )
246
+ self .logger .log_optimization_step (
247
+ self ._space .keys , self ._space .res ()[- 1 ], self ._space .params_config , self .max
248
+ )
272
249
273
250
def suggest (self ) -> dict [str , float | NDArray [Float ]]:
274
251
"""Suggest a promising point to probe next."""
@@ -295,13 +272,6 @@ def _prime_queue(self, init_points: int) -> None:
295
272
sample = self ._space .random_sample (random_state = self ._random_state )
296
273
self ._queue .append (self ._space .array_to_params (sample ))
297
274
298
def _prime_subscriptions(self) -> None:
    """Attach the default logger to all optimization events.

    The default logger is installed only when no subscriber has been
    registered for any event yet, so user-provided observers are never
    overridden.
    """
    # A non-empty subscriber dict is truthy, so dict truthiness replaces the
    # original `any([len(subs) for subs in ...])`, which built a throwaway
    # list and called len() needlessly (ruff C419).
    if not any(self._events.values()):
        _logger = _get_default_logger(self._verbose, self.is_constrained)
        self.subscribe(Events.OPTIMIZATION_START, _logger)
        self.subscribe(Events.OPTIMIZATION_STEP, _logger)
        self.subscribe(Events.OPTIMIZATION_END, _logger)
304
-
305
275
def maximize (self , init_points : int = 5 , n_iter : int = 25 ) -> None :
306
276
r"""
307
277
Maximize the given function over the target space.
@@ -324,8 +294,10 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
324
294
optimization routine, make sure to fit it manually, e.g. by calling
325
295
``optimizer._gp.fit(optimizer.space.params, optimizer.space.target)``.
326
296
"""
327
- self ._prime_subscriptions ()
328
- self .dispatch (Events .OPTIMIZATION_START )
297
+ # Log optimization start
298
+ self .logger .log_optimization_start (self ._space .keys )
299
+
300
+ # Prime the queue with random points
329
301
self ._prime_queue (init_points )
330
302
331
303
iteration = 0
@@ -342,7 +314,8 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
342
314
# the init_points points (only for the true iterations)
343
315
self .set_bounds (self ._bounds_transformer .transform (self ._space ))
344
316
345
- self .dispatch (Events .OPTIMIZATION_END )
317
+ # Log optimization end
318
+ self .logger .log_optimization_end ()
346
319
347
320
def set_bounds (self , new_bounds : BoundsMapping ) -> None :
348
321
"""Modify the bounds of the search space.
0 commit comments