diff --git a/bayes_opt/bayesian_optimization.py b/bayes_opt/bayesian_optimization.py index 35741693..24397b50 100644 --- a/bayes_opt/bayesian_optimization.py +++ b/bayes_opt/bayesian_optimization.py @@ -12,7 +12,6 @@ from os import PathLike from pathlib import Path from typing import TYPE_CHECKING, Any -from warnings import warn import numpy as np from scipy.optimize import NonlinearConstraint @@ -138,8 +137,6 @@ def __init__( raise TypeError(msg) self._bounds_transformer.initialize(self._space) - self._sorting_warning_already_shown = False # TODO: remove in future version - # Initialize logger self.logger = ScreenLogger(verbose=self._verbose, is_constrained=self.is_constrained) @@ -278,17 +275,6 @@ def register( constraint_value: float or None Value of the constraint function at the observation, if any. """ - # TODO: remove in future version - if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown: - msg = ( - "You're attempting to register an np.ndarray. In previous versions, the optimizer internally" - " sorted parameters by key and expected any registered array to respect this order." - " In the current and any future version the order as given by the pbounds dictionary will be" - " used. If you wish to retain sorted parameters, please manually sort your pbounds" - " dictionary before constructing the optimizer." - ) - warn(msg, stacklevel=1) - self._sorting_warning_already_shown = True self._space.register(params, target, constraint_value) self.logger.log_optimization_step( self._space.keys, self._space.res()[-1], self._space.params_config, self.max @@ -308,18 +294,6 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None: If True, the optimizer will evaluate the points when calling maximize(). Otherwise it will evaluate it at the moment. """ - # TODO: remove in future version - if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown: - msg = ( - "You're attempting to register an np.ndarray. In previous versions, the optimizer internally" - " sorted parameters by key and expected any registered array to respect this order." - " In the current and any future version the order as given by the pbounds dictionary will be" - " used. If you wish to retain sorted parameters, please manually sort your pbounds" - " dictionary before constructing the optimizer." - ) - warn(msg, stacklevel=1) - self._sorting_warning_already_shown = True - params = self._space.array_to_params(params) if lazy: self._queue.append(params) else: diff --git a/tests/test_bayesian_optimization.py b/tests/test_bayesian_optimization.py index 3bc3f2b5..7fb9f4de 100644 --- a/tests/test_bayesian_optimization.py +++ b/tests/test_bayesian_optimization.py @@ -1,6 +1,7 @@ from __future__ import annotations import pickle +import warnings from pathlib import Path import numpy as np @@ -97,6 +98,28 @@ def test_register(): optimizer.register(params={"p1": 5, "p2": 4}, target=9) +def test_register_array_uses_pbounds_order_without_warning(): + optimizer = BayesianOptimization(target_func, {"p1": (0, 10), "p2": (0, 10)}, random_state=1) + + with warnings.catch_warnings(record=True) as caught: + warnings.simplefilter("always") + optimizer.register(params=np.array([1, 2]), target=3) + + assert caught == [] + assert optimizer.space.array_to_params(optimizer.space.params[0]) == {"p1": 1.0, "p2": 2.0} + + +def test_probe_array_uses_pbounds_order_without_warning(): + optimizer = BayesianOptimization(target_func, {"p1": (0, 10), "p2": (0, 10)}, random_state=1) + + with warnings.catch_warnings(record=True) as caught: + warnings.simplefilter("always") + optimizer.probe(params=np.array([1, 2]), lazy=False) + + assert caught == [] + assert optimizer.space.array_to_params(optimizer.space.params[0]) == {"p1": 1.0, "p2": 2.0} + + def test_probe_lazy(): optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)