
Commit 2c1308a

Merge pull request #5 from fmfn/master
update from main
2 parents 3e55ac6 + 698ca60 commit 2c1308a

20 files changed: +316 -237 lines changed

.github/workflows/run_tests.yml

Lines changed: 9 additions & 3 deletions
@@ -1,7 +1,6 @@
 # This workflow will install Python dependencies, run tests and lint with a single version of Python
 # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
 
-
 name: tests
 
 on:
@@ -27,14 +26,21 @@ jobs:
       uses: actions/setup-python@v3
       with:
         python-version: ${{ matrix.python-version }}
-    - name: Install dependencies
+    - name: Install test dependencies
       run: |
         python -m pip install --upgrade pip
         pip install pytest
         pip install pytest-cov
         pip install coverage
+    - name: Install notebook dependencies
+      run: |
+        pip install nbformat
+        pip install nbconvert
+        pip install jupyter
+        pip install matplotlib
+    - name: Install package
+      run: |
         pip install -e .
-
     - name: Test with pytest
       run: |
         pytest --cov-report xml --cov=bayes_opt/

README.md

Lines changed: 1 addition & 1 deletion
@@ -253,7 +253,7 @@ optimizer.maximize(
 )
 ```
 
-By default the previous data in the json file is removed. If you want to keep working with the same logger, the `reset` paremeter in `JSONLogger` should be set to False.
+By default the previous data in the json file is removed. If you want to keep working with the same logger, the `reset` parameter in `JSONLogger` should be set to False.
 
 ### 4.2 Loading progress
 

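To make the corrected sentence concrete, here is a minimal sketch of re-attaching a logger without wiping an existing log file; the imports and the `path` argument follow the logger usage shown elsewhere in the README, and an already-constructed `optimizer` is assumed.

    from bayes_opt.logger import JSONLogger
    from bayes_opt.event import Events

    # Keep appending to the existing ./logs.json instead of starting it over
    logger = JSONLogger(path="./logs.json", reset=False)
    optimizer.subscribe(Events.OPTIMIZATION_STEP, logger)
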
bayes_opt/bayesian_optimization.py

Lines changed: 3 additions & 3 deletions
@@ -95,7 +95,7 @@ class BayesianOptimization(Observable):
         If True, the optimizer will allow duplicate points to be registered.
         This behavior may be desired in high noise situations where repeatedly probing
         the same point will give different answers. In other situations, the acquisition
-        may occasionaly generate a duplicate point.
+        may occasionally generate a duplicate point.
 
     Methods
     -------
@@ -226,7 +226,7 @@ def suggest(self, utility_function):
         suggestion = acq_max(ac=utility_function.utility,
                              gp=self._gp,
                              constraint=self.constraint,
-                             y_max=self._space.target.max(),
+                             y_max=self._space._target_max(),
                              bounds=self._space.bounds,
                              random_state=self._random_state)
 
@@ -276,7 +276,7 @@ def maximize(self,
             An instance of bayes_opt.util.UtilityFunction.
             If nothing is passed, a default using ucb is used
 
-        All other parameters are unused, and are only available to ensure backwards compatability - these
+        All other parameters are unused, and are only available to ensure backwards compatibility - these
             will be removed in a future release
         """
         self._prime_subscriptions()

bayes_opt/domain_reduction.py

Lines changed: 4 additions & 4 deletions
@@ -19,7 +19,7 @@ def transform(self, target_space: TargetSpace):
 
 class SequentialDomainReductionTransformer(DomainTransformer):
     """
-    A sequential domain reduction transformer bassed on the work by Stander, N. and Craig, K:
+    A sequential domain reduction transformer based on the work by Stander, N. and Craig, K:
     "On the robustness of a simple domain reduction scheme for simulation‐based optimization"
     """
 
@@ -68,8 +68,8 @@ def initialize(self, target_space: TargetSpace) -> None:
 
         self.r = self.contraction_rate * self.r
 
-        # check if the minimum window fits in the orignal bounds
-        self._window_bounds_compatiblity(self.original_bounds)
+        # check if the minimum window fits in the original bounds
+        self._window_bounds_compatibility(self.original_bounds)
 
     def _update(self, target_space: TargetSpace) -> None:
 
@@ -121,7 +121,7 @@ def _trim(self, new_bounds: np.array, global_bounds: np.array) -> np.array:
                 new_bounds[i, 1] += ddw_r
         return new_bounds
 
-    def _window_bounds_compatiblity(self, global_bounds: np.array) -> bool:
+    def _window_bounds_compatibility(self, global_bounds: np.array) -> bool:
         """Checks if global bounds are compatible with the minimum window sizes."""
         for i, entry in enumerate(global_bounds):
             global_window_width = abs(entry[1] - entry[0])

bayes_opt/logger.py

Lines changed: 1 addition & 1 deletion
@@ -106,7 +106,7 @@ def _header(self, instance):
         return line + "\n" + ("-" * self._header_length)
 
     def _is_new_max(self, instance):
-        if instance.max["target"] is None:
+        if instance.max is None:
             # During constrained optimization, there might not be a maximum
             # value since the optimizer might've not encountered any points
             # that fulfill the constraints.

bayes_opt/observer.py

Lines changed: 4 additions & 0 deletions
@@ -23,8 +23,12 @@ def __init__(self):
     def _update_tracker(self, event, instance):
         if event == Events.OPTIMIZATION_STEP:
             self._iterations += 1
+
+            if instance.max is None:
+                return
 
             current_max = instance.max
+
             if (self._previous_max is None
                     or current_max["target"] > self._previous_max):
                 self._previous_max = current_max["target"]

bayes_opt/target_space.py

Lines changed: 36 additions & 36 deletions
@@ -1,5 +1,3 @@
-import warnings
-
 import numpy as np
 from .util import ensure_rng, NotUniqueError
 from .util import Colours
@@ -214,7 +212,7 @@ def register(self, params, target, constraint_value=None):
 
     def probe(self, params):
         """
-        Evaulates a single point x, to obtain the value y and then records them
+        Evaluates a single point x, to obtain the value y and then records them
         as observations.
 
         Notes
@@ -265,44 +263,46 @@ def random_sample(self):
             data.T[col] = self.random_state.uniform(lower, upper, size=1)
         return data.ravel()
 
+    def _target_max(self):
+        """Get maximum target value found.
+
+        If there is a constraint present, the maximum value that fulfills the
+        constraint is returned."""
+        if len(self.target) == 0:
+            return None
+
+        if self._constraint is None:
+            return self.target.max()
+
+        allowed = self._constraint.allowed(self._constraint_values)
+        if allowed.any():
+            return self.target[allowed].max()
+
+        return None
+
     def max(self):
         """Get maximum target value found and corresponding parameters.
 
         If there is a constraint present, the maximum value that fulfills the
         constraint is returned."""
-        if self._constraint is None:
-            try:
-                res = {
-                    'target': self.target.max(),
-                    'params': dict(
-                        zip(self.keys, self.params[self.target.argmax()])
-                    )
-                }
-            except ValueError:
-                res = {}
-            return res
-        else:
-            allowed = self._constraint.allowed(self._constraint_values)
-            if allowed.any():
-                # Getting of all points that fulfill the constraints, find the
-                # one with the maximum value for the target function.
-                sorted = np.argsort(self.target)
-                idx = sorted[allowed[sorted]][-1]
-                # there must be a better way to do this, right?
-                res = {
-                    'target': self.target[idx],
-                    'params': dict(
-                        zip(self.keys, self.params[idx])
-                    ),
-                    'constraint': self._constraint_values[idx]
-                }
-            else:
-                res = {
-                    'target': None,
-                    'params': None,
-                    'constraint': None
-                }
-        return res
+        target_max = self._target_max()
+
+        if target_max is None:
+            return None
+
+        target_max_idx = np.where(self.target == target_max)[0][0]
+
+        res = {
+            'target': target_max,
+            'params': dict(
+                zip(self.keys, self.params[target_max_idx])
+            )
+        }
+
+        if self._constraint is not None:
+            res['constraint'] = self._constraint_values[target_max_idx]
+
+        return res
 
     def res(self):
         """Get all target values and constraint fulfillment for all parameters.

bayes_opt/util.py

Lines changed: 36 additions & 21 deletions
@@ -44,38 +44,53 @@ def acq_max(ac, gp, y_max, bounds, random_state, constraint=None, n_warmup=10000
     :return: x_max, The arg max of the acquisition function.
     """
 
+    # We need to adjust the acquisition function to deal with constraints when there is some
+    if constraint is not None:
+        def adjusted_ac(x):
+            """Acquisition function adjusted to fulfill the constraint when necessary"""
+
+            # Transforms the problem in a minimization problem, this is necessary
+            # because the solver we are using later on is a minimizer
+            values = -ac(x.reshape(-1, bounds.shape[0]), gp=gp, y_max=y_max)
+            p_constraints = constraint.predict(x.reshape(-1, bounds.shape[0]))
+
+            # Slower fallback for the case where any values are negative
+            if np.any(values > 0):
+                # TODO: This is not exactly how Gardner et al do it.
+                # Their way would require the result of the acquisition function
+                # to be strictly positive, which is not the case here. For a
+                # positive target value, we use Gardner's version. If the target
+                # is negative, we instead slightly rescale the target depending
+                # on the probability estimate to fulfill the constraint.
+                return np.array(
+                    [
+                        value / (0.5 + 0.5 * p) if value > 0 else value * p
+                        for value, p in zip(values, p_constraints)
+                    ]
+                )
+
+            # Faster, vectorized version of Gardner et al's method
+            return values * p_constraints
+
+    else:
+        # Transforms the problem in a minimization problem, this is necessary
+        # because the solver we are using later on is a minimizer
+        adjusted_ac = lambda x: -ac(x.reshape(-1, bounds.shape[0]), gp=gp, y_max=y_max)
+
     # Warm up with random points
     x_tries = random_state.uniform(bounds[:, 0], bounds[:, 1],
                                    size=(n_warmup, bounds.shape[0]))
-    ys = ac(x_tries, gp=gp, y_max=y_max)
+    ys = -adjusted_ac(x_tries)
     x_max = x_tries[ys.argmax()]
     max_acq = ys.max()
 
-    # Explore the parameter space more throughly
+    # Explore the parameter space more thoroughly
    x_seeds = random_state.uniform(bounds[:, 0], bounds[:, 1],
                                   size=(n_iter, bounds.shape[0]))
 
-    if constraint is not None:
-        def to_minimize(x):
-            target = -ac(x.reshape(1, -1), gp=gp, y_max=y_max)
-            p_constraint = constraint.predict(x.reshape(1, -1))
-
-            # TODO: This is not exactly how Gardner et al do it.
-            # Their way would require the result of the acquisition function
-            # to be strictly positive (or negative), which is not the case
-            # here. For a negative target value, we use Gardner's version. If
-            # the target is positive, we instead slightly rescale the target
-            # depending on the probability estimate to fulfill the constraint.
-            if target < 0:
-                return target * p_constraint
-            else:
-                return target / (0.5 + p_constraint)
-    else:
-        to_minimize = lambda x: -ac(x.reshape(1, -1), gp=gp, y_max=y_max)
-
     for x_try in x_seeds:
         # Find the minimum of minus the acquisition function
-        res = minimize(lambda x: to_minimize(x),
+        res = minimize(adjusted_ac,
                        x_try,
                        bounds=bounds,
                        method="L-BFGS-B")

examples/advanced-tour.ipynb

Lines changed: 3 additions & 2 deletions
@@ -33,7 +33,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Let's start by defining our function, bounds, and instanciating an optimization object.\n",
+    "# Let's start by defining our function, bounds, and instantiating an optimization object.\n",
     "def black_box_function(x, y):\n",
     "    return -x ** 2 - (y - 1) ** 2 + 1"
    ]
@@ -347,12 +347,13 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "### 3.3 Changing kernels\n",
     "\n",
-    "By default this package uses the Mattern 2.5 kernel. Depending on your use case you may find that tunning the GP kernel could be beneficial. You're on your own here since these are very specific solutions to very specific problems."
+    "By default this package uses the Matern 2.5 kernel. Depending on your use case you may find that tunning the GP kernel could be beneficial. You're on your own here since these are very specific solutions to very specific problems."
    ]
   },
   {
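
For the "Changing kernels" cell above, a minimal sketch of swapping out the default Matern 2.5 kernel; it assumes the optimizer exposes `set_gp_params`, which forwards keyword arguments to the underlying scikit-learn `GaussianProcessRegressor`:

    from sklearn.gaussian_process.kernels import Matern

    # Hypothetical choice of a rougher kernel; nu and alpha are illustrative values
    optimizer.set_gp_params(kernel=Matern(nu=1.5), alpha=1e-5)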

examples/basic-tour.ipynb

Lines changed: 10 additions & 6 deletions
@@ -16,12 +16,13 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "## 1. Specifying the function to be optimized\n",
     "\n",
-    "This is a function optimization package, therefore the first and most important ingreedient is, of course, the function to be optimized.\n",
+    "This is a function optimization package, therefore the first and most important ingredient is, of course, the function to be optimized.\n",
     "\n",
     "**DISCLAIMER:** We know exactly how the output of the function below depends on its parameter. Obviously this is just an example, and you shouldn't expect to know it in a real scenario. However, it should be clear that you don't need to. All you need in order to use this package (and more generally, this technique) is a function `f` that takes a known set of parameters and outputs a real number."
    ]
@@ -43,12 +44,13 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "## 2. Getting Started\n",
     "\n",
-    "All we need to get started is to instanciate a `BayesianOptimization` object specifying a function to be optimized `f`, and its parameters with their corresponding bounds, `pbounds`. This is a constrained optimization technique, so you must specify the minimum and maximum values that can be probed for each parameter in order for it to work"
+    "All we need to get started is to instantiate a `BayesianOptimization` object specifying a function to be optimized `f`, and its parameters with their corresponding bounds, `pbounds`. This is a constrained optimization technique, so you must specify the minimum and maximum values that can be probed for each parameter in order for it to work"
    ]
   },
   {
@@ -306,12 +308,13 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "## 4. Saving, loading and restarting\n",
     "\n",
-    "By default you can follow the progress of your optimization by setting `verbose>0` when instanciating the `BayesianOptimization` object. If you need more control over logging/alerting you will need to use an observer. For more information about observers checkout the advanced tour notebook. Here we will only see how to use the native `JSONLogger` object to save to and load progress from files.\n",
+    "By default you can follow the progress of your optimization by setting `verbose>0` when instantiating the `BayesianOptimization` object. If you need more control over logging/alerting you will need to use an observer. For more information about observers checkout the advanced tour notebook. Here we will only see how to use the native `JSONLogger` object to save to and load progress from files.\n",
     "\n",
     "### 4.1 Saving progress"
    ]
@@ -327,14 +330,15 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "The observer paradigm works by:\n",
     "1. Instantiating an observer object.\n",
     "2. Tying the observer object to a particular event fired by an optimizer.\n",
     "\n",
-    "The `BayesianOptimization` object fires a number of internal events during optimization, in particular, everytime it probes the function and obtains a new parameter-target combination it will fire an `Events.OPTIMIZATION_STEP` event, which our logger will listen to.\n",
+    "The `BayesianOptimization` object fires a number of internal events during optimization, in particular, every time it probes the function and obtains a new parameter-target combination it will fire an `Events.OPTIMIZATION_STEP` event, which our logger will listen to.\n",
     "\n",
     "**Caveat:** The logger will not look back at previously probed points."
    ]
@@ -487,7 +491,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
@@ -501,7 +505,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.5.2"
+   "version": "3.9.6"
   }
  },
 "nbformat": 4,
