Skip to content

Commit 30e9abb

Browse files
committed
deploy: 0ef608f
1 parent 9a3ff65 commit 30e9abb

38 files changed

+8027
-453
lines changed
16.8 KB
Loading
25.9 KB
Loading
74 KB
Loading

master/_sources/advanced-tour.ipynb.txt

Lines changed: 28 additions & 133 deletions
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@
9696
"name": "stdout",
9797
"output_type": "stream",
9898
"text": [
99-
"Next point to probe is: {'x': -0.331911981189704, 'y': 1.3219469606529486}\n"
99+
"Next point to probe is: {'x': np.float64(-0.331911981189704), 'y': np.float64(1.3219469606529486)}\n"
100100
]
101101
}
102102
],
@@ -167,12 +167,12 @@
167167
"name": "stdout",
168168
"output_type": "stream",
169169
"text": [
170-
"-18.503835804889988 {'x': 1.953072105336, 'y': -2.9609778030491904}\n",
171-
"-1.0819533157901717 {'x': 0.22703572807626315, 'y': 2.4249238905875123}\n",
172-
"-6.50219704520679 {'x': -1.9991881984624875, 'y': 2.872282989383577}\n",
173-
"-5.747604713731052 {'x': -1.994467585936897, 'y': -0.664242699361514}\n",
174-
"-2.9682431497650823 {'x': 1.9737252084307952, 'y': 1.269540259274744}\n",
175-
"{'target': 0.7861845912690544, 'params': {'x': -0.331911981189704, 'y': 1.3219469606529486}}\n"
170+
"-18.707136686093495 {'x': np.float64(1.9261486197444082), 'y': np.float64(-2.9996360060323246)}\n",
171+
"0.750594563473972 {'x': np.float64(-0.3763326769822668), 'y': np.float64(1.328297354179696)}\n",
172+
"-6.559031075654336 {'x': np.float64(1.979183535803597), 'y': np.float64(2.9083667381450318)}\n",
173+
"-6.915481333972961 {'x': np.float64(-1.9686133847781613), 'y': np.float64(-1.009985740060171)}\n",
174+
"-6.8600832617014085 {'x': np.float64(-1.9763198875239296), 'y': np.float64(2.9885278383464513)}\n",
175+
"{'target': np.float64(0.7861845912690544), 'params': {'x': np.float64(-0.331911981189704), 'y': np.float64(1.3219469606529486)}}\n"
176176
]
177177
}
178178
],
@@ -190,112 +190,7 @@
190190
"cell_type": "markdown",
191191
"metadata": {},
192192
"source": [
193-
"## 2. Dealing with discrete parameters\n",
194-
"\n",
195-
"**There is no principled way of dealing with discrete parameters using this package.**\n",
196-
"\n",
197-
"Ok, now that we got that out of the way, how do you do it? You're bound to be in a situation where some of your function's parameters may only take on discrete values. Unfortunately, the nature of bayesian optimization with gaussian processes doesn't allow for an easy/intuitive way of dealing with discrete parameters - but that doesn't mean it is impossible. The example below showcases a simple, yet reasonably adequate, way to dealing with discrete parameters."
198-
]
199-
},
200-
{
201-
"cell_type": "code",
202-
"execution_count": 9,
203-
"metadata": {},
204-
"outputs": [],
205-
"source": [
206-
"def func_with_discrete_params(x, y, d):\n",
207-
" # Simulate necessity of having d being discrete.\n",
208-
" assert type(d) == int\n",
209-
" \n",
210-
" return ((x + y + d) // (1 + d)) / (1 + (x + y) ** 2)"
211-
]
212-
},
213-
{
214-
"cell_type": "code",
215-
"execution_count": 10,
216-
"metadata": {},
217-
"outputs": [],
218-
"source": [
219-
"def function_to_be_optimized(x, y, w):\n",
220-
" d = int(w)\n",
221-
" return func_with_discrete_params(x, y, d)"
222-
]
223-
},
224-
{
225-
"cell_type": "code",
226-
"execution_count": 11,
227-
"metadata": {},
228-
"outputs": [],
229-
"source": [
230-
"optimizer = BayesianOptimization(\n",
231-
" f=function_to_be_optimized,\n",
232-
" pbounds={'x': (-10, 10), 'y': (-10, 10), 'w': (0, 5)},\n",
233-
" verbose=2,\n",
234-
" random_state=1,\n",
235-
")"
236-
]
237-
},
238-
{
239-
"cell_type": "code",
240-
"execution_count": 12,
241-
"metadata": {},
242-
"outputs": [
243-
{
244-
"name": "stdout",
245-
"output_type": "stream",
246-
"text": [
247-
"| iter | target | w | x | y |\n",
248-
"-------------------------------------------------------------\n",
249-
"| \u001b[30m1 | \u001b[30m-0.06199 | \u001b[30m2.085 | \u001b[30m4.406 | \u001b[30m-9.998 |\n",
250-
"| \u001b[35m2 | \u001b[35m-0.0344 | \u001b[35m1.512 | \u001b[35m-7.065 | \u001b[35m-8.153 |\n",
251-
"| \u001b[30m3 | \u001b[30m-0.2177 | \u001b[30m0.9313 | \u001b[30m-3.089 | \u001b[30m-2.065 |\n",
252-
"| \u001b[35m4 | \u001b[35m0.1865 | \u001b[35m2.694 | \u001b[35m-1.616 | \u001b[35m3.704 |\n",
253-
"| \u001b[30m5 | \u001b[30m-0.2187 | \u001b[30m1.022 | \u001b[30m7.562 | \u001b[30m-9.452 |\n",
254-
"| \u001b[35m6 | \u001b[35m0.2488 | \u001b[35m2.684 | \u001b[35m-2.188 | \u001b[35m3.925 |\n"
255-
]
256-
},
257-
{
258-
"name": "stdout",
259-
"output_type": "stream",
260-
"text": [
261-
"| \u001b[35m7 | \u001b[35m0.2948 | \u001b[35m2.683 | \u001b[35m-2.534 | \u001b[35m4.08 |\n",
262-
"| \u001b[35m8 | \u001b[35m0.3202 | \u001b[35m2.514 | \u001b[35m-3.83 | \u001b[35m5.287 |\n",
263-
"| \u001b[30m9 | \u001b[30m0.0 | \u001b[30m4.057 | \u001b[30m-4.458 | \u001b[30m3.928 |\n",
264-
"| \u001b[35m10 | \u001b[35m0.4802 | \u001b[35m2.296 | \u001b[35m-3.518 | \u001b[35m4.558 |\n",
265-
"| \u001b[30m11 | \u001b[30m0.0 | \u001b[30m1.084 | \u001b[30m-3.737 | \u001b[30m4.472 |\n",
266-
"| \u001b[30m12 | \u001b[30m0.0 | \u001b[30m2.649 | \u001b[30m-3.861 | \u001b[30m4.353 |\n",
267-
"| \u001b[30m13 | \u001b[30m0.0 | \u001b[30m2.442 | \u001b[30m-3.658 | \u001b[30m4.599 |\n",
268-
"| \u001b[30m14 | \u001b[30m-0.05801 | \u001b[30m1.935 | \u001b[30m-0.4758 | \u001b[30m-8.755 |\n",
269-
"| \u001b[30m15 | \u001b[30m0.0 | \u001b[30m2.337 | \u001b[30m7.973 | \u001b[30m-8.96 |\n",
270-
"| \u001b[30m16 | \u001b[30m0.07699 | \u001b[30m0.6926 | \u001b[30m5.59 | \u001b[30m6.854 |\n",
271-
"| \u001b[30m17 | \u001b[30m-0.02025 | \u001b[30m3.534 | \u001b[30m-8.943 | \u001b[30m1.987 |\n",
272-
"| \u001b[30m18 | \u001b[30m0.0 | \u001b[30m2.59 | \u001b[30m-7.339 | \u001b[30m5.941 |\n",
273-
"| \u001b[30m19 | \u001b[30m0.0929 | \u001b[30m2.237 | \u001b[30m-4.535 | \u001b[30m9.065 |\n",
274-
"| \u001b[30m20 | \u001b[30m0.1538 | \u001b[30m0.477 | \u001b[30m2.931 | \u001b[30m2.683 |\n",
275-
"| \u001b[30m21 | \u001b[30m0.0 | \u001b[30m0.9999 | \u001b[30m4.397 | \u001b[30m-3.971 |\n",
276-
"| \u001b[30m22 | \u001b[30m-0.01894 | \u001b[30m3.764 | \u001b[30m-7.043 | \u001b[30m-3.184 |\n",
277-
"| \u001b[30m23 | \u001b[30m0.03683 | \u001b[30m1.851 | \u001b[30m5.783 | \u001b[30m7.966 |\n",
278-
"| \u001b[30m24 | \u001b[30m-0.04359 | \u001b[30m1.615 | \u001b[30m-5.133 | \u001b[30m-6.556 |\n",
279-
"| \u001b[30m25 | \u001b[30m0.02617 | \u001b[30m3.863 | \u001b[30m0.1052 | \u001b[30m8.579 |\n",
280-
"| \u001b[30m26 | \u001b[30m-0.1071 | \u001b[30m0.8131 | \u001b[30m-0.7949 | \u001b[30m-9.292 |\n",
281-
"| \u001b[30m27 | \u001b[30m0.0 | \u001b[30m4.969 | \u001b[30m8.778 | \u001b[30m-8.467 |\n",
282-
"| \u001b[30m28 | \u001b[30m-0.1372 | \u001b[30m0.9475 | \u001b[30m-1.019 | \u001b[30m-7.018 |\n",
283-
"| \u001b[30m29 | \u001b[30m0.08078 | \u001b[30m1.917 | \u001b[30m-0.2606 | \u001b[30m6.272 |\n",
284-
"| \u001b[30m30 | \u001b[30m0.02003 | \u001b[30m4.278 | \u001b[30m3.8 | \u001b[30m8.398 |\n",
285-
"=============================================================\n"
286-
]
287-
}
288-
],
289-
"source": [
290-
"optimizer.set_gp_params(alpha=1e-3)\n",
291-
"optimizer.maximize()"
292-
]
293-
},
294-
{
295-
"cell_type": "markdown",
296-
"metadata": {},
297-
"source": [
298-
"## 3. Tuning the underlying Gaussian Process\n",
193+
"## 2. Tuning the underlying Gaussian Process\n",
299194
"\n",
300195
"The bayesian optimization algorithm works by performing a gaussian process regression of the observed combination of parameters and their associated target values. The predicted parameter $\\rightarrow$ target hyper-surface (and its uncertainty) is then used to guide the next best point to probe."
301196
]
@@ -304,14 +199,14 @@
304199
"cell_type": "markdown",
305200
"metadata": {},
306201
"source": [
307-
"### 3.1 Passing parameter to the GP\n",
202+
"### 2.1 Passing parameters to the GP\n",
308203
"\n",
309204
"Depending on the problem it could be beneficial to change the default parameters of the underlying GP. You can use the `optimizer.set_gp_params` method to do this:"
310205
]
311206
},
312207
{
313208
"cell_type": "code",
314-
"execution_count": 13,
209+
"execution_count": 9,
315210
"metadata": {},
316211
"outputs": [
317212
{
@@ -320,12 +215,12 @@
320215
"text": [
321216
"| iter | target | x | y |\n",
322217
"-------------------------------------------------\n",
323-
"| \u001b[30m1 | \u001b[30m0.7862 | \u001b[30m-0.3319 | \u001b[30m1.322 |\n",
324-
"| \u001b[30m2 | \u001b[30m-18.19 | \u001b[30m1.957 | \u001b[30m-2.919 |\n",
325-
"| \u001b[30m3 | \u001b[30m-12.05 | \u001b[30m-1.969 | \u001b[30m-2.029 |\n",
326-
"| \u001b[30m4 | \u001b[30m-7.463 | \u001b[30m0.6032 | \u001b[30m-1.846 |\n",
327-
"| \u001b[30m5 | \u001b[30m-1.093 | \u001b[30m1.444 | \u001b[30m1.096 |\n",
328-
"| \u001b[35m6 | \u001b[35m0.8586 | \u001b[35m-0.2165 | \u001b[35m1.307 |\n",
218+
"| \u001b[39m1 \u001b[39m | \u001b[39m0.7862 \u001b[39m | \u001b[39m-0.331911\u001b[39m | \u001b[39m1.3219469\u001b[39m |\n",
219+
"| \u001b[39m2 \u001b[39m | \u001b[39m-18.34 \u001b[39m | \u001b[39m1.9021640\u001b[39m | \u001b[39m-2.965222\u001b[39m |\n",
220+
"| \u001b[35m3 \u001b[39m | \u001b[35m0.8731 \u001b[39m | \u001b[35m-0.298167\u001b[39m | \u001b[35m1.1948749\u001b[39m |\n",
221+
"| \u001b[39m4 \u001b[39m | \u001b[39m-6.497 \u001b[39m | \u001b[39m1.9876938\u001b[39m | \u001b[39m2.8830942\u001b[39m |\n",
222+
"| \u001b[39m5 \u001b[39m | \u001b[39m-4.286 \u001b[39m | \u001b[39m-1.995643\u001b[39m | \u001b[39m-0.141769\u001b[39m |\n",
223+
"| \u001b[39m6 \u001b[39m | \u001b[39m-6.781 \u001b[39m | \u001b[39m-1.953302\u001b[39m | \u001b[39m2.9913127\u001b[39m |\n",
329224
"=================================================\n"
330225
]
331226
}
@@ -348,7 +243,7 @@
348243
"cell_type": "markdown",
349244
"metadata": {},
350245
"source": [
351-
"### 3.2 Tuning the `alpha` parameter\n",
246+
"### 2.2 Tuning the `alpha` parameter\n",
352247
"\n",
353248
"When dealing with functions with discrete parameters, or a particularly erratic target space, it might be beneficial to increase the value of the `alpha` parameter. This parameter controls how much noise the GP can handle, so increase it whenever you think that extra flexibility is needed."
354249
]
@@ -358,7 +253,7 @@
358253
"cell_type": "markdown",
359254
"metadata": {},
360255
"source": [
361-
"### 3.3 Changing kernels\n",
256+
"### 2.3 Changing kernels\n",
362257
"\n",
363258
"By default this package uses the Matern 2.5 kernel. Depending on your use case you may find that tuning the GP kernel could be beneficial. You're on your own here since these are very specific solutions to very specific problems. You should start with the [scikit learn docs](https://scikit-learn.org/stable/modules/gaussian_process.html#kernels-for-gaussian-processes)."
364259
]
@@ -376,7 +271,7 @@
376271
},
377272
{
378273
"cell_type": "code",
379-
"execution_count": 14,
274+
"execution_count": 10,
380275
"metadata": {},
381276
"outputs": [],
382277
"source": [
@@ -385,7 +280,7 @@
385280
},
386281
{
387282
"cell_type": "code",
388-
"execution_count": 15,
283+
"execution_count": 11,
389284
"metadata": {},
390285
"outputs": [],
391286
"source": [
@@ -399,7 +294,7 @@
399294
},
400295
{
401296
"cell_type": "code",
402-
"execution_count": 16,
297+
"execution_count": 12,
403298
"metadata": {},
404299
"outputs": [],
405300
"source": [
@@ -411,7 +306,7 @@
411306
},
412307
{
413308
"cell_type": "code",
414-
"execution_count": 17,
309+
"execution_count": 13,
415310
"metadata": {},
416311
"outputs": [],
417312
"source": [
@@ -433,7 +328,7 @@
433328
},
434329
{
435330
"cell_type": "code",
436-
"execution_count": 18,
331+
"execution_count": 14,
437332
"metadata": {},
438333
"outputs": [],
439334
"source": [
@@ -449,7 +344,7 @@
449344
},
450345
{
451346
"cell_type": "code",
452-
"execution_count": 19,
347+
"execution_count": 15,
453348
"metadata": {},
454349
"outputs": [
455350
{
@@ -476,7 +371,7 @@
476371
},
477372
{
478373
"cell_type": "code",
479-
"execution_count": 20,
374+
"execution_count": 16,
480375
"metadata": {},
481376
"outputs": [
482377
{
@@ -485,7 +380,7 @@
485380
"['optimization:start', 'optimization:step', 'optimization:end']"
486381
]
487382
},
488-
"execution_count": 20,
383+
"execution_count": 16,
489384
"metadata": {},
490385
"output_type": "execute_result"
491386
}
@@ -497,7 +392,7 @@
497392
],
498393
"metadata": {
499394
"kernelspec": {
500-
"display_name": "Python 3 (ipykernel)",
395+
"display_name": "bayesian-optimization-t6LLJ9me-py3.10",
501396
"language": "python",
502397
"name": "python3"
503398
},
@@ -511,7 +406,7 @@
511406
"name": "python",
512407
"nbconvert_exporter": "python",
513408
"pygments_lexer": "ipython3",
514-
"version": "3.1.undefined"
409+
"version": "3.10.13"
515410
},
516411
"nbdime-conflicts": {
517412
"local_diff": [

master/_sources/basic-tour.ipynb.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -252,7 +252,7 @@
252252
"cell_type": "markdown",
253253
"metadata": {},
254254
"source": [
255-
"Or as an iterable. Beware that the order has to be alphabetical. You can usee `optimizer.space.keys` for guidance"
255+
"Or as an iterable. Beware that the order has to match the order of the initial `pbounds` dictionary. You can use `optimizer.space.keys` for guidance"
256256
]
257257
},
258258
{

master/_sources/index.rst.txt

Lines changed: 20 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
Basic Tour </basic-tour>
1212
Advanced Tour </advanced-tour>
1313
Constrained Bayesian Optimization </constraints>
14+
Parameter Types </parameter_types>
1415
Sequential Domain Reduction </domain_reduction>
1516
Acquisition Functions </acquisition_functions>
1617
Exploration vs. Exploitation </exploitation_vs_exploration>
@@ -26,6 +27,7 @@
2627
reference/constraint
2728
reference/domain_reduction
2829
reference/target_space
30+
reference/parameter
2931
reference/exception
3032
reference/other
3133

@@ -121,11 +123,13 @@ section. We suggest that you:
121123
to learn how to use the package's most important features.
122124
- Take a look at the `advanced tour
123125
notebook <advanced-tour.html>`__
124-
to learn how to make the package more flexible, how to deal with
125-
categorical parameters, how to use observers, and more.
126+
to learn how to make the package more flexible or how to use observers.
126127
- To learn more about acquisition functions, a central building block
127128
of bayesian optimization, see the `acquisition functions
128129
notebook <acquisition_functions.html>`__
130+
- If you want to optimize over integer-valued or categorical
131+
parameters, see the `parameter types
132+
notebook <parameter_types.html>`__.
129133
- Check out this
130134
`notebook <visualization.html>`__
131135
with a step by step visualization of how this method works.
@@ -195,6 +199,20 @@ For constrained optimization:
195199
year={2014}
196200
}
197201

202+
For optimization over non-float parameters:
203+
204+
::
205+
206+
@article{garrido2020dealing,
207+
title={Dealing with categorical and integer-valued variables in bayesian optimization with gaussian processes},
208+
author={Garrido-Merch{\'a}n, Eduardo C and Hern{\'a}ndez-Lobato, Daniel},
209+
journal={Neurocomputing},
210+
volume={380},
211+
pages={20--35},
212+
year={2020},
213+
publisher={Elsevier}
214+
}
215+
198216
.. |tests| image:: https://github.com/bayesian-optimization/BayesianOptimization/actions/workflows/run_tests.yml/badge.svg
199217
.. |Codecov| image:: https://codecov.io/github/bayesian-optimization/BayesianOptimization/badge.svg?branch=master&service=github
200218
:target: https://codecov.io/github/bayesian-optimization/BayesianOptimization?branch=master

master/_sources/parameter_types.ipynb.txt

Lines changed: 756 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
:py:mod:`bayes_opt.parameter`
2+
--------------------------------
3+
4+
.. automodule:: bayes_opt.parameter
5+
:members:

0 commit comments

Comments
 (0)