├── .coverage
├── .gitattributes
├── .travis.yml
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── docs
├── Makefile
├── build
│ ├── doctrees
│ │ ├── api.doctree
│ │ ├── comparison.doctree
│ │ ├── environment.pickle
│ │ ├── features.doctree
│ │ ├── index.doctree
│ │ ├── modules.doctree
│ │ ├── pyGPGO.GPGO.doctree
│ │ ├── pyGPGO.acquisition.doctree
│ │ ├── pyGPGO.covfunc.doctree
│ │ ├── pyGPGO.doctree
│ │ ├── pyGPGO.logger.doctree
│ │ ├── pyGPGO.surrogates.BoostedTrees.doctree
│ │ ├── pyGPGO.surrogates.GaussianProcess.doctree
│ │ ├── pyGPGO.surrogates.GaussianProcessMCMC.doctree
│ │ ├── pyGPGO.surrogates.RandomForest.doctree
│ │ ├── pyGPGO.surrogates.doctree
│ │ ├── pyGPGO.surrogates.tStudentProcess.doctree
│ │ ├── pyGPGO.surrogates.tStudentProcessMCMC.doctree
│ │ └── pyGPGO.version.doctree
│ ├── html
│ │ ├── .buildinfo
│ │ ├── .nojekyll
│ │ ├── _modules
│ │ │ ├── index.html
│ │ │ └── pyGPGO
│ │ │ │ ├── GPGO.html
│ │ │ │ ├── acquisition.html
│ │ │ │ ├── covfunc.html
│ │ │ │ ├── logger.html
│ │ │ │ └── surrogates
│ │ │ │ ├── BoostedTrees.html
│ │ │ │ ├── GaussianProcess.html
│ │ │ │ ├── GaussianProcessMCMC.html
│ │ │ │ ├── RandomForest.html
│ │ │ │ ├── tStudentProcess.html
│ │ │ │ └── tStudentProcessMCMC.html
│ │ ├── _sources
│ │ │ ├── api.rst.txt
│ │ │ ├── api.txt
│ │ │ ├── comparison.rst.txt
│ │ │ ├── features.rst.txt
│ │ │ ├── index.rst.txt
│ │ │ ├── index.txt
│ │ │ ├── modules.rst.txt
│ │ │ ├── modules.txt
│ │ │ ├── pyGPGO.GPGO.rst.txt
│ │ │ ├── pyGPGO.GPGO.txt
│ │ │ ├── pyGPGO.acquisition.rst.txt
│ │ │ ├── pyGPGO.acquisition.txt
│ │ │ ├── pyGPGO.covfunc.rst.txt
│ │ │ ├── pyGPGO.covfunc.txt
│ │ │ ├── pyGPGO.logger.rst.txt
│ │ │ ├── pyGPGO.logger.txt
│ │ │ ├── pyGPGO.rst.txt
│ │ │ ├── pyGPGO.surrogates.BoostedTrees.rst.txt
│ │ │ ├── pyGPGO.surrogates.BoostedTrees.txt
│ │ │ ├── pyGPGO.surrogates.GaussianProcess.rst.txt
│ │ │ ├── pyGPGO.surrogates.GaussianProcess.txt
│ │ │ ├── pyGPGO.surrogates.GaussianProcessMCMC.rst.txt
│ │ │ ├── pyGPGO.surrogates.GaussianProcessMCMC.txt
│ │ │ ├── pyGPGO.surrogates.RandomForest.rst.txt
│ │ │ ├── pyGPGO.surrogates.RandomForest.txt
│ │ │ ├── pyGPGO.surrogates.rst.txt
│ │ │ ├── pyGPGO.surrogates.tStudentProcess.rst.txt
│ │ │ ├── pyGPGO.surrogates.tStudentProcess.txt
│ │ │ ├── pyGPGO.surrogates.tStudentProcessMCMC.rst.txt
│ │ │ ├── pyGPGO.surrogates.tStudentProcessMCMC.txt
│ │ │ ├── pyGPGO.surrogates.txt
│ │ │ ├── pyGPGO.txt
│ │ │ ├── pyGPGO.version.rst.txt
│ │ │ └── pyGPGO.version.txt
│ │ ├── _static
│ │ │ ├── ajax-loader.gif
│ │ │ ├── basic.css
│ │ │ ├── comment-bright.png
│ │ │ ├── comment-close.png
│ │ │ ├── comment.png
│ │ │ ├── css
│ │ │ │ ├── badge_only.css
│ │ │ │ └── theme.css
│ │ │ ├── doctools.js
│ │ │ ├── down-pressed.png
│ │ │ ├── down.png
│ │ │ ├── file.png
│ │ │ ├── fonts
│ │ │ │ ├── Inconsolata-Bold.ttf
│ │ │ │ ├── Inconsolata-Regular.ttf
│ │ │ │ ├── Lato-Bold.ttf
│ │ │ │ ├── Lato-Regular.ttf
│ │ │ │ ├── RobotoSlab-Bold.ttf
│ │ │ │ ├── RobotoSlab-Regular.ttf
│ │ │ │ ├── fontawesome-webfont.eot
│ │ │ │ ├── fontawesome-webfont.svg
│ │ │ │ ├── fontawesome-webfont.ttf
│ │ │ │ └── fontawesome-webfont.woff
│ │ │ ├── jquery-1.11.1.js
│ │ │ ├── jquery-3.1.0.js
│ │ │ ├── jquery.js
│ │ │ ├── js
│ │ │ │ ├── modernizr.min.js
│ │ │ │ └── theme.js
│ │ │ ├── minus.png
│ │ │ ├── plus.png
│ │ │ ├── pygments.css
│ │ │ ├── searchtools.js
│ │ │ ├── underscore-1.3.1.js
│ │ │ ├── underscore.js
│ │ │ ├── up-pressed.png
│ │ │ ├── up.png
│ │ │ └── websupport.js
│ │ ├── api.html
│ │ ├── comparison.html
│ │ ├── features.html
│ │ ├── genindex.html
│ │ ├── index.html
│ │ ├── modules.html
│ │ ├── objects.inv
│ │ ├── py-modindex.html
│ │ ├── pyGPGO.GPGO.html
│ │ ├── pyGPGO.acquisition.html
│ │ ├── pyGPGO.covfunc.html
│ │ ├── pyGPGO.html
│ │ ├── pyGPGO.logger.html
│ │ ├── pyGPGO.surrogates.BoostedTrees.html
│ │ ├── pyGPGO.surrogates.GaussianProcess.html
│ │ ├── pyGPGO.surrogates.GaussianProcessMCMC.html
│ │ ├── pyGPGO.surrogates.RandomForest.html
│ │ ├── pyGPGO.surrogates.html
│ │ ├── pyGPGO.surrogates.tStudentProcess.html
│ │ ├── pyGPGO.surrogates.tStudentProcessMCMC.html
│ │ ├── pyGPGO.version.html
│ │ ├── search.html
│ │ └── searchindex.js
│ └── latex
│ │ ├── Makefile
│ │ ├── capt-of.sty
│ │ ├── eqparbox.sty
│ │ ├── fncychap.sty
│ │ ├── iftex.sty
│ │ ├── needspace.sty
│ │ ├── newfloat.sty
│ │ ├── pyGPGO.aux
│ │ ├── pyGPGO.idx
│ │ ├── pyGPGO.ilg
│ │ ├── pyGPGO.ind
│ │ ├── pyGPGO.log
│ │ ├── pyGPGO.out
│ │ ├── pyGPGO.pdf
│ │ ├── pyGPGO.tex
│ │ ├── pyGPGO.toc
│ │ ├── python.ist
│ │ ├── sphinx.sty
│ │ ├── sphinxhowto.cls
│ │ ├── sphinxmanual.cls
│ │ ├── tabulary.sty
│ │ ├── upquote.sty
│ │ └── {eqp@restorefont}l
└── source
│ ├── api.rst
│ ├── comparison.rst
│ ├── conf.py
│ ├── features.rst
│ ├── index.rst
│ ├── modules.rst
│ ├── pyGPGO.GPGO.rst
│ ├── pyGPGO.acquisition.rst
│ ├── pyGPGO.covfunc.rst
│ ├── pyGPGO.logger.rst
│ ├── pyGPGO.rst
│ ├── pyGPGO.surrogates.BoostedTrees.rst
│ ├── pyGPGO.surrogates.GaussianProcess.rst
│ ├── pyGPGO.surrogates.GaussianProcessMCMC.rst
│ ├── pyGPGO.surrogates.RandomForest.rst
│ ├── pyGPGO.surrogates.rst
│ ├── pyGPGO.surrogates.tStudentProcess.rst
│ ├── pyGPGO.surrogates.tStudentProcessMCMC.rst
│ └── pyGPGO.version.rst
├── examples
├── acqzoo.py
├── bayoptwork.py
├── covzoo.py
├── drawGP.py
├── example1d.py
├── example2d.py
├── exampleGBM.py
├── exampleRF.py
├── exampleint.py
├── franke.py
├── gif_gen.py
├── hyperopt.py
├── hyperpost.py
├── integratedacq.py
├── minimalexample.py
├── sineGP.py
└── sklearnexample.py
├── franke.gif
├── paper.bib
├── paper.md
├── pyGPGO
├── GPGO.py
├── __init__.py
├── __pycache__
│ ├── GPGO.cpython-36.pyc
│ ├── __init__.cpython-36.pyc
│ ├── acquisition.cpython-36.pyc
│ ├── covfunc.cpython-36.pyc
│ ├── logger.cpython-36.pyc
│ └── version.cpython-36.pyc
├── acquisition.py
├── covfunc.py
├── logger.py
├── surrogates
│ ├── BoostedTrees.py
│ ├── GaussianProcess.py
│ ├── GaussianProcessMCMC.py
│ ├── RandomForest.py
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── BoostedTrees.cpython-35.pyc
│ │ ├── BoostedTrees.cpython-36.pyc
│ │ ├── GaussianProcess.cpython-35.pyc
│ │ ├── GaussianProcess.cpython-36.pyc
│ │ ├── GaussianProcessMCMC.cpython-35.pyc
│ │ ├── GaussianProcessMCMC.cpython-36.pyc
│ │ ├── RandomForest.cpython-35.pyc
│ │ ├── RandomForest.cpython-36.pyc
│ │ ├── __init__.cpython-35.pyc
│ │ ├── __init__.cpython-36.pyc
│ │ ├── tStudentProcess.cpython-35.pyc
│ │ ├── tStudentProcess.cpython-36.pyc
│ │ ├── tStudentProcessMCMC.cpython-35.pyc
│ │ └── tStudentProcessMCMC.cpython-36.pyc
│ ├── tStudentProcess.py
│ └── tStudentProcessMCMC.py
└── version.py
├── requirements_rtd.txt
├── setup.py
├── tests
├── test_GPGO.py
├── test_acquisition.py
├── test_covfunc.py
├── test_surrogates.py
├── test_surrogates_mcmc.py
└── test_surrogates_sk.py
└── tutorials
├── hyperparam.ipynb
└── mlopt.ipynb
/.coverage:
--------------------------------------------------------------------------------
1 | !coverage.py: This is a private format, don't read it directly!{"lines":{"/home/jose/pyGPGO/tests/test_GPGO.py":[1,2,3,4,5,6,7,8,11,15,27,38,16,17,18,19,20,21,22,12,23,24,28,29,30,31,32,33,34,35,36],"/home/jose/pyGPGO/pyGPGO/__init__.py":[1],"/home/jose/pyGPGO/pyGPGO/version.py":[1],"/home/jose/pyGPGO/pyGPGO/covfunc.py":[1,2,3,6,7,8,9,10,11,12,16,35,54,55,56,85,104,141,142,143,144,145,176,201,202,232,253,286,287,316,337,369,370,371,372,373,405,425,466,467,468,469,470,502,522,563,578,580,591,611,624,638,640,651,669,74,75,76,77,78,81,82,83,165,166,167,168,169,172,173,174,221,222,223,224,225,228,229,230,305,306,307,308,309,312,313,314,393,394,395,396,397,398,401,402,403,490,491,492,493,494,495,498,499,500,581,582,583,584,587,588,589,641,642,643,644,647,648,649,101,32,102,51,192,193,194,195,196,197,198,248,249,250,251,332,333,334,335,421,422,423,518,519,520,607,608,609,667,122,123,124,125,126,127,271,272,273,274,355,356,357,358,359,360,443,450,451,452,453,454,540,547,548,549,550,551,612,613,614,615,616,128,129,130,131,133,134,135],"/home/jose/pyGPGO/pyGPGO/surrogates/__init__.py":[1],"/home/jose/pyGPGO/pyGPGO/surrogates/GaussianProcess.py":[1,2,3,4,6,7,42,56,83,111,145,167,200,228,37,38,39,40,68,69,70,71,77,78,79,80,81,219,220,221,222,223,224,225,226,239,240,241,72,73,75,180,181,128,129,130,131,134,135,136,137,139,141,142,143,182,183,184,185,186,187,190,191,193,194,195,196,197,198,51,52,53,54,74,189,162,163,164,165,97,98,99,100,101,102,103,104,105,106,107,108,109],"/home/jose/pyGPGO/pyGPGO/surrogates/GaussianProcessMCMC.py":[2,3,4,5,6,7,8,9,10,13,14,15,18,19,43,52,87,96,131,37,38,39,40,41,64,65,66,67,69,70,72,73,75,76,78,79,80,81,82,83,117,118,119,120,121,122,44,45,46,47,48,50,123,124,125,126,127,128,129,142,143,144],"/home/jose/pyGPGO/pyGPGO/surrogates/RandomForest.py":[1,2,4,5,20,37,77,93,94,109,126,166,18,31,32,33,34,35,107,120,121,122,123,124],"/home/jose/pyGPGO/pyGPGO/acquisition.py":[1,2,5,6,41,62,83,105,128,149,170,194,215,239,24,25,28,29,30,31,32,33,34,35,36,39,258,80,81,146,147,59,60,103,125,126,212,213,167,168,191,192,236,237],"/home/jose/pyGPGO/pyGPGO/GPGO.py":[1,3,4,5,7,10,11,53,72,93,112,144,154,173,39,40,41,42,43,45,46,47,48,50,51,186,187,188,82,83,84,85,62,63,64,66,67,70,86,87,88,89,90,91,189,190,191,124,125,126,127,128,129,130,131,108,109,110,132,142,192,148,149,150,151,152,193,166,167,168,169,170,171],"/home/jose/pyGPGO/pyGPGO/logger.py":[1,2,3,4,5,6,7,8,9,11,12,18,27,31,13,14,15,16,28,29,19,20,21,22,23,24,25],"/home/jose/pyGPGO/tests/test_acquisition.py":[1,2,5,6,8,9,12,13,14,16,18,19,22,28,34,23,24,25,29,30,31],"/home/jose/pyGPGO/tests/test_covfunc.py":[1,2,6,7,9,10,19,26,33,20,21,22,23,27,28,29,30],"/home/jose/pyGPGO/tests/test_surrogates.py":[1,2,3,4,7,17,33,49,59,74,8,9,10,12,13,14,18,19,20,22,23,24,26,28,29,30,34,35,36,38,39,40,42,44,45,46,50,51,52,54,55,56,60,61,62,64,65,66,67,69,70,71],"/home/jose/pyGPGO/pyGPGO/surrogates/tStudentProcess.py":[1,2,3,4,5,6,9,45,46,72,86,116,147,170,201,217,68,69,70,159,160,161,163,166,167,168,30,31,32,33,34,35,37,38,40,42,164,129,130,103,104,105,106,109,110,111,112,114,131,132,133,134,136,137,138,140,141,142,143,144,145,81,82,83,84],"/home/jose/pyGPGO/tests/test_surrogates_mcmc.py":[1,2,3,4,5,8,18,28,9,10,11,13,14,15,19,20,21,23,24,25],"/home/jose/pyGPGO/pyGPGO/surrogates/tStudentProcessMCMC.py":[1,2,3,4,5,6,7,8,9,10,13,14,40,49,84,94,129,145,33,34,35,36,37,38,61,62,63,64,66,67,69,70,72,73,75,76,77,78,79,80],"/home/jose/pyGPGO/tests/test_surrogates_sk.py":[1,2,3,6,10,19,28,37,11,12,13,15,16,20,21,22,24,25,29,30,31,33,34],"/home/jose/p
yGPGO/pyGPGO/surrogates/BoostedTrees.py":[1,2,4,5,27,48,79,22,23,24,25,38,39,40,41,42,43,44,45,46],"/home/jose/pyGPGO/setup.py":[]}}
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | tutorials/* linguist-documentation
2 | examples/* linguist-documentation
3 | docs/* linguist-documentation
4 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | matrix:
3 |   include:
4 |     - python: 3.6
5 |     - python: 3.7
6 |     - python: 3.8
7 | sudo: true
8 | # command to install dependencies
9 | install:
10 | - "pip install ."
11 | - "pip install pytest-cov"
12 | - "pip install codecov"
13 | # command to run tests
14 | script: py.test --cov=./
15 | # codecov
16 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | First of all, thank you for wanting to contribute. It is people like you who make open-source software possible.
2 | Since pyGPGO is MIT licensed, you can pretty much use the code here however it suits your needs, but if you want
3 | to contribute to the existing codebase, please consider the following guidelines.
4 |
5 | # How can I contribute to the project?
6 |
7 | Essentially either by:
8 | 1. Bug reporting.
9 | 2. Suggesting a feature
10 | 3. Contributing to the codebase
11 |
12 | # Found a bug? Tell us about it!
13 |
14 | Hey, your bug may already be fixed! Try to update to the latest pyGPGO version
15 | (ideally the one provided in this repository) and check if it persists.
16 |
17 | Open an issue using the tag `[BUG]` and try to be as descriptive as possible. Also tell us about:
18 | - your installed pyGPGO version.
19 | - your Python environment setup.
20 | - which OS you're running.
21 |
22 | Ideally, we would also like to have a minimal working example of the issue for reproducibility.
23 | We'll fix your bug as soon as possible.
24 |
25 | # Got a feature request?
26 |
27 | We are always looking for interesting ideas to implement in the codebase. Open an issue with the tag
28 | `[REQUEST]` and describe your idea in detail, ideally providing some references.
29 |
30 | After appropriate discussion, if you're feeling adventurous, you can implement your own idea and open
31 | a pull request on the repository. We will review it in due time and approve it, should it meet our
32 | quality criteria.
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2017 Jose Jimenez
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | # Include the license file
2 | include LICENSE
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pyGPGO: Bayesian Optimization for Python
2 | [](https://travis-ci.org/hawk31/pyGPGO)
3 | [](https://codecov.io/gh/hawk31/pyGPGO)
4 | [](http://pygpgo.readthedocs.io/en/latest/?badge=latest)
5 | [](https://zenodo.org/badge/latestdoi/74589922)
6 | [](http://joss.theoj.org/papers/7d60820fabf7fa81501e3d638cac522d)
7 |
8 |
9 | 
10 |
11 | pyGPGO is a simple and modular Python (>3.5) package for Bayesian optimization.
12 |
13 | Bayesian optimization is a framework that can be used in situations where:
14 |
15 | * Your objective function may not have a closed form (e.g. it is the result of a simulation).
16 | * No gradient information is available.
17 | * Function evaluations may be noisy.
18 | * Evaluations are expensive (time- and/or cost-wise).
19 |
20 |
21 | ### Installation
22 |
23 | Retrieve the latest stable release from PyPI:
24 |
25 | ```bash
26 | pip install pyGPGO
27 | ```
28 |
29 | Or, if you're feeling adventurous, retrieve the latest development version from this repo:
30 |
31 | ```bash
32 | pip install git+https://github.com/hawk31/pyGPGO
33 | ```
34 |
35 | Check out our documentation at http://pygpgo.readthedocs.io/.
36 |
37 |
38 | ### Features
39 |
40 | * Different surrogate models: Gaussian Processes, Student-t Processes, Random Forests, Gradient Boosting Machines.
41 | * Type II Maximum-Likelihood of covariance function hyperparameters.
42 | * MCMC sampling for full-Bayesian inference of hyperparameters (via `pyMC3`).
43 | * Integrated acquisition functions
44 |
45 | ### A small example!
46 |
47 | The user only has to define a function to maximize and a dictionary specifying the input space.
48 |
49 | ```python
50 | import numpy as np
51 | from pyGPGO.covfunc import matern32
52 | from pyGPGO.acquisition import Acquisition
53 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
54 | from pyGPGO.GPGO import GPGO
55 |
56 |
57 | def f(x, y):
58 |     # Franke's function (https://www.mathworks.com/help/curvefit/franke.html)
59 |     one = 0.75 * np.exp(-(9 * x - 2) ** 2 / 4 - (9 * y - 2) ** 2 / 4)
60 |     two = 0.75 * np.exp(-(9 * x + 1) ** 2 / 49 - (9 * y + 1) / 10)
61 |     three = 0.5 * np.exp(-(9 * x - 7) ** 2 / 4 - (9 * y - 3) ** 2 / 4)
62 |     four = 0.25 * np.exp(-(9 * x - 4) ** 2 - (9 * y - 7) ** 2)
63 |     return one + two + three - four
64 |
65 | cov = matern32()
66 | gp = GaussianProcess(cov)
67 | acq = Acquisition(mode='ExpectedImprovement')
68 | param = {'x': ('cont', [0, 1]),
69 |          'y': ('cont', [0, 1])}
70 |
71 | np.random.seed(1337)
72 | gpgo = GPGO(gp, acq, f, param)
73 | gpgo.run(max_iter=10)
74 |
75 | ```
76 |
77 | Check the `tutorials` and `examples` folders for more ideas on how to use the software.
78 |
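For instance, a minimal sketch of full-Bayesian hyperparameter treatment with an integrated acquisition function, along the lines of `examples/integratedacq.py` (the `GaussianProcessMCMC` keyword arguments and the `'IntegratedExpectedImprovement'` mode name are assumptions; check that example for the exact signatures):

```python
import numpy as np
from pyGPGO.covfunc import matern32
from pyGPGO.acquisition import Acquisition
from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC
from pyGPGO.GPGO import GPGO


def f(x):
    return np.sin(x)

cov = matern32()
# Covariance hyperparameters are sampled via pyMC3 instead of point-estimated;
# niter/burnin are assumed keyword names.
gp = GaussianProcessMCMC(cov, niter=300, burnin=100)
# The integrated acquisition averages EI over the posterior hyperparameter samples.
acq = Acquisition(mode='IntegratedExpectedImprovement')
param = {'x': ('cont', [0, 2 * np.pi])}

np.random.seed(1337)
gpgo = GPGO(gp, acq, f, param)
gpgo.run(max_iter=10)
```
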
79 | ### Citation
80 |
81 | If you use pyGPGO in academic work please cite:
82 |
83 | Jiménez, J., & Ginebra, J. (2017). pyGPGO: Bayesian Optimization for Python. The Journal of Open Source Software, 2, 431.
84 |
--------------------------------------------------------------------------------
/docs/build/doctrees/api.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/api.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/comparison.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/comparison.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/environment.pickle:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/environment.pickle
--------------------------------------------------------------------------------
/docs/build/doctrees/features.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/features.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/index.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/index.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/modules.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/modules.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.GPGO.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.GPGO.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.acquisition.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.acquisition.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.covfunc.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.covfunc.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.logger.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.logger.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.BoostedTrees.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.BoostedTrees.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.GaussianProcess.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.GaussianProcess.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.GaussianProcessMCMC.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.GaussianProcessMCMC.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.RandomForest.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.RandomForest.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.tStudentProcess.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.tStudentProcess.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.surrogates.tStudentProcessMCMC.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.surrogates.tStudentProcessMCMC.doctree
--------------------------------------------------------------------------------
/docs/build/doctrees/pyGPGO.version.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/doctrees/pyGPGO.version.doctree
--------------------------------------------------------------------------------
/docs/build/html/.buildinfo:
--------------------------------------------------------------------------------
1 | # Sphinx build info version 1
2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
3 | config:
4 | tags:
5 |
--------------------------------------------------------------------------------
/docs/build/html/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/.nojekyll
--------------------------------------------------------------------------------
/docs/build/html/_modules/index.html:
--------------------------------------------------------------------------------
11 | Overview: module code — pyGPGO 0.1.0.dev1 documentation
160 | All modules for which code is available
--------------------------------------------------------------------------------
/docs/build/html/_sources/api.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO documentation
2 | ====================
3 |
4 |
5 | Contents:
6 |
7 | .. toctree::
8 | :maxdepth: 3
9 |
10 | Bayesian Optimization module
11 | Surrogates module
12 | Covariance function module
13 | Acquisition function module
14 |
15 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/api.txt:
--------------------------------------------------------------------------------
1 | pyGPGO documentation
2 | ====================
3 |
4 |
5 | Contents:
6 |
7 | .. toctree::
8 | :maxdepth: 3
9 |
10 | Bayesian Optimization module
11 | Surrogates module
12 | Covariance function module
13 | Acquisition function module
14 |
15 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/comparison.rst.txt:
--------------------------------------------------------------------------------
1 | Comparison with other software
2 | ==============================
3 |
4 | pyGPGO is not the only available Python package for Bayesian optimization, but to the best of our knowledge
5 | it is one of the most comprehensive ones in terms of features available to the user. The table below compares
6 | some of the most common features:
7 |
8 |
9 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
10 | | | pyGPGO | Spearmint | fmfn/BayesianOptimization | pyBO | MOE | GPyOpt | scikit-optimize |
11 | +===============================+========================+===========+===========================+==================================+==========+===============+==================+
12 | | GP implementation | Native | Native | via scikit-learn | via Reggie | Native | via GPy | via scikit-learn |
13 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
14 | | Modular | Yes | No | No | No | No | Yes | No |
15 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
16 | | Surrogates | {GP, tSP, RF, ET, GBM} | {GP} | {GP} | {GP} | {GP} | {GP, RF, WGP} | {GP, RF, GBM} |
17 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
18 | | Type II ML optimization | Yes | No | No | No | Yes | Yes | Yes |
19 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
20 | | MCMC inference | Yes (via pyMC3) | Yes | No | Yes | No | Yes | No |
21 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
22 | | Choice of MCMC sampler | Yes | Yes | No | Yes | No | No | No |
23 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
24 | | Acquisition functions | {PI, EI, UCB, Entropy} | {EI} | {PI, EI, UCB} | {PI, EI, UCB, Thompson sampling} | {EI} | {PI, EI, UCB} | {PI, EI, UCB} |
25 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
26 | | Integrated acq. function | Yes | Yes | No | Yes | No | Yes | No |
27 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
28 | | License | MIT | Academic | MIT | BSD-2 | Apache | BSD-3 | BSD |
29 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
30 | | Last update (as of Apr. 2017) | - | Apr 2016 | Mar 2017 | Sept 2015 | Apr 2016 | Apr 2017 | Apr 2017 |
31 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
32 | | Python version | > 3.5 | 2.7 | 2/3 | 2/3 | 2.7 | 2/3 | 2/3 |
33 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
34 |
35 | If you would like some other feature implemented in pyGPGO, or think this table is outdated or incorrect, please let us know by opening an issue on the GitHub repository of the package!
36 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/features.rst.txt:
--------------------------------------------------------------------------------
1 | Features
2 | ==================
3 |
4 | The Bayesian optimization framework is very flexible, as it allows for choices in many
5 | steps of its design. To name a few of the choices that pyGPGO provides to the user:
6 |
7 | Surrogate models :class:`pyGPGO.surrogates`
8 | ----------------
9 |
10 | The framework works by specifying a surrogate model that approximates our target function,
11 | improving after each evaluation. The most common surrogate in the literature is the Gaussian
12 | Process, but the framework is model-agnostic. Some featured models are:
13 |
14 | - Gaussian Processes (:class:`pyGPGO.surrogates.GaussianProcess` :class:`pyGPGO.surrogates.GaussianProcessMCMC`): By far the most common choice, it needs the user to specify a covariance function (detailed in the next section), measuring similarity among training examples. For a good introduction to Gaussian Processes, check [@Rasmussen-Williams2004].
15 | - Student-t Processes (:class:`pyGPGO.surrogates.tStudentProcess` :class:`pyGPGO.surrogates.tStudentProcessMCMC`): Some functions benefit from the heavy-tailed nature of the Student-t distribution. It also requires providing a covariance function.
16 | - Random Forests (:class:`pyGPGO.surrogates.RandomForest`): provided by `sklearn`, it represents a nonparametric surrogate model. It does not require specifying a covariance function. A class for Extra Random Forests is also available. Posterior variance is approximated by averaging the variance of each subtree [@reference].
17 | - Gradient Boosting Machines (:class:`pyGPGO.surrogates.BoostedTrees`): similar to the latter, posterior variance is approximated using quantile regression.
18 |
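As a rough sketch of how surrogates are constructed and swapped (the constructor arguments shown are assumptions based on the bundled examples; the optimization loop itself does not change)::

    from pyGPGO.covfunc import squaredExponential
    from pyGPGO.surrogates.GaussianProcess import GaussianProcess
    from pyGPGO.surrogates.RandomForest import RandomForest

    # A Gaussian Process surrogate needs a covariance function...
    gp = GaussianProcess(squaredExponential())
    # ...while the Random Forest surrogate does not.
    rf = RandomForest()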
19 |
20 | Covariance functions :class:`pyGPGO.covfunc`
21 | --------------------
22 |
23 | These determine how similar training examples are for the surrogate model. Most of these also
24 | have hyperparameters that need to be taken into account. pyGPGO implements
25 | the most common covariance functions and their gradients w.r.t. hyperparameters,
26 | which we briefly list here.
27 |
28 | - Squared Exponential
29 | - Matérn
30 | - Gamma-Exponential
31 | - Rational-Quadratic
32 | - ArcSine
33 | - Dot-product
34 |
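For instance, a covariance function can be instantiated and evaluated on two sets of points roughly as follows (the ``K`` method name is an assumption based on how the surrogates consume these objects; see :class:`pyGPGO.covfunc` for the exact interface)::

    import numpy as np
    from pyGPGO.covfunc import squaredExponential

    X = np.random.randn(5, 2)      # 5 training points in 2 dimensions
    Xstar = np.random.randn(3, 2)  # 3 test points

    sexp = squaredExponential()    # default hyperparameters
    K = sexp.K(X, Xstar)           # cross-covariance matrix of shape (5, 3)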
35 |
36 | Acquisition behaviour :class:`pyGPGO.acquisition`
37 | ---------------------
38 |
39 | In each iteration of the framework, we choose the next point to evaluate according to a behaviour
40 | dictated by what we call an acquisition function, which trades off exploration and exploitation of
41 | the sampled space. pyGPGO supports the most common acquisition functions in the literature.
42 |
43 | - Probability of improvement: chooses the next point according to the probability of improvement w.r.t. the best observed value.
44 | - Expected improvement: similar to probability of improvement, but it also weighs the probability by the amount improved. It naturally balances exploration and exploitation and is by far the most used acquisition function in the literature.
45 | - Upper confidence limit: features a beta parameter to explicitly control the balance of exploration vs. exploitation. Higher beta values yield higher levels of exploration.
46 | - Entropy: Information-theory based acquisition function.
47 |
48 | Integrated versions of these are also available for the MCMC sampling versions of the surrogate
49 | models.
50 |
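Selecting a behaviour is a one-line change when building the optimizer (``'ExpectedImprovement'`` is the mode used in the examples; the ``'UCB'`` mode name and its ``beta`` keyword are assumptions)::

    from pyGPGO.acquisition import Acquisition

    acq_ei = Acquisition(mode='ExpectedImprovement')
    # Higher beta favours exploration over exploitation.
    acq_ucb = Acquisition(mode='UCB', beta=1.5)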
51 | Hyperparameter treatment
52 | ------------------------
53 |
54 | Covariance functions also have hyperparameters, and their treatment is also thoroughly discussed in the literature (see [@Shahriari2016]).
55 | To summarize, we mainly have two options available:
56 |
57 | - Optimizing the marginal log-likelihood, also called the Empirical Bayes approach. pyGPGO supports this feature using analytical gradients for almost all covariance functions.
58 | - The full Bayesian approach takes into account the uncertainty caused by the hyperparameters in the optimization procedure by marginalizing them, that is, integrating over them. pyGPGO implements this via MCMC sampling provided by the pyMC3 software, which in turn also provides an easy way for the user to choose whatever sampler they wish.
59 |
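In code, choosing between the two treatments amounts to choosing the surrogate (the ``optimize`` flag and the MCMC keyword arguments are assumptions; see the surrogate classes for the exact signatures)::

    from pyGPGO.covfunc import squaredExponential
    from pyGPGO.surrogates.GaussianProcess import GaussianProcess
    from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC

    sexp = squaredExponential()
    # Empirical Bayes: optimize the marginal log-likelihood of the hyperparameters.
    gp_ml = GaussianProcess(sexp, optimize=True)
    # Full Bayesian: marginalize the hyperparameters by MCMC sampling (via pyMC3).
    gp_mcmc = GaussianProcessMCMC(sexp, niter=300, burnin=100)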
60 | References
61 | ----------
62 |
63 | [@Rasmussen-Williams2004]: Rasmussen, C. E., & Williams, C. K. I. (2004). Gaussian processes for machine learning. International journal of neural systems (Vol. 14). http://doi.org/10.1142/S0129065704001899
64 |
65 | [@Shahriari2016]: Shahriari, B., Swersky, K., Wang, Z., Adams, R. P., & De Freitas, N. (2016). Taking the human out of the loop: A review of Bayesian optimization. Proceedings of the IEEE. http://doi.org/10.1109/JPROC.2015.2494218
66 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/index.rst.txt:
--------------------------------------------------------------------------------
1 | .. pyGPGO documentation master file, created by
2 | sphinx-quickstart on Thu Mar 23 17:21:57 2017.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | pyGPGO: Bayesian optimization for Python
7 | ========================================
8 |
9 | pyGPGO is a simple and modular Python (>3.5) package for Bayesian optimization. It supports:
10 |
11 | - Different surrogate models: Gaussian Processes, Student-t Processes, Random Forests, Gradient Boosting Machines.
12 | - Type II Maximum-Likelihood of covariance function hyperparameters.
13 | - MCMC sampling for full-Bayesian inference of hyperparameters (via ``pyMC3``).
14 | - Integrated acquisition functions
15 |
16 | Check us out on `Github <https://github.com/hawk31/pyGPGO>`_.
17 |
18 |
19 | What is Bayesian Optimization?
20 | ==================================
21 |
22 | Bayesian optimization is a framework that is useful in several scenarios:
23 |
24 | - Your objective function has no closed-form.
25 | - No access to gradients
26 | - In presence of noise
27 | - It may be expensive to evaluate.
28 |
29 | The Bayesian optimization framework uses a surrogate model to approximate the objective function and chooses
30 | where to evaluate it next according to some acquisition function. This framework gives a lot of freedom to the user in terms
31 | of optimization choices:
32 |
33 | - Surrogate model choice
34 | - Covariance function choice
35 | - Acquisition function behaviour
36 | - Hyperparameter treatment
37 |
38 | pyGPGO provides an extensive range of choices in each of the previous points, in a modular way. We recommend checking
39 | [@Shahriari2016] for an in-depth review of the framework if you're interested.
40 |
41 |
42 | How do I get started with pyGPGO?
43 | ==================================
44 |
45 | Install the latest stable release from PyPI::
46 |
47 | pip install pyGPGO
48 |
49 |
50 | or if you're feeling adventurous, install the latest devel version from the Github repository::
51 |
52 | pip install git+https://github.com/hawk31/pyGPGO
53 |
54 |
55 | pyGPGO is straightforward to use; we only need to specify:
56 |
57 | - A function to optimize according to some parameters.
58 | - A dictionary defining parameters, their type and bounds.
59 | - A surrogate model, such as a Gaussian Process, from the ``surrogates`` module. Some surrogate models require defining
60 |   a covariance function, with its hyperparameters, from the ``covfunc`` module.
61 | - An acquisition strategy, from the ``acquisition`` module.
62 | - A GPGO instance, from the ``GPGO`` module
63 |
64 | A simple example can be checked below::
65 |
66 | import numpy as np
67 | from pyGPGO.covfunc import squaredExponential
68 | from pyGPGO.acquisition import Acquisition
69 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
70 | from pyGPGO.GPGO import GPGO
71 |
72 | def f(x):
73 |     return (np.sin(x))
74 |
75 |
76 | sexp = squaredExponential()
77 | gp = GaussianProcess(sexp)
78 | acq = Acquisition(mode='ExpectedImprovement')
79 | param = {'x': ('cont', [0, 2 * np.pi])}
80 |
81 | np.random.seed(23)
82 | gpgo = GPGO(gp, acq, f, param)
83 | gpgo.run(max_iter=20)
84 |
85 |
86 | There are a couple of tutorials to help get you started in the `tutorials `_ folder.
87 |
88 | For a full list of features with explanations, check our Features section:
89 |
90 | .. toctree::
91 | :maxdepth: 1
92 |
93 | features
94 |
95 | pyGPGO is not the only package for Bayesian optimization in Python; other excellent alternatives exist. For an in-depth comparison
96 | of the features offered by pyGPGO compared to other software, check the following section:
97 |
98 | .. toctree::
99 | :maxdepth: 1
100 |
101 | comparison
102 |
103 | API documentation
104 | =================
105 |
106 | .. toctree::
107 | :maxdepth: 3
108 |
109 | api
110 |
111 |
112 | References
113 | ==========
114 |
115 | [@Shahriari2016]: Shahriari, B., Swersky, K., Wang, Z., Adams, R. P., & De Freitas, N. (2016). Taking the human out of the loop: A review of Bayesian optimization. Proceedings of the IEEE. http://doi.org/10.1109/JPROC.2015.2494218
116 |
117 |
118 | Indices and tables
119 | ==================
120 |
121 | * :ref:`genindex`
122 | * :ref:`modindex`
123 | * :ref:`search`
124 |
125 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/index.txt:
--------------------------------------------------------------------------------
1 | .. pyGPGO documentation master file, created by
2 | sphinx-quickstart on Thu Mar 23 17:21:57 2017.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | pyGPGO: Bayesian Optimization for Python
7 | ========================================
8 |
9 | pyGPGO is a simple and modular Python (>3.5) package for Bayesian Optimization. It supports:
10 |
11 | - Different surrogate models: Gaussian Processes, Student-t Processes, Random Forests, Gradient Boosting Machines.
12 | - Type II Maximum-Likelihood of covariance function hyperparameters.
13 | - MCMC sampling for full-Bayesian inference of hyperparameters (via ``pyMC3``).
14 | - Integrated acquisition functions
15 |
16 | Check us out on `Github <https://github.com/hawk31/pyGPGO>`_.
17 |
18 | Overall, pyGPGO is a very easy-to-use package. In practice, a user needs to specify:
19 |
20 | - A function to optimize according to some parameters.
21 | - A dictionary defining parameters, their type and bounds.
22 | - A surrogate model, such as a Gaussian Process, from the surrogates module. Some surrogate models require defining
23 |   a covariance function, with its hyperparameters, from the covfunc module.
24 | - An acquisition strategy, from the acquisition module.
25 | - A GPGO instance, from the GPGO module
26 |
27 | A simple example can be checked below::
28 |
29 | import numpy as np
30 | from pyGPGO.covfunc import squaredExponential
31 | from pyGPGO.acquisition import Acquisition
32 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
33 | from pyGPGO.GPGO import GPGO
34 |
35 | def f(x):
36 |     return (np.sin(x))
37 |
38 |
39 | sexp = squaredExponential()
40 | gp = GaussianProcess(sexp)
41 | acq = Acquisition(mode='ExpectedImprovement')
42 | param = {'x': ('cont', [0, 2 * np.pi])}
43 |
44 | np.random.seed(23)
45 | gpgo = GPGO(gp, acq, f, param)
46 | gpgo.run(max_iter=20)
47 |
48 |
49 | Contents:
50 |
51 | .. toctree::
52 | :maxdepth: 3
53 |
54 | api
55 |
56 |
57 |
58 | Indices and tables
59 | ==================
60 |
61 | * :ref:`genindex`
62 | * :ref:`modindex`
63 | * :ref:`search`
64 |
65 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/modules.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO
2 | ======
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | pyGPGO
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/modules.txt:
--------------------------------------------------------------------------------
1 | pyGPGO
2 | ======
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | pyGPGO
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.GPGO.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.GPGO module
2 | ===================
3 |
4 | .. automodule:: pyGPGO.GPGO
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.GPGO.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.GPGO module
2 | ==================
3 |
4 | .. automodule:: pyGPGO.GPGO
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.acquisition.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.acquisition module
2 | ==========================
3 |
4 | .. automodule:: pyGPGO.acquisition
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.acquisition.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.acquisition module
2 | =========================
3 |
4 | .. automodule:: pyGPGO.acquisition
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.covfunc.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.covfunc module
2 | ======================
3 |
4 | .. automodule:: pyGPGO.covfunc
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.covfunc.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.covfunc module
2 | =====================
3 |
4 | .. automodule:: pyGPGO.covfunc
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.logger.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.logger module
2 | =====================
3 |
4 | .. automodule:: pyGPGO.logger
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.logger.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.logger module
2 | ====================
3 |
4 | .. automodule:: pyGPGO.logger
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | pyGPGO.surrogates
10 |
11 | Submodules
12 | ----------
13 |
14 | .. toctree::
15 |
16 | pyGPGO.GPGO
17 | pyGPGO.acquisition
18 | pyGPGO.covfunc
19 | pyGPGO.logger
20 | pyGPGO.version
21 |
22 | Module contents
23 | ---------------
24 |
25 | .. automodule:: pyGPGO
26 | :members:
27 | :undoc-members:
28 | :show-inheritance:
29 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.BoostedTrees.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.BoostedTrees module
2 | =======================================
3 |
4 | .. automodule:: pyGPGO.surrogates.BoostedTrees
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.BoostedTrees.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates.BoostedTrees module
2 | =====================================
3 |
4 | .. automodule:: pyGPGO.surrogates.BoostedTrees
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.GaussianProcess.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.GaussianProcess module
2 | ==========================================
3 |
4 | .. automodule:: pyGPGO.surrogates.GaussianProcess
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.GaussianProcess.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates.GaussianProcess module
2 | ========================================
3 |
4 | .. automodule:: pyGPGO.surrogates.GaussianProcess
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.GaussianProcessMCMC.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.GaussianProcessMCMC module
2 | ==============================================
3 |
4 | .. automodule:: pyGPGO.surrogates.GaussianProcessMCMC
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.GaussianProcessMCMC.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates.GaussianProcessMCMC module
2 | ============================================
3 |
4 | .. automodule:: pyGPGO.surrogates.GaussianProcessMCMC
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.RandomForest.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.RandomForest module
2 | =======================================
3 |
4 | .. automodule:: pyGPGO.surrogates.RandomForest
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.RandomForest.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates.RandomForest module
2 | =====================================
3 |
4 | .. automodule:: pyGPGO.surrogates.RandomForest
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates package
2 | ==========================
3 |
4 | Submodules
5 | ----------
6 |
7 | .. toctree::
8 |
9 | pyGPGO.surrogates.BoostedTrees
10 | pyGPGO.surrogates.GaussianProcess
11 | pyGPGO.surrogates.GaussianProcessMCMC
12 | pyGPGO.surrogates.RandomForest
13 | pyGPGO.surrogates.tStudentProcess
14 | pyGPGO.surrogates.tStudentProcessMCMC
15 |
16 | Module contents
17 | ---------------
18 |
19 | .. automodule:: pyGPGO.surrogates
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.tStudentProcess.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.tStudentProcess module
2 | ==========================================
3 |
4 | .. automodule:: pyGPGO.surrogates.tStudentProcess
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.tStudentProcess.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates.tStudentProcess module
2 | ========================================
3 |
4 | .. automodule:: pyGPGO.surrogates.tStudentProcess
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.tStudentProcessMCMC.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.tStudentProcessMCMC module
2 | ==============================================
3 |
4 | .. automodule:: pyGPGO.surrogates.tStudentProcessMCMC
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.tStudentProcessMCMC.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates.tStudentProcessMCMC module
2 | ============================================
3 |
4 | .. automodule:: pyGPGO.surrogates.tStudentProcessMCMC
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.surrogates.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.surrogates package
2 | =========================
3 |
4 | Submodules
5 | ----------
6 |
7 | .. toctree::
8 |
9 | pyGPGO.surrogates.BoostedTrees
10 | pyGPGO.surrogates.GaussianProcess
11 | pyGPGO.surrogates.GaussianProcessMCMC
12 | pyGPGO.surrogates.RandomForest
13 | pyGPGO.surrogates.tStudentProcess
14 | pyGPGO.surrogates.tStudentProcessMCMC
15 |
16 | Module contents
17 | ---------------
18 |
19 | .. automodule:: pyGPGO.surrogates
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.txt:
--------------------------------------------------------------------------------
1 | pyGPGO package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | pyGPGO.surrogates
10 |
11 | Submodules
12 | ----------
13 |
14 | .. toctree::
15 |
16 | pyGPGO.GPGO
17 | pyGPGO.acquisition
18 | pyGPGO.covfunc
19 | pyGPGO.logger
20 | pyGPGO.version
21 |
22 | Module contents
23 | ---------------
24 |
25 | .. automodule:: pyGPGO
26 | :members:
27 | :undoc-members:
28 | :show-inheritance:
29 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.version.rst.txt:
--------------------------------------------------------------------------------
1 | pyGPGO\.version module
2 | ======================
3 |
4 | .. automodule:: pyGPGO.version
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_sources/pyGPGO.version.txt:
--------------------------------------------------------------------------------
1 | pyGPGO.version module
2 | =====================
3 |
4 | .. automodule:: pyGPGO.version
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/build/html/_static/ajax-loader.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/ajax-loader.gif
--------------------------------------------------------------------------------
/docs/build/html/_static/comment-bright.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/comment-bright.png
--------------------------------------------------------------------------------
/docs/build/html/_static/comment-close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/comment-close.png
--------------------------------------------------------------------------------
/docs/build/html/_static/comment.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/comment.png
--------------------------------------------------------------------------------
/docs/build/html/_static/css/badge_only.css:
--------------------------------------------------------------------------------
1 | .fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:0.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}
2 | /*# sourceMappingURL=badge_only.css.map */
3 |
--------------------------------------------------------------------------------
/docs/build/html/_static/down-pressed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/down-pressed.png
--------------------------------------------------------------------------------
/docs/build/html/_static/down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/down.png
--------------------------------------------------------------------------------
/docs/build/html/_static/file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/file.png
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/Inconsolata-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/Inconsolata-Bold.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/Inconsolata-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/Inconsolata-Regular.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/Lato-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/Lato-Bold.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/Lato-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/Lato-Regular.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/RobotoSlab-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/RobotoSlab-Bold.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/RobotoSlab-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/RobotoSlab-Regular.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/fontawesome-webfont.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/fontawesome-webfont.eot
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/fontawesome-webfont.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/fontawesome-webfont.ttf
--------------------------------------------------------------------------------
/docs/build/html/_static/fonts/fontawesome-webfont.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/fonts/fontawesome-webfont.woff
--------------------------------------------------------------------------------
/docs/build/html/_static/js/theme.js:
--------------------------------------------------------------------------------
1 | require=(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o");
80 |
81 | // Add expand links to all parents of nested ul
82 | $('.wy-menu-vertical ul').not('.simple').siblings('a').each(function () {
83 | var link = $(this);
 84 |         expand = $('<span class="toctree-expand"></span>');
85 | expand.on('click', function (ev) {
86 | self.toggleCurrent(link);
87 | ev.stopPropagation();
88 | return false;
89 | });
90 | link.prepend(expand);
91 | });
92 | };
93 |
94 | nav.reset = function () {
95 | // Get anchor from URL and open up nested nav
96 | var anchor = encodeURI(window.location.hash);
97 | if (anchor) {
98 | try {
99 | var link = $('.wy-menu-vertical')
100 | .find('[href="' + anchor + '"]');
101 | // If we didn't find a link, it may be because we clicked on
102 | // something that is not in the sidebar (eg: when using
103 | // sphinxcontrib.httpdomain it generates headerlinks but those
104 | // aren't picked up and placed in the toctree). So let's find
105 | // the closest header in the document and try with that one.
106 | if (link.length === 0) {
107 | var doc_link = $('.document a[href="' + anchor + '"]');
108 | var closest_section = doc_link.closest('div.section');
109 | // Try again with the closest section entry.
110 | link = $('.wy-menu-vertical')
111 | .find('[href="#' + closest_section.attr("id") + '"]');
112 |
113 | }
114 | $('.wy-menu-vertical li.toctree-l1 li.current')
115 | .removeClass('current');
116 | link.closest('li.toctree-l2').addClass('current');
117 | link.closest('li.toctree-l3').addClass('current');
118 | link.closest('li.toctree-l4').addClass('current');
119 | }
120 | catch (err) {
121 | console.log("Error expanding nav for anchor", err);
122 | }
123 | }
124 | };
125 |
126 | nav.onScroll = function () {
127 | this.winScroll = false;
128 | var newWinPosition = this.win.scrollTop(),
129 | winBottom = newWinPosition + this.winHeight,
130 | navPosition = this.navBar.scrollTop(),
131 | newNavPosition = navPosition + (newWinPosition - this.winPosition);
132 | if (newWinPosition < 0 || winBottom > this.docHeight) {
133 | return;
134 | }
135 | this.navBar.scrollTop(newNavPosition);
136 | this.winPosition = newWinPosition;
137 | };
138 |
139 | nav.onResize = function () {
140 | this.winResize = false;
141 | this.winHeight = this.win.height();
142 | this.docHeight = $(document).height();
143 | };
144 |
145 | nav.hashChange = function () {
146 | this.linkScroll = true;
147 | this.win.one('hashchange', function () {
148 | this.linkScroll = false;
149 | });
150 | };
151 |
152 | nav.toggleCurrent = function (elem) {
153 | var parent_li = elem.closest('li');
154 | parent_li.siblings('li.current').removeClass('current');
155 | parent_li.siblings().find('li.current').removeClass('current');
156 | parent_li.find('> ul li.current').removeClass('current');
157 | parent_li.toggleClass('current');
158 | }
159 |
160 | return nav;
161 | };
162 |
163 | module.exports.ThemeNav = ThemeNav();
164 |
165 | if (typeof(window) != 'undefined') {
166 | window.SphinxRtdTheme = { StickyNav: module.exports.ThemeNav };
167 | }
168 |
169 | },{"jquery":"jquery"}]},{},["sphinx-rtd-theme"]);
170 |
--------------------------------------------------------------------------------
/docs/build/html/_static/minus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/minus.png
--------------------------------------------------------------------------------
/docs/build/html/_static/plus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/plus.png
--------------------------------------------------------------------------------
/docs/build/html/_static/pygments.css:
--------------------------------------------------------------------------------
1 | .highlight .hll { background-color: #ffffcc }
2 | .highlight { background: #eeffcc; }
3 | .highlight .c { color: #408090; font-style: italic } /* Comment */
4 | .highlight .err { border: 1px solid #FF0000 } /* Error */
5 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */
6 | .highlight .o { color: #666666 } /* Operator */
7 | .highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */
8 | .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
9 | .highlight .cp { color: #007020 } /* Comment.Preproc */
10 | .highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */
11 | .highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
12 | .highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
13 | .highlight .gd { color: #A00000 } /* Generic.Deleted */
14 | .highlight .ge { font-style: italic } /* Generic.Emph */
15 | .highlight .gr { color: #FF0000 } /* Generic.Error */
16 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
17 | .highlight .gi { color: #00A000 } /* Generic.Inserted */
18 | .highlight .go { color: #333333 } /* Generic.Output */
19 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
20 | .highlight .gs { font-weight: bold } /* Generic.Strong */
21 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
22 | .highlight .gt { color: #0044DD } /* Generic.Traceback */
23 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
24 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
25 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
26 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */
27 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
28 | .highlight .kt { color: #902000 } /* Keyword.Type */
29 | .highlight .m { color: #208050 } /* Literal.Number */
30 | .highlight .s { color: #4070a0 } /* Literal.String */
31 | .highlight .na { color: #4070a0 } /* Name.Attribute */
32 | .highlight .nb { color: #007020 } /* Name.Builtin */
33 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
34 | .highlight .no { color: #60add5 } /* Name.Constant */
35 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
36 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
37 | .highlight .ne { color: #007020 } /* Name.Exception */
38 | .highlight .nf { color: #06287e } /* Name.Function */
39 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
40 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
41 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
42 | .highlight .nv { color: #bb60d5 } /* Name.Variable */
43 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
44 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */
45 | .highlight .mb { color: #208050 } /* Literal.Number.Bin */
46 | .highlight .mf { color: #208050 } /* Literal.Number.Float */
47 | .highlight .mh { color: #208050 } /* Literal.Number.Hex */
48 | .highlight .mi { color: #208050 } /* Literal.Number.Integer */
49 | .highlight .mo { color: #208050 } /* Literal.Number.Oct */
50 | .highlight .sa { color: #4070a0 } /* Literal.String.Affix */
51 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
52 | .highlight .sc { color: #4070a0 } /* Literal.String.Char */
53 | .highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */
54 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
55 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */
56 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
57 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
58 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
59 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */
60 | .highlight .sr { color: #235388 } /* Literal.String.Regex */
61 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */
62 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */
63 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
64 | .highlight .fm { color: #06287e } /* Name.Function.Magic */
65 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
66 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
67 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
68 | .highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */
69 | .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
--------------------------------------------------------------------------------
/docs/build/html/_static/up-pressed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/up-pressed.png
--------------------------------------------------------------------------------
/docs/build/html/_static/up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/_static/up.png
--------------------------------------------------------------------------------
/docs/build/html/modules.html:
--------------------------------------------------------------------------------
[HTML build page; markup stripped in extraction. Page title: "pyGPGO — pyGPGO 0.1.0.dev1 documentation"; heading: "pyGPGO"; breadcrumb "Docs » pyGPGO" and "View page source" link.]
--------------------------------------------------------------------------------
/docs/build/html/objects.inv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/html/objects.inv
--------------------------------------------------------------------------------
/docs/build/html/pyGPGO.version.html:
--------------------------------------------------------------------------------
[HTML build page; markup stripped in extraction. Page title: "pyGPGO.version module — pyGPGO 0.1.0.dev1 documentation"; heading: "pyGPGO.version module"; breadcrumb "Docs » pyGPGO.version module" and "View page source" link.]
--------------------------------------------------------------------------------
/docs/build/html/search.html:
--------------------------------------------------------------------------------
[HTML build page; markup stripped in extraction. Page title: "Search — pyGPGO 0.1.0.dev1 documentation"; heading: "Search"; breadcrumb "Docs » Search".]
--------------------------------------------------------------------------------
/docs/build/latex/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx LaTeX output
2 |
3 | ALLDOCS = $(basename $(wildcard *.tex))
4 | ALLPDF = $(addsuffix .pdf,$(ALLDOCS))
5 | ALLDVI = $(addsuffix .dvi,$(ALLDOCS))
6 |
7 | # Prefix for archive names
8 | ARCHIVEPREFIX =
9 | # Additional LaTeX options
10 | LATEXOPTS =
11 | # format: pdf or dvi
12 | FMT = pdf
13 |
14 | LATEX = latex
15 | PDFLATEX = pdflatex
16 | MAKEINDEX = makeindex
17 |
18 | all: $(ALLPDF)
19 | all-pdf: $(ALLPDF)
20 | all-dvi: $(ALLDVI)
21 | all-ps: all-dvi
22 | for f in *.dvi; do dvips $$f; done
23 |
24 | all-pdf-ja:
25 | for f in *.pdf *.png *.gif *.jpg *.jpeg; do extractbb $$f; done
26 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done
27 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done
28 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done
29 | -for f in *.idx; do mendex -U -f -d "`basename $$f .idx`.dic" -s python.ist $$f; done
30 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done
31 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done
32 | for f in *.dvi; do dvipdfmx $$f; done
33 |
34 | zip: all-$(FMT)
35 | mkdir $(ARCHIVEPREFIX)docs-$(FMT)
36 | cp $(ALLPDF) $(ARCHIVEPREFIX)docs-$(FMT)
37 | zip -q -r -9 $(ARCHIVEPREFIX)docs-$(FMT).zip $(ARCHIVEPREFIX)docs-$(FMT)
38 | rm -r $(ARCHIVEPREFIX)docs-$(FMT)
39 |
40 | tar: all-$(FMT)
41 | mkdir $(ARCHIVEPREFIX)docs-$(FMT)
42 | cp $(ALLPDF) $(ARCHIVEPREFIX)docs-$(FMT)
43 | tar cf $(ARCHIVEPREFIX)docs-$(FMT).tar $(ARCHIVEPREFIX)docs-$(FMT)
44 | rm -r $(ARCHIVEPREFIX)docs-$(FMT)
45 |
46 | gz: tar
47 | gzip -9 < $(ARCHIVEPREFIX)docs-$(FMT).tar > $(ARCHIVEPREFIX)docs-$(FMT).tar.gz
48 |
49 | bz2: tar
50 | bzip2 -9 -k $(ARCHIVEPREFIX)docs-$(FMT).tar
51 |
52 | xz: tar
53 | xz -9 -k $(ARCHIVEPREFIX)docs-$(FMT).tar
54 |
55 | # The number of LaTeX runs is quite conservative, but I don't expect it
56 | # to get run often, so the little extra time won't hurt.
57 | %.dvi: %.tex
58 | $(LATEX) $(LATEXOPTS) '$<'
59 | $(LATEX) $(LATEXOPTS) '$<'
60 | $(LATEX) $(LATEXOPTS) '$<'
61 | -$(MAKEINDEX) -s python.ist '$(basename $<).idx'
62 | $(LATEX) $(LATEXOPTS) '$<'
63 | $(LATEX) $(LATEXOPTS) '$<'
64 |
65 | %.pdf: %.tex
66 | $(PDFLATEX) $(LATEXOPTS) '$<'
67 | $(PDFLATEX) $(LATEXOPTS) '$<'
68 | $(PDFLATEX) $(LATEXOPTS) '$<'
69 | -$(MAKEINDEX) -s python.ist '$(basename $<).idx'
70 | $(PDFLATEX) $(LATEXOPTS) '$<'
71 | $(PDFLATEX) $(LATEXOPTS) '$<'
72 |
73 | clean:
74 | rm -f *.log *.ind *.aux *.toc *.syn *.idx *.out *.ilg *.pla *.ps *.tar *.tar.gz *.tar.bz2 *.tar.xz $(ALLPDF) $(ALLDVI)
75 |
76 | .PHONY: all all-pdf all-dvi all-ps clean zip tar gz bz2 xz
77 | .PHONY: all-pdf-ja
78 |
79 |
--------------------------------------------------------------------------------
/docs/build/latex/capt-of.sty:
--------------------------------------------------------------------------------
1 | % CAPT-OF.STY v0.1
2 | % Copyright 1996, 1999 Robin Fairbairns (rf10@cam.ac.uk)
3 | %
4 | % This program can redistributed and/or modified under the terms
5 | % of the LaTeX Project Public License Distributed from CTAN
6 | % archives in directory macros/latex/base/lppl.txt; either
7 | % version 1 of the License, or (at your option) any later version.
8 | %
9 | % This short package permits you to have things with apparently
10 | % `proper' captions in the body of your text. This is sometimes
11 | % convenient in documents that only have small `figure's or `table's
12 | % (or small numbers of them) and you don't need to hassle yourself
13 | % dealing with floats (\begin{figure|table}...)
14 | %
15 | % Usage: \captionof{type}[move]{caption}
16 | % type is `figure' or `table' (or some type you've defined with
17 | % float.sty)
18 | % move is the optional moving argument of \caption (the thing
19 | % that goes to the listoftables/figures)
20 | % caption is the text of the caption
21 | %
22 | % It's probably best to use \captionof within an enclosing group
23 | % (e.g., \begin{center} fig \captionof{figure}{blah blah} \end{center})
24 | %
25 | % CAVEAT: If you _do_ have `real' (floating) figures or tables in your
26 | % document, you may find this package causes the figures\tables to get
27 | % out of order. You are (as far as I'm concerned) `on your own' if
28 | % you use this package in such circumstances.
29 |
30 | \newcommand\captionof[1]{\def\@captype{#1}\caption}
31 |
--------------------------------------------------------------------------------
/docs/build/latex/eqparbox.sty:
--------------------------------------------------------------------------------
1 | %%
2 | %% This is file `eqparbox.sty',
3 | %% generated with the docstrip utility.
4 | %%
5 | %% The original source files were:
6 | %%
7 | %% eqparbox.dtx (with options: `package')
8 | %%
9 | %% This is a generated file.
10 | %%
11 | %% Copyright (C) 2010 Scott Pakin
12 | %% -------------------------------------------------------
13 | %%
14 | %% This package may be distributed and/or modified under the
15 | %% conditions of the LaTeX Project Public License, either version 1.2
16 | %% of this license or (at your option) any later version.
17 | %% The latest version of this license is in
18 | %% http://www.latex-project.org/lppl.txt
19 | %% and version 1.3c or later is part of all distributions of LaTeX
20 | %% version 2008/05/04 or later.
21 | %%
22 | \NeedsTeXFormat{LaTeX2e}[1999/12/01]
23 | \ProvidesPackage{eqparbox}
24 | [2010/01/01 v3.1 Create equal-widthed boxes]
25 | \newlength{\eqp@tempdima} \newlength{\eqp@tempdimb}
26 | \def\eqp@taglist{}
27 | \newif\ifeqp@must@rerun
28 | \AtEndDocument{%
29 | \begingroup
30 | \def\@elt#1{%
31 | \eqp@tempdima\csname eqp@this@#1\endcsname\relax
32 | \eqp@tempdimb\csname eqp@next@#1\endcsname\relax
33 | \ifdim\eqp@tempdima=\eqp@tempdimb
34 | \else
35 | \@latex@warning@no@line{Rerun to correct the width of eqparbox `#1'}%
36 | \fi
37 | \immediate\write\@auxout{%
38 | \string\expandafter\string\gdef\string\csname\space
39 | eqp@this@#1\string\endcsname{%
40 | \csname eqp@next@#1\endcsname
41 | }%
42 | ^^J%
43 | \string\expandafter\string\gdef\string\csname\space
44 | eqp@next@#1\string\endcsname{0pt}%
45 | }%
46 | }%
47 | \eqp@taglist
48 | \endgroup
49 | \ifeqp@must@rerun
50 | \@latex@warning@no@line{Rerun to correct eqparbox widths}
51 | \fi
52 | }
53 | \newcommand*{\eqp@storefont}{%
54 | \xdef\eqp@restorefont{%
55 | \noexpand\usefont{\f@encoding}{\f@family}{\f@series}{\f@shape}%
56 | \noexpand\fontsize{\f@size}{\f@baselineskip}%
57 | \noexpand\selectfont
58 | }%
59 | }
60 | \RequirePackage{array}
61 | \newcommand{\eqp@settowidth}[2]{%
62 | \settowidth{#1}{{%
63 | \eqp@storefont
64 | \begin{tabular}{@{}>{\eqp@restorefont}l<{\eqp@storefont}@{}}%
65 | #2%
66 | \end{tabular}%
67 | }}%
68 | }
69 | \DeclareRobustCommand{\eqparbox}{%
70 | \@ifnextchar[%]
71 | {\eqparbox@i}%
72 | {\eqparbox@iii[c][\relax][s]}%
73 | }
74 | \def\eqparbox@i[#1]{%
75 | \@ifnextchar[%]
76 | {\eqparbox@ii[#1]}%
77 | {\eqparbox@iii[#1][\relax][s]}%
78 | }
79 | \def\eqparbox@ii[#1][#2]{%
80 | \@ifnextchar[%]
81 | {\eqparbox@iii[#1][#2]}%
82 | {\eqparbox@iii[#1][#2][#1]}%
83 | }
84 | \def\eqparbox@iii[#1][#2][#3]{%
85 | \gdef\eqp@produce@box##1##2{%
86 | \parbox[#1][#2][#3]{##1}{##2}%
87 | }%
88 | \eqp@compute@width
89 | }
90 | \DeclareRobustCommand{\eqmakebox}{%
91 | \@ifnextchar[%]
92 | {\eqlrbox@i\makebox}%
93 | {\makebox}%
94 | }
95 | \DeclareRobustCommand{\eqframebox}{%
96 | \@ifnextchar[%]
97 | {\eqlrbox@i\framebox}%
98 | {\framebox}%
99 | }
100 | \DeclareRobustCommand{\eqsavebox}[1]{%
101 | \@ifnextchar[%]
102 | {\eqlrbox@i{\savebox{#1}}}%
103 | {\savebox{#1}}%
104 | }
105 | \def\eqlrbox@i#1[#2]{%
106 | \@ifnextchar[%]
107 | {\eqlrbox@ii{#1}[#2]}%
108 | {\eqlrbox@ii{#1}[#2][c]}%
109 | }
110 | \def\eqlrbox@ii#1[#2][#3]{%
111 | \gdef\eqp@produce@box##1##2{%
112 | #1[##1][#3]{##2}%
113 | }%
114 | \eqp@compute@width{#2}%
115 | }
116 | \def\eqp@compute@width#1#2{%
117 | \eqp@settowidth{\eqp@tempdimb}{#2}%
118 | \expandafter
119 | \ifx\csname eqp@this@#1\endcsname\relax
120 | \global\eqp@must@reruntrue
121 | \expandafter\xdef\csname eqp@this@#1\endcsname{\the\eqp@tempdimb}%
122 | \expandafter\xdef\csname eqp@next@#1\endcsname{\the\eqp@tempdimb}%
123 | \else
124 | \eqp@tempdima=\csname eqp@this@#1\endcsname\relax
125 | \ifdim\eqp@tempdima<\eqp@tempdimb
126 | \expandafter\xdef\csname eqp@this@#1\endcsname{\the\eqp@tempdimb}%
127 | \global\eqp@must@reruntrue
128 | \fi
129 | \eqp@tempdima=\csname eqp@next@#1\endcsname\relax
130 | \ifdim\eqp@tempdima<\eqp@tempdimb
131 | \expandafter\xdef\csname eqp@next@#1\endcsname{\the\eqp@tempdimb}%
132 | \fi
133 | \fi
134 | \@ifundefined{eqp@seen@#1}{%
135 | \expandafter\gdef\csname eqp@seen@#1\endcsname{}%
136 | \@cons\eqp@taglist{{#1}}%
137 | }{}%
138 | \eqp@tempdima=\csname eqp@this@#1\endcsname\relax
139 | \eqp@produce@box{\eqp@tempdima}{#2}%
140 | }
141 | \newcommand*{\eqboxwidth}[1]{%
142 | \@ifundefined{eqp@this@#1}{0pt}{\csname eqp@this@#1\endcsname}%
143 | }
144 | \endinput
145 | %%
146 | %% End of file `eqparbox.sty'.
147 |
--------------------------------------------------------------------------------
/docs/build/latex/iftex.sty:
--------------------------------------------------------------------------------
1 | %%
2 | %% This is file `iftex.sty',
3 |
4 | %%
5 | %% __________________________________
6 | %% Copyright © 2010–2013 Persian TeX Group
7 | %%
8 | %% License information appended.
9 | %%
10 | %%
11 | \csname iftexloaded\endcsname
12 | \let\iftexloaded\endinput
13 | \expandafter\ifx\csname ProvidesPackage\endcsname\relax\else
14 | \ProvidesPackage{iftex}
15 | [2013/04/04 v0.2 Provides if(tex) conditional for PDFTeX, XeTeX, and LuaTeX]
16 | \fi
17 | \def\RequirePDFTeX{%
18 | \ifPDFTeX\else
19 | \begingroup
20 | \errorcontextlines=-1\relax
21 | \newlinechar=10\relax
22 | \errmessage{^^J
23 | ********************************************^^J
24 | * PDFTeX is required to compile this document.^^J
25 | * Sorry!^^J
26 | ********************************************}%
27 | \endgroup
28 | \fi}
29 | \def\RequireXeTeX{%
30 | \ifXeTeX\else
31 | \begingroup
32 | \errorcontextlines=-1\relax
33 | \newlinechar=10\relax
34 | \errmessage{^^J
35 | ********************************************^^J
36 | * XeTeX is required to compile this document.^^J
37 | * Sorry!^^J
38 | ********************************************}%
39 | \endgroup
40 | \fi}
41 | \def\RequireLuaTeX{%
42 | \ifLuaTeX\else
43 | \begingroup
44 | \errorcontextlines=-1\relax
45 | \newlinechar=10\relax
46 | \errmessage{^^J
47 | ********************************************^^J
48 | * LuaTeX is required to compile this document.^^J
49 | * Sorry!^^J
50 | ********************************************}%
51 | \endgroup
52 | \fi}
53 | \expandafter\ifx\csname ifPDFTeX\endcsname\relax\else
54 | \expandafter\endinput
55 | \fi
56 | \expandafter\ifx\csname ifXeTeX\endcsname\relax\else
57 | \expandafter\endinput
58 | \fi
59 | \expandafter\ifx\csname ifLuaTeX\endcsname\relax\else
60 | \expandafter\endinput
61 | \fi
62 | \newif\ifPDFTeX
63 | \begingroup\expandafter\expandafter\expandafter\endgroup
64 | \expandafter\ifx\csname pdfmatch\endcsname\relax
65 | \PDFTeXfalse
66 | \else
67 | \PDFTeXtrue
68 | \fi
69 | \newif\ifXeTeX
70 | \begingroup\expandafter\expandafter\expandafter\endgroup
71 | \expandafter\ifx\csname XeTeXinterchartoks\endcsname\relax
72 | \XeTeXfalse
73 | \else
74 | \XeTeXtrue
75 | \fi
76 | \newif\ifLuaTeX
77 | \begingroup\expandafter\expandafter\expandafter\endgroup
78 | \expandafter\ifx\csname directlua\endcsname\relax
79 | \LuaTeXfalse
80 | \else
81 | \LuaTeXtrue
82 | \fi
83 | %%
84 | %% Copyright © 2010–2013 by Persian TeX Group
85 | %%
86 | %% Distributable under the LaTeX Project Public License,
87 | %% version 1.3c or higher (your choice). The latest version of
88 | %% this license is at: http://www.latex-project.org/lppl.txt
89 | %%
90 | %% This work is "maintained" (as per LPPL maintenance status)
91 | %% by Persian TeX Group.
92 | %%
93 | %%
94 | %%
95 | %%
96 | %%
97 | %% End of file `iftex.sty'.
98 |
--------------------------------------------------------------------------------
/docs/build/latex/needspace.sty:
--------------------------------------------------------------------------------
1 |
2 | \NeedsTeXFormat{LaTeX2e}
3 | \ProvidesPackage{needspace}[2010/09/12 v1.3d reserve vertical space]
4 |
5 | \newcommand{\needspace}[1]{%
6 | \begingroup
7 | \setlength{\dimen@}{#1}%
8 | \vskip\z@\@plus\dimen@
9 | \penalty -100\vskip\z@\@plus -\dimen@
10 | \vskip\dimen@
11 | \penalty 9999%
12 | \vskip -\dimen@
13 | \vskip\z@skip % hide the previous |\vskip| from |\addvspace|
14 | \endgroup
15 | }
16 |
17 | \newcommand{\Needspace}{\@ifstar{\@sneedsp@}{\@needsp@}}
18 |
19 | \newcommand{\@sneedsp@}[1]{\par \penalty-100\begingroup
20 | \setlength{\dimen@}{#1}%
21 | \dimen@ii\pagegoal \advance\dimen@ii-\pagetotal
22 | \ifdim \dimen@>\dimen@ii
23 | \break
24 | \fi\endgroup}
25 |
26 | \newcommand{\@needsp@}[1]{\par \penalty-100\begingroup
27 | \setlength{\dimen@}{#1}%
28 | \dimen@ii\pagegoal \advance\dimen@ii-\pagetotal
29 | \ifdim \dimen@>\dimen@ii
30 | \ifdim \dimen@ii>\z@
31 | \vfil
32 | \fi
33 | \break
34 | \fi\endgroup}
35 |
36 |
--------------------------------------------------------------------------------
/docs/build/latex/pyGPGO.ilg:
--------------------------------------------------------------------------------
1 | This is makeindex, version 2.15 [TeX Live 2013] (kpathsea + Thai support).
2 | Scanning style file ./python.ist......done (6 attributes redefined, 0 ignored).
3 | Scanning input file pyGPGO.idx....done (104 entries accepted, 0 rejected).
4 | Sorting entries....done (765 comparisons).
5 | Generating output file pyGPGO.ind....done (170 lines written, 0 warnings).
6 | Output written in pyGPGO.ind.
7 | Transcript written in pyGPGO.ilg.
8 |
--------------------------------------------------------------------------------
/docs/build/latex/pyGPGO.out:
--------------------------------------------------------------------------------
1 | \BOOKMARK [0][-]{chapter.1}{\376\377\000p\000y\000G\000P\000G\000O\000\040\000d\000o\000c\000u\000m\000e\000n\000t\000a\000t\000i\000o\000n}{}% 1
2 | \BOOKMARK [1][-]{section.1.1}{\376\377\000p\000y\000G\000P\000G\000O\000.\000G\000P\000G\000O\000\040\000m\000o\000d\000u\000l\000e}{chapter.1}% 2
3 | \BOOKMARK [1][-]{section.1.2}{\376\377\000p\000y\000G\000P\000G\000O\000.\000s\000u\000r\000r\000o\000g\000a\000t\000e\000s\000\040\000p\000a\000c\000k\000a\000g\000e}{chapter.1}% 3
4 | \BOOKMARK [2][-]{subsection.1.2.1}{\376\377\000S\000u\000b\000m\000o\000d\000u\000l\000e\000s}{section.1.2}% 4
5 | \BOOKMARK [2][-]{subsection.1.2.2}{\376\377\000M\000o\000d\000u\000l\000e\000\040\000c\000o\000n\000t\000e\000n\000t\000s}{section.1.2}% 5
6 | \BOOKMARK [1][-]{section.1.3}{\376\377\000p\000y\000G\000P\000G\000O\000.\000c\000o\000v\000f\000u\000n\000c\000\040\000m\000o\000d\000u\000l\000e}{chapter.1}% 6
7 | \BOOKMARK [1][-]{section.1.4}{\376\377\000p\000y\000G\000P\000G\000O\000.\000a\000c\000q\000u\000i\000s\000i\000t\000i\000o\000n\000\040\000m\000o\000d\000u\000l\000e}{chapter.1}% 7
8 | \BOOKMARK [0][-]{chapter.2}{\376\377\000I\000n\000d\000i\000c\000e\000s\000\040\000a\000n\000d\000\040\000t\000a\000b\000l\000e\000s}{}% 8
9 | \BOOKMARK [0][-]{section*.104}{\376\377\000P\000y\000t\000h\000o\000n\000\040\000M\000o\000d\000u\000l\000e\000\040\000I\000n\000d\000e\000x}{}% 9
10 | \BOOKMARK [0][-]{section*.105}{\376\377\000I\000n\000d\000e\000x}{}% 10
11 |
--------------------------------------------------------------------------------
/docs/build/latex/pyGPGO.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/latex/pyGPGO.pdf
--------------------------------------------------------------------------------
/docs/build/latex/pyGPGO.toc:
--------------------------------------------------------------------------------
1 | \select@language {english}
2 | \contentsline {chapter}{\numberline {1}pyGPGO documentation}{3}{chapter.1}
3 | \contentsline {section}{\numberline {1.1}pyGPGO.GPGO module}{3}{section.1.1}
4 | \contentsline {section}{\numberline {1.2}pyGPGO.surrogates package}{4}{section.1.2}
5 | \contentsline {subsection}{\numberline {1.2.1}Submodules}{4}{subsection.1.2.1}
6 | \contentsline {subsubsection}{pyGPGO.surrogates.BoostedTrees module}{4}{subsubsection*.15}
7 | \contentsline {subsubsection}{pyGPGO.surrogates.GaussianProcess module}{5}{subsubsection*.20}
8 | \contentsline {paragraph}{Notes}{6}{paragraph*.26}
9 | \contentsline {subsubsection}{pyGPGO.surrogates.GaussianProcessMCMC module}{7}{subsubsection*.35}
10 | \contentsline {subsubsection}{pyGPGO.surrogates.RandomForest module}{8}{subsubsection*.41}
11 | \contentsline {subsubsection}{pyGPGO.surrogates.tStudentProcess module}{10}{subsubsection*.50}
12 | \contentsline {subsubsection}{pyGPGO.surrogates.tStudentProcessMCMC module}{11}{subsubsection*.62}
13 | \contentsline {subsection}{\numberline {1.2.2}Module contents}{12}{subsection.1.2.2}
14 | \contentsline {section}{\numberline {1.3}pyGPGO.covfunc module}{12}{section.1.3}
15 | \contentsline {section}{\numberline {1.4}pyGPGO.acquisition module}{18}{section.1.4}
16 | \contentsline {chapter}{\numberline {2}Indices and tables}{23}{chapter.2}
17 | \contentsline {chapter}{Python Module Index}{25}{section*.104}
18 | \contentsline {chapter}{Index}{27}{section*.105}
19 |
--------------------------------------------------------------------------------
/docs/build/latex/python.ist:
--------------------------------------------------------------------------------
1 | line_max 100
2 | headings_flag 1
3 | heading_prefix " \\bigletter "
4 |
5 | preamble "\\begin{theindex}
6 | \\def\\bigletter#1{{\\Large\\sffamily#1}\\nopagebreak\\vspace{1mm}}
7 |
8 | "
9 |
10 | symhead_positive "{Symbols}"
11 | numhead_positive "{Numbers}"
12 |
--------------------------------------------------------------------------------
/docs/build/latex/sphinxhowto.cls:
--------------------------------------------------------------------------------
1 | %
2 | % sphinxhowto.cls for Sphinx (http://sphinx-doc.org/)
3 | %
4 |
5 | \NeedsTeXFormat{LaTeX2e}[1995/12/01]
6 | \ProvidesClass{sphinxhowto}[2009/06/02 Document class (Sphinx HOWTO)]
7 |
8 | \ifx\directlua\undefined\else
9 | % if compiling with lualatex 0.85 or later load compatibility patch issued by
10 | % the LaTeX team for older packages relying on \pdf named primitives.
11 | \IfFileExists{luatex85.sty}{\RequirePackage{luatex85}}{}
12 | \fi
13 |
14 | % 'oneside' option overriding the 'twoside' default
15 | \newif\if@oneside
16 | \DeclareOption{oneside}{\@onesidetrue}
17 | % Pass remaining document options to the parent class.
18 | \DeclareOption*{\PassOptionsToClass{\CurrentOption}{\sphinxdocclass}}
19 | \ProcessOptions\relax
20 |
21 | % Default to two-side document
22 | \if@oneside
23 | % nothing to do (oneside is the default)
24 | \else
25 | \PassOptionsToClass{twoside}{\sphinxdocclass}
26 | \fi
27 |
28 | \LoadClass{\sphinxdocclass}
29 |
30 | % Set some sane defaults for section numbering depth and TOC depth. You can
31 | % reset these counters in your preamble.
32 | %
33 | \setcounter{secnumdepth}{2}
34 |
35 | % Change the title page to look a bit better, and fit in with the fncychap
36 | % ``Bjarne'' style a bit better.
37 | %
38 | \renewcommand{\maketitle}{%
39 | \noindent\rule{\textwidth}{1pt}\ifsphinxpdfoutput\newline\null\fi\par
40 | \ifsphinxpdfoutput
41 | \begingroup
42 | % These \defs are required to deal with multi-line authors; it
43 | % changes \\ to ', ' (comma-space), making it pass muster for
44 | % generating document info in the PDF file.
45 | \def\\{, }%
46 | \def\and{and }%
47 | \pdfinfo{
48 | /Author (\@author)
49 | /Title (\@title)
50 | }%
51 | \endgroup
52 | \fi
53 | \begin{flushright}
54 | \sphinxlogo
55 | \py@HeaderFamily
56 | {\Huge \@title }\par
57 | {\itshape\large \py@release \releaseinfo}\par
58 | \vspace{25pt}
59 | {\Large
60 | \begin{tabular}[t]{c}
61 | \@author
62 | \end{tabular}}\par
63 | \vspace{25pt}
64 | \@date \par
65 | \py@authoraddress \par
66 | \end{flushright}
67 | \@thanks
68 | \setcounter{footnote}{0}
69 | \let\thanks\relax\let\maketitle\relax
70 | %\gdef\@thanks{}\gdef\@author{}\gdef\@title{}
71 | }
72 |
73 | \let\py@OldTableofcontents=\tableofcontents
74 | \renewcommand{\tableofcontents}{
75 | \begingroup
76 | \parskip = 0mm
77 | \py@OldTableofcontents
78 | \endgroup
79 | \rule{\textwidth}{1pt}
80 | \vspace{12pt}
81 | }
82 |
83 | \@ifundefined{fancyhf}{
84 | \pagestyle{plain}}{
85 | \pagestyle{normal}} % start this way; change for
86 | \pagenumbering{arabic} % ToC & chapters
87 |
88 | \thispagestyle{empty}
89 |
90 | % Fix the bibliography environment to add an entry to the Table of
91 | % Contents.
92 | % For an article document class this environment is a section,
93 | % so no page break before it.
94 | \let\py@OldThebibliography=\thebibliography
95 | \renewcommand{\thebibliography}[1]{
96 | \phantomsection
97 | \py@OldThebibliography{1}
98 | \addcontentsline{toc}{section}{\bibname}
99 | }
100 |
101 | % Same for the indices.
102 | % The memoir class already does this, so we don't duplicate it in that case.
103 | %
104 | \@ifclassloaded{memoir}{}{
105 | \let\py@OldTheindex=\theindex
106 | \renewcommand{\theindex}{
107 | \phantomsection
108 | \py@OldTheindex
109 | \addcontentsline{toc}{section}{\indexname}
110 | }
111 | }
112 |
--------------------------------------------------------------------------------
/docs/build/latex/sphinxmanual.cls:
--------------------------------------------------------------------------------
1 | %
2 | % sphinxmanual.cls for Sphinx (http://sphinx-doc.org/)
3 | %
4 |
5 | \NeedsTeXFormat{LaTeX2e}[1995/12/01]
6 | \ProvidesClass{sphinxmanual}[2009/06/02 Document class (Sphinx manual)]
7 |
8 | \ifx\directlua\undefined\else
9 | % if compiling with lualatex 0.85 or later load compatibility patch issued by
10 | % the LaTeX team for older packages relying on \pdf named primitives.
11 | \IfFileExists{luatex85.sty}{\RequirePackage{luatex85}}{}
12 | \fi
13 |
14 | % chapters starting at odd pages (overridden by 'openany' document option)
15 | \PassOptionsToClass{openright}{\sphinxdocclass}
16 |
17 | % 'oneside' option overriding the 'twoside' default
18 | \newif\if@oneside
19 | \DeclareOption{oneside}{\@onesidetrue}
20 | % Pass remaining document options to the parent class.
21 | \DeclareOption*{\PassOptionsToClass{\CurrentOption}{\sphinxdocclass}}
22 | \ProcessOptions\relax
23 |
24 | % Defaults two-side document
25 | \if@oneside
26 | % nothing to do (oneside is the default)
27 | \else
28 | \PassOptionsToClass{twoside}{\sphinxdocclass}
29 | \fi
30 |
31 | \LoadClass{\sphinxdocclass}
32 |
33 | % Set some sane defaults for section numbering depth and TOC depth. You can
34 | % reset these counters in your preamble.
35 | %
36 | \setcounter{secnumdepth}{2}
37 | \setcounter{tocdepth}{1}
38 |
39 | % Change the title page to look a bit better, and fit in with the fncychap
40 | % ``Bjarne'' style a bit better.
41 | %
42 | \renewcommand{\maketitle}{%
43 | \begin{titlepage}%
44 | \let\footnotesize\small
45 | \let\footnoterule\relax
46 | \noindent\rule{\textwidth}{1pt}\ifsphinxpdfoutput\newline\null\fi\par
47 | \ifsphinxpdfoutput
48 | \begingroup
49 | % These \defs are required to deal with multi-line authors; it
50 | % changes \\ to ', ' (comma-space), making it pass muster for
51 | % generating document info in the PDF file.
52 | \def\\{, }%
53 | \def\and{and }%
54 | \pdfinfo{
55 | /Author (\@author)
56 | /Title (\@title)
57 | }%
58 | \endgroup
59 | \fi
60 | \begin{flushright}%
61 | \sphinxlogo
62 | \py@HeaderFamily
63 | {\Huge \@title \par}
64 | {\itshape\LARGE \py@release\releaseinfo \par}
65 | \vfill
66 | {\LARGE
67 | \begin{tabular}[t]{c}
68 | \@author
69 | \end{tabular}
70 | \par}
71 | \vfill\vfill
72 | {\large
73 | \@date \par
74 | \vfill
75 | \py@authoraddress \par
76 | }%
77 | \end{flushright}%\par
78 | \@thanks
79 | \end{titlepage}%
80 | \setcounter{footnote}{0}%
81 | \let\thanks\relax\let\maketitle\relax
82 | %\gdef\@thanks{}\gdef\@author{}\gdef\@title{}
83 | }
84 |
85 | \let\py@OldTableofcontents=\tableofcontents
86 | \renewcommand{\tableofcontents}{%
87 | % before resetting page counter, let's do the right thing.
88 | \if@openright\cleardoublepage\else\clearpage\fi
89 | \pagenumbering{roman}%
90 | \pagestyle{plain}%
91 | \begingroup
92 | \parskip \z@skip
93 | \py@OldTableofcontents
94 | \endgroup
95 | % before resetting page counter, let's do the right thing.
96 | \if@openright\cleardoublepage\else\clearpage\fi
97 | \pagenumbering{arabic}%
98 | \ifdefined\fancyhf\pagestyle{normal}\fi
99 | }
100 | \pagenumbering{alph}% avoid hyperref "duplicate destination" warnings
101 |
102 | % This is needed to get the width of the section # area wide enough in the
103 | % library reference. Doing it here keeps it the same for all the manuals.
104 | %
105 | \renewcommand*\l@section{\@dottedtocline{1}{1.5em}{2.6em}}
106 | \renewcommand*\l@subsection{\@dottedtocline{2}{4.1em}{3.5em}}
107 |
108 | % Fix the bibliography environment to add an entry to the Table of
109 | % Contents.
110 | % For a report document class this environment is a chapter.
111 | \let\py@OldThebibliography=\thebibliography
112 | \renewcommand{\thebibliography}[1]{
113 | \if@openright\cleardoublepage\else\clearpage\fi
114 | \phantomsection
115 | \py@OldThebibliography{1}
116 | \addcontentsline{toc}{chapter}{\bibname}
117 | }
118 |
119 | % Same for the indices.
120 | % The memoir class already does this, so we don't duplicate it in that case.
121 | %
122 | \@ifclassloaded{memoir}{}{
123 | \let\py@OldTheindex=\theindex
124 | \renewcommand{\theindex}{
125 | \if@openright\cleardoublepage\else\clearpage\fi
126 | \phantomsection
127 | \py@OldTheindex
128 | \addcontentsline{toc}{chapter}{\indexname}
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/docs/build/latex/upquote.sty:
--------------------------------------------------------------------------------
1 | %%
2 | %% This is file `upquote.sty',
3 | %% generated with the docstrip utility.
4 | %%
5 | %% The original source files were:
6 | %%
7 | %% upquote.dtx (with options: `package')
8 | %%
9 | %% Copyright (C) 2000 by Michael A. Covington
10 | %% Copyright (C) 2003 by Frank Mittelbach
11 | %% Copyright (C) 2012 by Markus Kuhn (current maintainer)
12 | %%
13 | %% Released under the LaTeX Project Public License v1.3c or later
14 | %% See http://www.latex-project.org/lppl.txt
15 | %%
16 | \NeedsTeXFormat{LaTeX2e}
17 | \ProvidesPackage{upquote}
18 | [2012/04/19 v1.3 upright-quote and grave-accent glyphs in verbatim]
19 | \newcommand\upquote@cmtt{cmtt}
20 | \newcommand\upquote@OTone{OT1}
21 | \ifx\encodingdefault\upquote@OTone
22 | \ifx\ttdefault\upquote@cmtt\else\RequirePackage{textcomp}\fi
23 | \else
24 | \RequirePackage{textcomp}
25 | \fi
26 | \begingroup
27 | \catcode`'=\active
28 | \catcode``=\active
29 | \g@addto@macro\@noligs
30 | {\let'\textquotesingle
31 | \let`\textasciigrave
32 | \ifx\encodingdefault\upquote@OTone
33 | \ifx\ttdefault\upquote@cmtt
34 | \def'{\char13 }%
35 | \def`{\char18 }%
36 | \fi\fi}
37 | \endgroup
38 | \endinput
39 | %%
40 | %% End of file `upquote.sty'.
41 |
--------------------------------------------------------------------------------
/docs/build/latex/{eqp@restorefont}l:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/docs/build/latex/{eqp@restorefont}l
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | pyGPGO documentation
2 | ====================
3 |
4 |
5 | Contents:
6 |
7 | .. toctree::
8 | :maxdepth: 3
9 |
10 | Bayesian Optimization module
11 | Surrogates module
12 | Covariance function module
13 | Acquisition function module
14 |
15 |
--------------------------------------------------------------------------------
/docs/source/comparison.rst:
--------------------------------------------------------------------------------
1 | Comparison with other software
2 | ==============================
3 |
4 | pyGPGO is not the only available Python package for Bayesian optimization. To the best of our knowledge, it is one of
5 | the most comprehensive in terms of features available to the user. We show a table comparing
6 | some of the most common features here:
7 |
8 |
9 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
10 | | | pyGPGO | Spearmint | fmfn/BayesianOptimization | pyBO | MOE | GPyOpt | scikit-optimize |
11 | +===============================+========================+===========+===========================+==================================+==========+===============+==================+
12 | | GP implementation | Native | Native | via scikit-learn | via Reggie | Native | via GPy | via scikit-learn |
13 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
14 | | Modular | Yes | No | No | No | No | Yes | No |
15 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
16 | | Surrogates | {GP, tSP, RF, ET, GBM} | {GP} | {GP} | {GP} | {GP} | {GP, RF, WGP} | {GP, RF, GBM} |
17 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
18 | | Type II ML optimization | Yes | No | No | No | Yes | Yes | Yes |
19 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
20 | | MCMC inference | Yes (via pyMC3) | Yes | No | Yes | No | Yes | No |
21 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
22 | | Choice of MCMC sampler | Yes | Yes | No | Yes | No | No | No |
23 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
24 | | Acquisition functions | {PI, EI, UCB, Entropy} | {EI} | {PI, EI, UCB} | {PI, EI, UCB, Thompson sampling} | {EI} | {PI, EI, UCB} | {PI, EI, UCB} |
25 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
26 | | Integrated acq. function | Yes | Yes | No | Yes | No | Yes | No |
27 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
28 | | License | MIT | Academic | MIT | BSD-2 | Apache | BSD-3 | BSD |
29 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
30 | | Last update (as of Apr. 2017) | - | Apr 2016 | Mar 2017 | Sept 2015 | Apr 2016 | Apr 2017 | Apr 2017 |
31 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
32 | | Python version | > 3.5 | 2.7 | 2/3 | 2/3 | 2.7 | 2/3 | 2/3 |
33 | +-------------------------------+------------------------+-----------+---------------------------+----------------------------------+----------+---------------+------------------+
34 |
35 | If you would like some other feature implemented in pyGPGO, or think this table is outdated or incorrect, please let us know by opening an issue on the GitHub repository of the package!
36 |
--------------------------------------------------------------------------------
/docs/source/features.rst:
--------------------------------------------------------------------------------
1 | Features
2 | ==================
3 |
4 | The Bayesian optimization framework is very flexible, as it allows for choices in many
5 | steps of its design. To name a few of the choices that pyGPGO provides to the user:
6 |
7 | Surrogate models :class:`pyGPGO.surrogates`
8 | --------------------------------------------
9 |
10 | The framework works by specifying a surrogate model that approximates our target function
11 | better after each evaluation. The most common surrogate in the literature is the Gaussian
12 | Process, but the framework is model agnostic. Some featured models are listed below, followed by a short usage sketch:
13 |
14 | - Gaussian Processes (:class:`pyGPGO.surrogates.GaussianProcess` and :class:`pyGPGO.surrogates.GaussianProcessMCMC`): By far the most common choice, it needs the user to specify a covariance function (detailed in the next section), measuring similarity among training examples. For a good introduction to Gaussian Processes, check [Rasmussen-Williams2004]_ .
15 | - Student-t Processes (:class:`pyGPGO.surrogates.tStudentProcess` and :class:`pyGPGO.surrogates.tStudentProcessMCMC`): Some functions benefit from the heavy-tailed nature of the Student-t distribution. It also requires providing a covariance function.
16 | - Random Forests (:class:`pyGPGO.surrogates.RandomForest`): provided via `sklearn`, this is a nonparametric surrogate model that does not require specifying a covariance function. A class for Extra Random Forests is also available. The posterior variance is approximated by averaging the variance of each subtree.
17 | - Gradient Boosting Machines (:class:`pyGPGO.surrogates.BoostedTrees`): similar to the latter, posterior variance is approximated using quantile regression.
18 |
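As a minimal sketch of how the surrogates interchange (the optimization loop below mirrors the example in the index page; the argument-free ``RandomForest()`` constructor is an assumption, as its parameters are not documented here)::

    import numpy as np
    from pyGPGO.covfunc import squaredExponential
    from pyGPGO.surrogates.GaussianProcess import GaussianProcess
    from pyGPGO.surrogates.RandomForest import RandomForest
    from pyGPGO.acquisition import Acquisition
    from pyGPGO.GPGO import GPGO

    def f(x):
        return np.sin(x)

    param = {'x': ('cont', [0, 2 * np.pi])}
    acq = Acquisition(mode='ExpectedImprovement')

    # GP surrogates need a covariance function; tree-based surrogates do not.
    gp = GaussianProcess(squaredExponential())
    rf = RandomForest()

    # The same optimization loop runs regardless of the surrogate chosen.
    for surrogate in (gp, rf):
        gpgo = GPGO(surrogate, acq, f, param)
        gpgo.run(max_iter=10)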
19 |
20 | Covariance functions :class:`pyGPGO.covfunc`
21 | ---------------------------------------------
22 |
23 | These determine how similar training examples are for the surrogate model. Most of these also
24 | have hyperparameters that need to be taken into account. pyGPGO implements
25 | the most common covariance functions and their gradients w.r.t. hyperparameters,
26 | which we briefly list here, followed by a short usage sketch.
27 |
28 | - Squared Exponential (:class:`pyGPGO.covfunc.squaredExponential`)
29 | - Matérn (:class:`pyGPGO.covfunc.matern` or :class:`pyGPGO.covfunc.matern32` or :class:`pyGPGO.covfunc.matern52`)
30 | - Gamma-Exponential (:class:`pyGPGO.covfunc.gammaExponential`)
31 | - Rational-Quadratic (:class:`pyGPGO.covfunc.rationalQuadratic`)
32 | - ArcSine (:class:`pyGPGO.covfunc.arcSine`)
33 | - Dot-product (:class:`pyGPGO.covfunc.dotProd`)
34 |
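As a brief sketch of how a covariance function is instantiated, assuming each class exposes a kernel-evaluation method ``K(X, Xstar)`` (the method name is an assumption, not documented on this page)::

    import numpy as np
    from pyGPGO.covfunc import squaredExponential, matern52

    X = np.linspace(0, 1, 5).reshape(-1, 1)

    sexp = squaredExponential()   # default hyperparameters
    m52 = matern52()

    # Assumed interface: K(X, Xstar) returns the covariance matrix
    # between two sets of points.
    K_sexp = sexp.K(X, X)
    K_m52 = m52.K(X, X)
    print(K_sexp.shape, K_m52.shape)   # (5, 5) (5, 5)

Swapping one covariance function for another only changes how this kernel matrix is built; the surrogate and the rest of the optimization loop stay the same.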
35 |
36 | Acquisition behaviour :class:`pyGPGO.acquisition`
37 | --------------------------------------------------
38 |
39 | In each iteration of the framework, we choose the next point to evaluate according to a behaviour
40 | dictated by what we call an acquisition function, balancing exploration and exploitation of
41 | the sampled space. pyGPGO supports the most common acquisition functions in the literature.
42 |
43 | - Probability of improvement: chooses the next point according to the probability of improvement w.r.t. the best observed value.
44 | - Expected improvement: similar to probability of improvement, but also weighs the probability by the amount improved. It naturally balances exploration and exploitation and is by far the most used acquisition function in the literature.
45 | - Upper confidence limit: features a beta parameter to explicitly control the balance of exploration vs. exploitation. Higher beta values lead to higher levels of exploration.
46 | - Entropy: Information-theory based acquisition function.
47 |
48 | Integrated versions of these are also available for the MCMC sampling versions of the surrogate
49 | models.
50 |
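The acquisition behaviour is selected by name when constructing the ``Acquisition`` object. ``'ExpectedImprovement'`` is the mode used in the introductory example; the ``'UCB'`` mode name and the ``beta`` keyword below are assumptions following the list above::

    from pyGPGO.acquisition import Acquisition

    # By far the most common choice.
    acq_ei = Acquisition(mode='ExpectedImprovement')

    # Assumed mode name; beta explicitly trades exploration vs. exploitation,
    # with higher values favouring exploration.
    acq_ucb = Acquisition(mode='UCB', beta=1.5)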
51 | Hyperparameter treatment
52 | ------------------------
53 |
54 | Covariance functions also have hyperparameters, and their treatment is thoroughly discussed in the literature (see [Shahriari2016]_ ).
55 | To summarize, we mainly have two options available, both sketched after the list below:
56 |
57 |
58 | - Optimizing the marginal log-likelihood, also called the Empirical Bayes approach. pyGPGO supports this feature using analytical gradients for almost all covariance functions.
59 | - The full Bayesian approach takes into account the uncertainty caused by the hyperparameters in the optimization procedure by marginalizing them, that is, integrating over them. pyGPGO implements this via MCMC sampling provided by the pyMC3 software, which in turn also provides an easy way for the user to choose whatever sampler they wish.
60 |
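As a rough sketch of both treatments (the ``optimize`` and ``niter`` keyword names below are assumptions, not taken from this page)::

    from pyGPGO.covfunc import squaredExponential
    from pyGPGO.surrogates.GaussianProcess import GaussianProcess
    from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC

    sexp = squaredExponential()

    # Empirical Bayes: optimize the marginal log-likelihood of the
    # covariance hyperparameters (keyword name assumed).
    gp = GaussianProcess(sexp, optimize=True)

    # Full Bayes: marginalize the hyperparameters by MCMC sampling via
    # pyMC3 (number of samples and keyword name assumed).
    gp_mcmc = GaussianProcessMCMC(sexp, niter=300)

Either surrogate can then be passed to a ``GPGO`` instance exactly as before.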
61 | References
62 | ----------
63 |
64 | .. [Rasmussen-Williams2004] Rasmussen, C. E., & Williams, C. K. I. (2004). Gaussian processes for machine learning. International journal of neural systems (Vol. 14). http://doi.org/10.1142/S0129065704001899
65 | .. [Shahriari2016] Shahriari, B., Swersky, K., Wang, Z., Adams, R. P., & De Freitas, N. (2016). Taking the human out of the loop: A review of Bayesian optimization. Proceedings of the IEEE. http://doi.org/10.1109/JPROC.2015.2494218
66 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. pyGPGO documentation master file, created by
2 | sphinx-quickstart on Thu Mar 23 17:21:57 2017.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | pyGPGO: Bayesian optimization for Python
7 | =========================================
8 |
9 | pyGPGO is a simple and modular Python (>3.5) package for Bayesian optimization. It supports:
10 |
11 | - Different surrogate models: Gaussian Processes, Student-t Processes, Random Forests, Gradient Boosting Machines.
12 | - Type II Maximum-Likelihood of covariance function hyperparameters.
13 | - MCMC sampling for full-Bayesian inference of hyperparameters (via ``pyMC3``).
14 | - Integrated acquisition functions.
15 |
16 | Check us out on `Github <https://github.com/hawk31/pyGPGO>`_.
17 |
18 | pyGPGO uses other well-known packages of the Python scientific ecosystem as dependencies:
19 |
20 | - numpy
21 | - scipy
22 | - joblib
23 | - scikit-learn
24 | - pyMC3
25 | - theano
26 |
27 | These are automatically taken care of by the requirements file.
28 |
29 |
30 | What is Bayesian Optimization?
31 | ==================================
32 |
33 | Bayesian optimization is a framework that is useful in several scenarios:
34 |
35 | - Your objective function has no closed form.
36 | - You have no access to gradients.
37 | - Evaluations are noisy.
38 | - Evaluations may be expensive.
39 |
40 | The Bayesian optimization framework uses a surrogate model to approximate the objective function and chooses the
41 | next point to evaluate according to some acquisition function. This framework gives a lot of freedom to the user in terms
42 | of optimization choices:
43 |
44 | - Surrogate model choice
45 | - Covariance function choice
46 | - Acquisition function behaviour
47 | - Hyperparameter treatment
48 |
49 | pyGPGO provides an extensive range of choices in each of the previous points, in a modular way. We recommend checking
50 | [Shahriari2016]_ for an in-depth review of the framework if you're interested.
51 |
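In pseudocode, the loop behind the framework looks roughly as follows (an illustrative sketch only, not pyGPGO's actual internals)::

    evaluate f on a few initial points and store them as (X, y)
    for each iteration up to max_iter:
        fit the surrogate model to (X, y)
        x_next = argmax of the acquisition function under the surrogate
        y_next = f(x_next)
        append (x_next, y_next) to (X, y)
    return the best (x, y) observed so far
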
52 |
53 | How do I get started with pyGPGO?
54 | ==================================
55 |
56 | Install the latest stable release from PyPI::
57 |
58 | pip install pyGPGO
59 |
60 |
61 | or, if you're feeling adventurous, install the latest development version from the Github repository::
62 |
63 | pip install git+https://github.com/hawk31/pyGPGO
64 |
65 |
66 | pyGPGO is straightforward to use; we only need to specify:
67 |
68 | - A function to optimize according to some parameters.
69 | - A dictionary defining parameters, their type and bounds.
70 | - A surrogate model, such as a Gaussian Process, from the ``surrogates`` module. Some surrogate models require
71 | a covariance function with hyperparameters (from the ``covfunc`` module).
72 | - An acquisition strategy, from the ``acquisition`` module.
73 | - A GPGO instance, from the ``GPGO`` module.
74 |
75 | A simple example can be checked below::
76 |
77 | import numpy as np
78 | from pyGPGO.covfunc import squaredExponential
79 | from pyGPGO.acquisition import Acquisition
80 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
81 | from pyGPGO.GPGO import GPGO
82 |
83 | def f(x):
84 | return (np.sin(x))
85 |
86 |
87 | sexp = squaredExponential()
88 | gp = GaussianProcess(sexp)
89 | acq = Acquisition(mode='ExpectedImprovement')
90 | param = {'x': ('cont', [0, 2 * np.pi])}
91 |
92 | np.random.seed(23)
93 | gpgo = GPGO(gp, acq, f, param)
94 | gpgo.run(max_iter=20)
95 |
96 |
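Once the run finishes, the best parameter setting and target value found so far can be retrieved, as done in the bundled examples::

    print(gpgo.getResult())
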
97 | There are a couple of tutorials to help you get started in the ``tutorials`` folder of the repository.
98 |
99 | For a full list of features with explanations, check our Features section:
100 |
101 | .. toctree::
102 | :maxdepth: 1
103 |
104 | features
105 |
106 | pyGPGO is not the only package for Bayesian optimization in Python; other excellent alternatives exist. For an in-depth comparison
107 | of the features offered by pyGPGO and other software, check the following section:
108 |
109 | .. toctree::
110 | :maxdepth: 1
111 |
112 | comparison
113 |
114 | API documentation
115 | =================
116 |
117 | .. toctree::
118 | :maxdepth: 3
119 |
120 | api
121 |
122 |
123 | References
124 | ==========
125 |
126 | .. [Shahriari2016] Shahriari, B., Swersky, K., Wang, Z., Adams, R. P., & De Freitas, N. (2016). Taking the human out of the loop: A review of Bayesian optimization. Proceedings of the IEEE. http://doi.org/10.1109/JPROC.2015.2494218
127 |
128 |
129 | Indices and tables
130 | ==================
131 |
132 | * :ref:`genindex`
133 | * :ref:`modindex`
134 | * :ref:`search`
135 |
136 |
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | pyGPGO
2 | ======
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | pyGPGO
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.GPGO.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.GPGO module
2 | ===================
3 |
4 | .. automodule:: pyGPGO.GPGO
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.acquisition.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.acquisition module
2 | ==========================
3 |
4 | .. automodule:: pyGPGO.acquisition
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.covfunc.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.covfunc module
2 | ======================
3 |
4 | .. automodule:: pyGPGO.covfunc
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.logger.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.logger module
2 | =====================
3 |
4 | .. automodule:: pyGPGO.logger
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.rst:
--------------------------------------------------------------------------------
1 | pyGPGO package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | pyGPGO.surrogates
10 |
11 | Submodules
12 | ----------
13 |
14 | .. toctree::
15 |
16 | pyGPGO.GPGO
17 | pyGPGO.acquisition
18 | pyGPGO.covfunc
19 | pyGPGO.logger
20 | pyGPGO.version
21 |
22 | Module contents
23 | ---------------
24 |
25 | .. automodule:: pyGPGO
26 | :members:
27 | :undoc-members:
28 | :show-inheritance:
29 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.BoostedTrees.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.BoostedTrees module
2 | =======================================
3 |
4 | .. automodule:: pyGPGO.surrogates.BoostedTrees
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.GaussianProcess.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.GaussianProcess module
2 | ==========================================
3 |
4 | .. automodule:: pyGPGO.surrogates.GaussianProcess
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.GaussianProcessMCMC.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.GaussianProcessMCMC module
2 | ==============================================
3 |
4 | .. automodule:: pyGPGO.surrogates.GaussianProcessMCMC
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.RandomForest.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.RandomForest module
2 | =======================================
3 |
4 | .. automodule:: pyGPGO.surrogates.RandomForest
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates package
2 | ==========================
3 |
4 | Submodules
5 | ----------
6 |
7 | .. toctree::
8 |
9 | pyGPGO.surrogates.BoostedTrees
10 | pyGPGO.surrogates.GaussianProcess
11 | pyGPGO.surrogates.GaussianProcessMCMC
12 | pyGPGO.surrogates.RandomForest
13 | pyGPGO.surrogates.tStudentProcess
14 | pyGPGO.surrogates.tStudentProcessMCMC
15 |
16 | Module contents
17 | ---------------
18 |
19 | .. automodule:: pyGPGO.surrogates
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.tStudentProcess.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.tStudentProcess module
2 | ==========================================
3 |
4 | .. automodule:: pyGPGO.surrogates.tStudentProcess
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.surrogates.tStudentProcessMCMC.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.surrogates\.tStudentProcessMCMC module
2 | ==============================================
3 |
4 | .. automodule:: pyGPGO.surrogates.tStudentProcessMCMC
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/pyGPGO.version.rst:
--------------------------------------------------------------------------------
1 | pyGPGO\.version module
2 | ======================
3 |
4 | .. automodule:: pyGPGO.version
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/examples/acqzoo.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # acqzoo: shows the behaviour of different
4 | # acquisition functions on a GP surrogate
5 | # for a sine-like function
6 | #######################################
7 |
8 |
9 | import numpy as np
10 | import matplotlib.pyplot as plt
11 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
12 | from pyGPGO.acquisition import Acquisition
13 | from pyGPGO.covfunc import squaredExponential
14 | from pyGPGO.GPGO import GPGO
15 |
16 |
17 | def plotGPGO(gpgo, param, index, new=True):
18 | param_value = list(param.values())[0][1]
19 | x_test = np.linspace(param_value[0], param_value[1], 1000).reshape((1000, 1))
20 | y_hat, y_var = gpgo.GP.predict(x_test, return_std=True)
21 | std = np.sqrt(y_var)
22 | l, u = y_hat - 1.96 * std, y_hat + 1.96 * std
23 | if new:
24 | plt.figure()
25 | plt.subplot(5, 1, 1)
26 | plt.fill_between(x_test.flatten(), l, u, alpha=0.2)
27 | plt.plot(x_test.flatten(), y_hat)
28 | plt.subplot(5, 1, index)
29 | a = np.array([-gpgo._acqWrapper(np.atleast_1d(x)) for x in x_test]).flatten()
30 | plt.plot(x_test, a, color=colors[index - 2], label=acq_titles[index - 2])
31 | gpgo._optimizeAcq(method='L-BFGS-B', n_start=1000)
32 | plt.axvline(x=gpgo.best)
33 | plt.legend(loc=0)
34 |
35 |
36 | if __name__ == '__main__':
37 | def f(x):
38 | return (np.sin(x))
39 |
40 | acq_1 = Acquisition(mode='ExpectedImprovement')
41 | acq_2 = Acquisition(mode='ProbabilityImprovement')
42 | acq_3 = Acquisition(mode='UCB', beta=0.5)
43 | acq_4 = Acquisition(mode='UCB', beta=1.5)
44 | acq_list = [acq_1, acq_2, acq_3, acq_4]
45 | sexp = squaredExponential()
46 | param = {'x': ('cont', [0, 2 * np.pi])}
47 | new = True
48 | colors = ['green', 'red', 'orange', 'black']
49 | acq_titles = [r'Expected improvement', r'Probability of Improvement', r'GP-UCB $\beta = .5$',
50 | r'GP-UCB $\beta = 1.5$']
51 |
52 | for index, acq in enumerate(acq_list):
53 | np.random.seed(200)
54 | gp = GaussianProcess(sexp)
55 | gpgo = GPGO(gp, acq, f, param)
56 | gpgo._firstRun(n_eval=3)
57 | plotGPGO(gpgo, param, index=index + 2, new=new)
58 | new = False
59 |
60 | plt.show()
61 |
--------------------------------------------------------------------------------
/examples/bayoptwork.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # bayoptwork: Generates a plot to show how the Bayesian Optimization framework
4 | # works, ignoring areas with either low posterior mean or low variance.
5 | #######################################
6 |
7 |
8 | import numpy as np
9 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
10 | from pyGPGO.covfunc import squaredExponential
11 | import matplotlib.pyplot as plt
12 |
13 | if __name__ == '__main__':
14 | # Build synthetic data (sine function)
15 | x = np.arange(0, 2 * np.pi + 0.01, step=np.pi / 1.5)
16 | y = np.sin(x)
17 | X = np.array([np.atleast_2d(u) for u in x])[:, 0]
18 |
19 | # Specify covariance function
20 | sexp = squaredExponential()
21 | # Instantiate GPRegressor class
22 | gp = GaussianProcess(sexp)
23 | # Fit the model to the data
24 | gp.fit(X, y)
25 |
26 | # Predict on new data
27 | xstar = np.arange(0, 2 * np.pi, step=0.01)
28 | Xstar = np.array([np.atleast_2d(u) for u in xstar])[:, 0]
29 | ymean, ystd = gp.predict(Xstar, return_std=True)
30 |
31 | # Confidence interval bounds
32 | lower, upper = ymean - 1.96 * ystd, ymean + 1.96 * ystd
33 |
34 | # Plot values
35 | plt.figure()
36 | plt.plot(xstar, ymean, label='Posterior mean')
37 | plt.plot(xstar, lower, '--', label='Lower confidence bound')
38 | plt.plot(xstar, upper, '--', label='Upper confidence bound')
39 | plt.axhline(y=np.max(lower), color='black')
40 | plt.axvspan(0, .68, color='grey', alpha=0.3)
41 | plt.plot(xstar[np.argmax(lower)], np.max(lower), '*', markersize=20)
42 | plt.axvspan(3.04, 7, color='grey', alpha=0.3, label='Discarded region')
43 | plt.text(3.75, 0.75, 'max LCB')
44 | plt.grid()
45 | plt.legend(loc=0)
46 | plt.show()
47 |
--------------------------------------------------------------------------------
/examples/covzoo.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # covzoo: compares the behaviour of different covariance
4 | # functions on a GP fit to a sine-like function.
5 | #######################################
6 |
7 | import numpy as np
8 | from pyGPGO.covfunc import squaredExponential, matern32, gammaExponential, rationalQuadratic
9 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
10 | import matplotlib.pyplot as plt
11 |
12 |
13 | if __name__ == '__main__':
14 | # Build synthetic data (sine function)
15 | x = np.arange(0, 2 * np.pi + 0.01, step=np.pi / 2.05)
16 | y = np.sin(x)
17 | X = np.array([np.atleast_2d(u) for u in x])[:, 0]
18 |
19 | # Covariance functions to loop over
20 | covfuncs = [squaredExponential(), matern32(), gammaExponential(), rationalQuadratic()]
21 | titles = [r'Squared Exponential ($l = 1$)', r'Matérn ($\nu = 1.5$, $l = 1$)',
22 | r'Gamma Exponential ($\gamma = 1, l = 1$)', r'Rational Quadratic ($\alpha = 1, l = 1$)']
23 |
24 | cm_bright = ['#9ad2cb', '#add0cd', '#b8c3ce', '#9daec9']
25 | #plt.rc('text', usetex=True)
26 | for i, cov in enumerate(covfuncs):
27 | gp = GaussianProcess(cov, optimize=True, usegrads=False)
28 | gp.fit(X, y)
29 | xstar = np.arange(0, 2 * np.pi, step=0.01)
30 | Xstar = np.array([np.atleast_2d(u) for u in xstar])[:, 0]
31 | ymean, ystd = gp.predict(Xstar, return_std=True)
32 |
33 | lower, upper = ymean - 1.96 * np.sqrt(ystd), ymean + 1.96 * np.sqrt(ystd)
34 | plt.subplot(2, 2, i + 1)
35 | plt.plot(xstar, ymean, label='Posterior mean')
36 | plt.plot(xstar, np.sin(xstar), label='True function')
37 | plt.fill_between(xstar, lower, upper, alpha=0.4, label='95% confidence band', color=cm_bright[i])
38 | plt.grid()
39 | plt.title(titles[i])
40 | plt.legend(loc=0)
41 | plt.show()
42 |
--------------------------------------------------------------------------------
/examples/drawGP.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # drawGP: Samples from a GP prior.
4 | #######################################
5 |
6 | import numpy as np
7 | from numpy.random import multivariate_normal
8 | from pyGPGO.covfunc import squaredExponential
9 | import matplotlib.pyplot as plt
10 |
11 | if __name__ == '__main__':
12 | np.random.seed(93)
13 | # Equally spaced values of Xstar
14 | Xstar = np.arange(0, 2 * np.pi, step=np.pi / 24)
15 | Xstar = np.array([np.atleast_2d(x) for x in Xstar])[:, 0]
16 | sexp = squaredExponential()
17 | # By default assume mean 0
18 | m = np.zeros(Xstar.shape[0])
19 | # Compute squared-exponential matrix
20 | K = sexp.K(Xstar, Xstar)
21 |
22 | n_samples = 3
23 | # Draw samples from multivariate normal
24 | samples = multivariate_normal(m, K, size=n_samples)
25 |
26 | # Plot values
27 | x = Xstar.flatten()
28 | plt.figure()
29 | for i in range(n_samples):
30 | plt.plot(x, samples[i], label='GP sample {}'.format(i + 1))
31 | plt.xlabel('x')
32 | plt.ylabel('y')
33 | plt.title('Sampled GP priors from Squared Exponential kernel')
34 | plt.grid()
35 | plt.legend(loc=0)
36 | plt.show()
37 |
--------------------------------------------------------------------------------
/examples/example1d.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # example1d: Shows how Bayesian optimization works on a one-dimensional
4 | # sine-like function, step by step.
5 | #######################################
6 |
7 |
8 | import os
9 |
10 | import matplotlib.pyplot as plt
11 |
12 | import numpy as np
13 | from pyGPGO.GPGO import GPGO
14 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
15 | from pyGPGO.acquisition import Acquisition
16 | from pyGPGO.covfunc import squaredExponential
17 |
18 |
19 | def plotGPGO(gpgo, param):
20 | param_value = list(param.values())[0][1]
21 | x_test = np.linspace(param_value[0], param_value[1], 1000).reshape((1000, 1))
22 | hat = gpgo.GP.predict(x_test, return_std=True)
23 | y_hat, y_std = hat[0], np.sqrt(hat[1])
24 | l, u = y_hat - 1.96 * y_std, y_hat + 1.96 * y_std
25 | fig = plt.figure()
26 | r = fig.add_subplot(2, 1, 1)
27 | r.set_title('Fitted Gaussian process')
28 | plt.fill_between(x_test.flatten(), l, u, alpha=0.2)
29 | plt.plot(x_test.flatten(), y_hat, color='red', label='Posterior mean')
30 | plt.legend(loc=0)
31 | a = np.array([-gpgo._acqWrapper(np.atleast_1d(x)) for x in x_test]).flatten()
32 | r = fig.add_subplot(2, 1, 2)
33 | r.set_title('Acquisition function')
34 | plt.plot(x_test, a, color='green')
35 | gpgo._optimizeAcq(method='L-BFGS-B', n_start=1000)
36 | plt.axvline(x=gpgo.best, color='black', label='Found optima')
37 | plt.legend(loc=0)
38 | plt.tight_layout()
39 | plt.savefig(os.path.join(os.getcwd(), 'mthesis_text/figures/chapter3/sine/{}.pdf'.format(i)))
40 | plt.show()
41 |
42 |
43 | if __name__ == '__main__':
44 | np.random.seed(321)
45 |
46 | def f(x):
47 | return (np.sin(x))
48 |
49 | sexp = squaredExponential()
50 | gp = GaussianProcess(sexp)
51 | acq = Acquisition(mode='ExpectedImprovement')
52 | param = {'x': ('cont', [0, 2 * np.pi])}
53 |
54 | gpgo = GPGO(gp, acq, f, param, n_jobs=-1)
55 | gpgo._firstRun()
56 |
57 | for i in range(6):
58 | plotGPGO(gpgo, param)
59 | gpgo.updateGP()
60 |
--------------------------------------------------------------------------------
/examples/example2d.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # example2d: Shows how Bayesian optimization works on a two-dimensional
4 | # Rastrigin function, step by step.
5 | #######################################
6 |
7 |
8 | import os
9 | from collections import OrderedDict
10 |
11 | import numpy as np
12 | import matplotlib.pyplot as plt
13 |
14 | from pyGPGO.GPGO import GPGO
15 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
16 | from pyGPGO.acquisition import Acquisition
17 | from pyGPGO.covfunc import squaredExponential
18 |
19 |
20 | def rastrigin(x, y, A=10):
21 | return (2 * A + (x ** 2 - A * np.cos(2 * np.pi * x)) + (y ** 2 - A * np.cos(2 * np.pi * y)))
22 |
23 |
24 | def plot_f(x_values, y_values, f):
25 | z = np.zeros((len(x_values), len(y_values)))
26 | for i in range(len(x_values)):
27 | for j in range(len(y_values)):
28 | z[i, j] = f(x_values[i], y_values[j])
29 | plt.imshow(z.T, origin='lower', extent=[np.min(x_values), np.max(x_values), np.min(y_values), np.max(y_values)])
30 | plt.colorbar()
31 | plt.show()
32 | plt.savefig(os.path.join(os.getcwd(), 'mthesis_text/figures/chapter3/rosen/rosen.pdf'))
33 |
34 |
35 | def plot2dgpgo(gpgo):
36 | tested_X = gpgo.GP.X
37 | n = 100
38 | r_x, r_y = gpgo.parameter_range[0], gpgo.parameter_range[1]
39 | x_test = np.linspace(r_x[0], r_x[1], n)
40 | y_test = np.linspace(r_y[0], r_y[1], n)
41 | z_hat = np.empty((len(x_test), len(y_test)))
42 | z_var = np.empty((len(x_test), len(y_test)))
43 | ac = np.empty((len(x_test), len(y_test)))
44 | for i in range(len(x_test)):
45 | for j in range(len(y_test)):
46 | res = gpgo.GP.predict([x_test[i], y_test[j]])
47 | z_hat[i, j] = res[0]
48 | z_var[i, j] = res[1][0]
49 | ac[i, j] = -gpgo._acqWrapper(np.atleast_1d([x_test[i], y_test[j]]))
50 | fig = plt.figure()
51 | a = fig.add_subplot(2, 2, 1)
52 | a.set_title('Posterior mean')
53 | plt.imshow(z_hat.T, origin='lower', extent=[r_x[0], r_x[1], r_y[0], r_y[1]])
54 | plt.colorbar()
55 | plt.plot(tested_X[:, 0], tested_X[:, 1], 'wx', markersize=10)
56 | a = fig.add_subplot(2, 2, 2)
57 | a.set_title('Posterior variance')
58 | plt.imshow(z_var.T, origin='lower', extent=[r_x[0], r_x[1], r_y[0], r_y[1]])
59 | plt.plot(tested_X[:, 0], tested_X[:, 1], 'wx', markersize=10)
60 | plt.colorbar()
61 | a = fig.add_subplot(2, 2, 3)
62 | a.set_title('Acquisition function')
63 | plt.imshow(ac.T, origin='lower', extent=[r_x[0], r_x[1], r_y[0], r_y[1]])
64 | plt.colorbar()
65 | gpgo._optimizeAcq(method='L-BFGS-B', n_start=500)
66 | plt.plot(gpgo.best[0], gpgo.best[1], 'gx', markersize=15)
67 | plt.tight_layout()
68 | plt.savefig(os.path.join(os.getcwd(), 'mthesis_text/figures/chapter3/rosen/{}.pdf'.format(item)))
69 | plt.show()
70 |
71 |
72 | if __name__ == '__main__':
73 | x = np.linspace(-1, 1, 1000)
74 | y = np.linspace(-1, 1, 1000)
75 | plot_f(x, y, rastrigin)
76 |
77 | np.random.seed(20)
78 | sexp = squaredExponential()
79 | gp = GaussianProcess(sexp)
80 | acq = Acquisition(mode='ExpectedImprovement')
81 |
82 | param = OrderedDict()
83 | param['x'] = ('cont', [-1, 1])
84 | param['y'] = ('cont', [-1, 1])
85 |
86 | gpgo = GPGO(gp, acq, rastrigin, param, n_jobs=-1)
87 | gpgo._firstRun()
88 |
89 | for item in range(7):
90 | plot2dgpgo(gpgo)
91 | gpgo.updateGP()
92 |
--------------------------------------------------------------------------------
/examples/exampleGBM.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # exampleGBM: tests the Gradient Boosting Machine surrogate.
4 | #######################################
5 |
6 | import numpy as np
7 | from pyGPGO.surrogates.BoostedTrees import BoostedTrees
8 | import matplotlib.pyplot as plt
9 |
10 | if __name__ == '__main__':
11 | # Build synthetic data (sine function)
12 | x = np.arange(0, 2 * np.pi + 0.01, step=np.pi / 16)
13 | y = np.sin(x)
14 | X = np.array([np.atleast_2d(u) for u in x])[:, 0]
15 |
16 | gbm = BoostedTrees(q2=0.84, q1=0.16)
17 | # Fit the model to the data
18 | gbm.fit(X, y)
19 | # Predict on new data
20 | xstar = np.arange(0, 2 * np.pi, step=0.01)
21 | Xstar = np.array([np.atleast_2d(u) for u in xstar])[:, 0]
22 | ymean, ystd = gbm.predict(Xstar, return_std=True)
23 |
24 | # Confidence interval bounds
25 | lower, upper = ymean - 1.96 * ystd, ymean + 1.96 * ystd
26 |
27 | # Plot values
28 | plt.figure()
29 | plt.plot(xstar, ymean, label='Posterior mean')
30 | plt.plot(xstar, np.sin(xstar), label='True function')
31 | plt.fill_between(xstar, lower, upper, alpha=0.4, label=r'95% confidence band')
32 | plt.grid()
33 | plt.legend(loc=0)
34 | plt.show()
--------------------------------------------------------------------------------
/examples/exampleRF.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # exampleRF: tests the Random Forest surrogate model.
4 | #######################################
5 |
6 | import numpy as np
7 | from pyGPGO.surrogates.RandomForest import RandomForest
8 | import matplotlib.pyplot as plt
9 |
10 | if __name__ == '__main__':
11 | # Build synthetic data (sine function)
12 | x = np.arange(0, 2 * np.pi + 0.01, step=np.pi / 8)
13 | y = np.sin(x)
14 | X = np.array([np.atleast_2d(u) for u in x])[:, 0]
15 |
16 | rf = RandomForest(n_estimators=20)
17 | # Fit the model to the data
18 | rf.fit(X, y)
19 | # Predict on new data
20 | xstar = np.arange(0, 2 * np.pi, step=0.01)
21 | Xstar = np.array([np.atleast_2d(u) for u in xstar])[:, 0]
22 | ymean, ystd = rf.predict(Xstar, return_std=True)
23 |
24 | # Confidence interval bounds
25 | lower, upper = ymean - 1.96 * ystd, ymean + 1.96 * ystd
26 |
27 | # Plot values
28 | plt.figure()
29 | plt.plot(xstar, ymean, label='Posterior mean')
30 | plt.plot(xstar, np.sin(xstar), label='True function')
31 | plt.fill_between(xstar, lower, upper, alpha=0.4, label=r'95% confidence band')
32 | plt.grid()
33 | plt.legend(loc=0)
34 | plt.show()
--------------------------------------------------------------------------------
/examples/exampleint.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # exampleint: tests and visualizes an integrated acquisition function.
4 | #######################################
5 |
6 | import matplotlib.pyplot as plt
7 |
8 | import numpy as np
9 | from pyGPGO.GPGO import GPGO
10 | from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC
11 | from pyGPGO.acquisition import Acquisition
12 | from pyGPGO.covfunc import squaredExponential
13 | import pymc3 as pm
14 |
15 |
16 | def plotGPGO(gpgo, param):
17 | param_value = list(param.values())[0][1]
18 | x_test = np.linspace(param_value[0], param_value[1], 1000).reshape((1000, 1))
19 | fig = plt.figure()
20 | a = np.array([-gpgo._acqWrapper(np.atleast_1d(x)) for x in x_test]).flatten()
21 | r = fig.add_subplot(1, 1, 1)
22 | r.set_title('Acquisition function')
23 | plt.plot(x_test, a, color='green')
24 | gpgo._optimizeAcq(method='L-BFGS-B', n_start=25)
25 | plt.axvline(x=gpgo.best, color='black', label='Found optima')
26 | plt.legend(loc=0)
27 | plt.tight_layout()
28 | plt.show()
29 |
30 |
31 | if __name__ == '__main__':
32 | np.random.seed(321)
33 |
34 | def f(x):
35 | return (np.sin(x))
36 |
37 | sexp = squaredExponential()
38 | gp = GaussianProcessMCMC(sexp, step=pm.Slice)
39 | acq = Acquisition(mode='IntegratedExpectedImprovement')
40 | param = {'x': ('cont', [0, 2 * np.pi])}
41 |
42 | gpgo = GPGO(gp, acq, f, param, n_jobs=-1)
43 | gpgo._firstRun()
44 |
45 | for i in range(6):
46 | plotGPGO(gpgo, param)
47 | gpgo.updateGP()
48 |
--------------------------------------------------------------------------------
/examples/franke.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # franke: optimizes Franke's function.
4 | #######################################
5 |
6 | import numpy as np
7 | from pyGPGO.covfunc import matern32
8 | from pyGPGO.acquisition import Acquisition
9 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
10 | from pyGPGO.GPGO import GPGO
11 |
12 | import matplotlib.pyplot as plt
13 | from matplotlib import cm
14 |
15 |
16 | def f(x, y):
17 | # Franke's function (https://www.mathworks.com/help/curvefit/franke.html)
18 | one = 0.75 * np.exp(-(9 * x - 2)**2/4 - (9 * y - 2)**2/4)
19 | two = 0.75 * np.exp(-(9 * x + 1)**2/49 - (9 * y + 1)/10)
20 | three = 0.5 * np.exp(-(9 * x - 7)**2/4 - (9*y - 3)**2/4)
21 | four = 0.25 * np.exp(-(9 * x - 4)**2 - (9*y - 7)**2)
22 | return one + two + three - four
23 |
24 |
25 | def plotFranke():
26 | """
27 | Plots Franke's function
28 | """
29 | x = np.linspace(0, 1, num=1000)
30 | y = np.linspace(0, 1, num=1000)
31 | X, Y = np.meshgrid(x, y)
32 | Z = f(X, Y)
33 |
34 | fig = plt.figure()
35 | ax = fig.gca(projection='3d')
36 |
37 | surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,
38 | linewidth=0)
39 | fig.colorbar(surf, shrink=0.5, aspect=5)
40 | plt.show()
41 |
42 |
43 | if __name__ == '__main__':
44 | plotFranke()
45 |
46 | cov = matern32() # Using a matern v=3/2 covariance kernel
47 | gp = GaussianProcess(cov) # A Gaussian Process regressor without hyperparameter optimization
48 | acq = Acquisition(mode='ExpectedImprovement') # Expected Improvement acquisition function
49 | param = {'x': ('cont', [0, 1]),
50 | 'y': ('cont', [0, 1])} # Specify parameter space
51 |
52 | np.random.seed(1337)
53 | gpgo = GPGO(gp, acq, f, param) # Call GPGO class
54 | gpgo.run(max_iter=10) # 10 iterations
55 | gpgo.getResult() # Get your result
56 |
--------------------------------------------------------------------------------
/examples/gif_gen.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # gif_gen: generates a gif (the one in paper.md) showing how the BO framework
4 | # works on the Franke function, step by step.
5 | #######################################
6 |
7 | import numpy as np
8 | from pyGPGO.covfunc import matern32
9 | from pyGPGO.acquisition import Acquisition
10 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
11 | from pyGPGO.GPGO import GPGO
12 |
13 | import matplotlib.pyplot as plt
14 | from matplotlib import cm
15 |
16 |
17 | def f(x, y):
18 | # Franke's function (https://www.mathworks.com/help/curvefit/franke.html)
19 | one = 0.75 * np.exp(-(9 * x - 2)**2/4 - (9 * y - 2)**2/4)
20 | two = 0.75 * np.exp(-(9 * x + 1)**2/49 - (9 * y + 1)/10)
21 | three = 0.5 * np.exp(-(9 * x - 7)**2/4 - (9*y - 3)**2/4)
22 | four = 0.25 * np.exp(-(9 * x - 4)**2 - (9*y - 7)**2)
23 | return one + two + three - four
24 |
25 |
26 | def plotFranke():
27 | x = np.linspace(0, 1, num=1000)
28 | y = np.linspace(0, 1, num=1000)
29 | X, Y = np.meshgrid(x, y)
30 | Z = f(X, Y)
31 | ax = fig.add_subplot(1, 2, 1, projection='3d')
32 | ax.set_title('Original function')
33 |
34 | surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,
35 | linewidth=0)
36 | fig.colorbar(surf, shrink=0.5, aspect=5)
37 |
38 |
39 | def plotPred(gpgo, num=100):
40 | X = np.linspace(0, 1, num=num)
41 | Y = np.linspace(0, 1, num=num)
42 | U = np.zeros((num**2, 2))
43 | i = 0
44 | for x in X:
45 | for y in Y:
46 | U[i, :] = [x, y]
47 | i += 1
48 | z = gpgo.GP.predict(U)[0]
49 | Z = z.reshape((num, num))
50 | X, Y = np.meshgrid(X, Y)
51 | ax = fig.add_subplot(1, 2, 2, projection='3d')
52 | ax.set_title('Gaussian Process surrogate')
53 | surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,
54 | linewidth=0)
55 | fig.colorbar(surf, shrink=0.5, aspect=5)
56 | best = gpgo.best
57 | ax.scatter([best[0]], [best[1]], s=40, marker='x', c='r', label='Sampled point')
58 | plt.legend(loc='lower right')
59 | #plt.show()
60 | return Z
61 |
62 |
63 | if __name__ == '__main__':
64 | n_iter = 10
65 | cov = matern32()
66 | gp = GaussianProcess(cov)
67 | acq = Acquisition(mode='ExpectedImprovement')
68 | param = {'x': ('cont', [0, 1]),
69 | 'y': ('cont', [0, 1])}
70 |
71 | np.random.seed(85)
72 | gpgo = GPGO(gp, acq, f, param)
73 | gpgo.run(max_iter=1)
74 |
75 | for i in range(n_iter):
76 | fig = plt.figure(figsize=plt.figaspect(0.5))
77 | fig.suptitle("Franke's function (Iteration {})".format(i+1))
78 | gpgo.run(max_iter=1, resume=True)
79 | plotFranke()
80 | plotPred(gpgo)
81 | plt.show()
82 | #plt.savefig('/home/jose/gif/{}.png'.format(i), dpi=300)
83 | plt.close()
84 |
--------------------------------------------------------------------------------
/examples/hyperopt.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # hyperopt: shows the gradient w.r.t. the characteristic length scale
4 | # on a simple example.
5 | #######################################
6 |
7 |
8 | import numpy as np
9 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
10 | from pyGPGO.covfunc import squaredExponential
11 | import matplotlib.pyplot as plt
12 |
13 |
14 | def gradient(gp, sexp):
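    # Gradient of the GP marginal log-likelihood with respect to the characteristic
    # length-scale l: 0.5 * tr((alpha alpha^T - K^{-1}) dK/dl); see Rasmussen &
    # Williams, "Gaussian Processes for Machine Learning", ch. 5.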
15 | alpha = gp.alpha
16 | K = gp.K
17 | gradK = sexp.gradK(gp.X, gp.X, 'l')
18 | inner = np.dot(np.atleast_2d(alpha).T, np.atleast_2d(alpha)) - np.linalg.inv(K)
19 | return (.5 * np.trace(np.dot(inner, gradK)))
20 |
21 |
22 | if __name__ == '__main__':
23 | x = np.arange(0, 2 * np.pi + 0.01, step=np.pi / 2)
24 | X = np.array([np.atleast_2d(u) for u in x])[:, 0]
25 | y = np.sin(x)
26 |
27 | logp = []
28 | grad = []
29 | length_scales = np.linspace(0.1, 2, 1000)
30 |
31 | for l in length_scales:
32 | sexp = squaredExponential(l=l)
33 | gp = GaussianProcess(sexp)
34 | gp.fit(X, y)
35 | logp.append(gp.logp)
36 | grad.append(gradient(gp, sexp))
37 |
38 | plt.figure()
39 | plt.subplot(1, 2, 1)
40 | plt.plot(length_scales, logp)
41 | plt.title('Marginal log-likelihood')
42 | plt.xlabel('Characteristic length-scale l')
43 | plt.ylabel('log-likelihood')
44 | plt.grid()
45 | plt.subplot(1, 2, 2)
46 | plt.plot(length_scales, grad, '--', color='red')
47 | plt.title('Gradient w.r.t. l')
48 | plt.xlabel('Characteristic length-scale l')
49 | plt.grid()
50 | plt.show()
51 |
--------------------------------------------------------------------------------
/examples/hyperpost.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # hyperpost: shows posterior distribution of hyperparameters
4 | # for a Gaussian Process example
5 | #######################################
6 |
7 | import numpy as np
8 | from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC
9 | from pyGPGO.covfunc import matern32
10 |
11 |
12 | if __name__ == '__main__':
13 | np.random.seed(1337)
14 | sexp = matern32()
15 | gp = GaussianProcessMCMC(sexp, niter=2000, init='MAP', step=None)
16 |
17 | X = np.linspace(0, 6, 7)[:, None]
18 | y = np.sin(X).flatten()
19 | gp.fit(X, y)
20 | gp.posteriorPlot()
--------------------------------------------------------------------------------
/examples/integratedacq.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # integratedacq: Shows the computation of the integrated acquisition function.
4 | #######################################
5 |
6 | import numpy as np
7 | import matplotlib.pyplot as plt
8 | from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC
9 | from pyGPGO.acquisition import Acquisition
10 | from pyGPGO.covfunc import squaredExponential
11 | from pyGPGO.GPGO import GPGO
12 |
13 | import pymc3 as pm
14 |
15 | if __name__ == '__main__':
16 | sexp = squaredExponential()
17 | gp = GaussianProcessMCMC(sexp, step=pm.Slice)
18 |
19 | def f(x):
20 | return np.sin(x)
21 |
22 | np.random.seed(200)
23 | param = {'x': ('cont', [0, 6])}
24 | acq = Acquisition(mode='IntegratedExpectedImprovement')
25 | gpgo = GPGO(gp, acq, f, param)
26 | gpgo._firstRun(n_eval=7)
27 |
28 | plt.figure()
29 | plt.subplot(2, 1, 1)
30 |
31 | Z = np.linspace(0, 6, 100)[:, None]
32 | post_mean, post_var = gpgo.GP.predict(Z, return_std=True, nsamples=200)
33 | for i in range(200):
34 | plt.plot(Z.flatten(), post_mean[i], linewidth=0.4)
35 |
36 | plt.plot(gpgo.GP.X.flatten(), gpgo.GP.y, 'X', label='Sampled data', markersize=10, color='red')
37 | plt.grid()
38 | plt.legend()
39 |
40 | xtest = np.linspace(0, 6, 200)[:, np.newaxis]
41 | a = [-gpgo._acqWrapper(np.atleast_2d(x)) for x in xtest]
42 | plt.subplot(2, 1, 2)
43 | plt.plot(xtest, a, label='Integrated Expected Improvement')
44 | plt.grid()
45 | plt.legend()
46 | plt.show()
47 |
--------------------------------------------------------------------------------
/examples/minimalexample.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # minimalexample: A minimal working pyGPGO example.
4 | #######################################
5 |
6 | import numpy as np
7 | import matplotlib.pyplot as plt
8 | from pyGPGO.covfunc import squaredExponential
9 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
10 | from pyGPGO.acquisition import Acquisition
11 | from pyGPGO.GPGO import GPGO
12 |
13 |
14 | def drawFun(f):
15 | x = np.linspace(0, 1, 1000)
16 | plt.plot(x, f(x))
17 | plt.grid()
18 | plt.show()
19 |
20 |
21 | if __name__ == '__main__':
22 | np.random.seed(20)
23 | def f(x):
24 | return -((6*x-2)**2*np.sin(12*x-4))
25 |
26 | drawFun(f)
27 |
28 | sexp = squaredExponential()
29 | gp = GaussianProcess(sexp)
30 | acq = Acquisition(mode = 'ExpectedImprovement')
31 |
32 | params = {'x': ('cont', (0, 1))}
33 | gpgo = GPGO(gp, acq, f, params)
34 | gpgo.run(max_iter = 10)
35 | print(gpgo.getResult())
36 |
--------------------------------------------------------------------------------
/examples/sineGP.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # sineGP: Fits a Gaussian Process on a sine-like function.
4 | #######################################
5 |
6 | import numpy as np
7 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
8 | from pyGPGO.covfunc import squaredExponential
9 | import matplotlib.pyplot as plt
10 |
11 | if __name__ == '__main__':
12 | # Build synthetic data (sine function)
13 | x = np.arange(0, 2 * np.pi + 0.01, step=np.pi / 2)
14 | y = np.sin(x)
15 | X = np.array([np.atleast_2d(u) for u in x])[:, 0]
16 |
17 | # Specify covariance function
18 | sexp = squaredExponential()
19 |
20 | # Instantiate GaussianProcess class
21 | gp = GaussianProcess(sexp)
22 | # Fit the model to the data
23 | gp.fit(X, y)
24 |
25 | # Predict on new data
26 | xstar = np.arange(0, 2 * np.pi, step=0.01)
27 | Xstar = np.array([np.atleast_2d(u) for u in xstar])[:, 0]
28 | ymean, ystd = gp.predict(Xstar, return_std=True)
29 |
30 | # Confidence interval bounds
31 | lower, upper = ymean - 1.96 * np.sqrt(ystd), ymean + 1.96 * np.sqrt(ystd)
32 |
33 | # Plot values
34 | plt.figure()
35 | plt.plot(xstar, ymean, label='Posterior mean')
36 | plt.plot(xstar, np.sin(xstar), label='True function')
37 | plt.fill_between(xstar, lower, upper, alpha=0.4, label='95% confidence band')
38 | plt.grid()
39 | plt.legend(loc=0)
40 | plt.show()
--------------------------------------------------------------------------------
/examples/sklearnexample.py:
--------------------------------------------------------------------------------
1 | #######################################
2 | # pyGPGO examples
3 | # sklearnexample: Optimizes hyperparameters for an SVM classifier
4 | # on synthetic generated data.
5 | #######################################
6 |
7 |
8 | import numpy as np
9 | import matplotlib.pyplot as plt
10 | from matplotlib.colors import ListedColormap
11 | from sklearn.datasets import make_moons
12 | from sklearn.svm import SVC
13 | from sklearn.model_selection import cross_val_score
14 |
15 |
16 | from pyGPGO.GPGO import GPGO
17 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
18 | from pyGPGO.acquisition import Acquisition
19 | from pyGPGO.covfunc import squaredExponential
20 |
21 |
22 | def evaluateModel(C, gamma):
23 | clf = SVC(C=10**C, gamma=10**gamma)
24 | return np.average(cross_val_score(clf, X, y))
25 |
26 |
27 | if __name__ == '__main__':
28 | np.random.seed(20)
29 | X, y = make_moons(n_samples=200, noise=0.3)
30 |
31 | cm_bright = ListedColormap(['#fc4349', '#6dbcdb'])
32 |
33 | fig = plt.figure()
34 | plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cm_bright)
35 | plt.show()
36 |
37 | sexp = squaredExponential()
38 | gp = GaussianProcess(sexp, optimize=True, usegrads=True)
39 | acq = Acquisition(mode='UCB', beta=1.5)
40 |
41 | params = {'C': ('cont', (-4, 5)),
42 | 'gamma': ('cont', (-4, 5))
43 | }
44 |
45 | gpgo = GPGO(gp, acq, evaluateModel, params)
46 | gpgo.run(max_iter=50)
47 | gpgo.getResult()
48 |
--------------------------------------------------------------------------------
/franke.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/franke.gif
--------------------------------------------------------------------------------
/paper.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: 'pyGPGO: Bayesian Optimization for Python'
3 | tags:
4 | - machine-learning
5 | - bayesian
6 | - optimization
7 | authors:
8 | - name: José Jiménez
9 | orcid: 0000-0002-5335-7834
10 | affiliation: 1
11 | - name: Josep Ginebra
12 | orcid: 0000-0001-9521-9635
13 | affiliation: 2
14 | affiliations:
15 | - name: Computational Biophysics Laboratory, Universitat Pompeu Fabra, Parc de Recerca Biomèdica de Barcelona, Carrer del Dr. Aiguader 88. Barcelona 08003, Spain.
16 | index: 1
17 | - name: Department of Statistics and Operations Research. Universitat Politècnica de Catalunya (UPC). Av. Diagonal 647, Barcelona 08028, Spain.
18 | index: 2
19 | date: 4 September 2017
20 | bibliography: paper.bib
21 | ---
22 |
23 | # Summary
24 |
25 | Bayesian optimization has risen over the last few years as a very attractive method to optimize
26 | expensive-to-evaluate, black-box, derivative-free and possibly noisy functions [@Shahriari2016]. This framework uses _surrogate models_, such as Gaussian Processes [@Rasmussen2004], which describe a prior belief over the possible objective functions in order to approximate them. The procedure itself is inherently sequential: the function is first evaluated a few times, a surrogate model is then fit to this information, and a predefined _acquisition function_ then suggests the next point to evaluate. These strategies typically aim to balance exploitation and exploration, that is, sampling areas where the posterior mean or the posterior variance of the surrogate model is high, respectively.
27 |
28 |
29 | These strategies have recently drawn the attention of machine learning researchers as an alternative to simpler black-box optimization strategies, such as grid search or random search [@Bergstra2012]. They are especially interesting in areas such as automatic machine-learning hyperparameter optimization [@Snoek2012], A/B testing [@Chapelle2011] or recommender systems [@Vanchinathan2014], among others. Furthermore, the framework is entirely modular; there are many choices a user can make regarding the design of the optimization procedure: choice of surrogate model, covariance function, acquisition function behaviour or hyperparameter treatment, to name a few.
30 |
31 |
32 | Here we present *pyGPGO*, an open-source Python package for Bayesian optimization, which embraces this modularity in its design. While additional Python packages exist for the same purpose, they are either restricted to non-commercial applications [@SpearmintSnoek2012], implement only a small subset of the features [@yelpmoe], or do not provide a modular interface [@scikitoptimize]. *pyGPGO*, on the other hand, aims to provide the highest degree of freedom in the design and inference of a Bayesian optimization pipeline, while being feature-wise competitive with other existing software. *pyGPGO* currently supports:
33 |
34 | - Different surrogate models: Gaussian Processes, Student-t Processes, Random Forests (& variants)
35 | and Gradient Boosting Machines.
36 | - The most common covariance function structures, as well as their derivatives: squared exponential,
37 | Matérn, gamma-exponential, rational-quadratic, exponential-sine and dot-product kernels.
38 | - Several acquisition function behaviours: probability of improvement, expected improvement,
39 | upper confidence bound and entropy-based, as well as their integrated versions.
40 | - Type II maximum-likelihood estimation of covariance hyperparameters.
41 | - MCMC sampling for the full-Bayesian treatment of hyperparameters (via `pyMC3` [@Salvatier2016]).
42 |
43 |
44 | *pyGPGO* is MIT-licensed and can be retrieved from both [GitHub](https://github.com/hawk31/pyGPGO)
45 | and [PyPI](https://pypi.python.org/pypi/pyGPGO/), with extensive documentation available at [ReadTheDocs](http://pygpgo.readthedocs.io/en/latest/). *pyGPGO* is built on top of other well-known packages of the Python scientific ecosystem, such as numpy, scikit-learn, pyMC3 and theano.
46 |
47 | 
48 |
49 |
50 | # Future work
51 |
52 | *pyGPGO* is an ongoing project, and as such there are several improvements that will be tackled
53 | in the near future:
54 |
55 | - Support for linear combinations of covariance functions, with automatic gradient computation.
56 | - Support for more diverse acquisition functions, such as Predictive Entropy Search [@Hernandez-Lobato2014].
57 | - A class for constrained Bayesian optimization [@Gardner2014] is planned for the near future.
58 |
59 |
60 | # References
61 |
--------------------------------------------------------------------------------
/pyGPGO/__init__.py:
--------------------------------------------------------------------------------
1 | from .version import __version__
2 |
--------------------------------------------------------------------------------
/pyGPGO/__pycache__/GPGO.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/__pycache__/GPGO.cpython-36.pyc
--------------------------------------------------------------------------------
/pyGPGO/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/pyGPGO/__pycache__/acquisition.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/__pycache__/acquisition.cpython-36.pyc
--------------------------------------------------------------------------------
/pyGPGO/__pycache__/covfunc.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/__pycache__/covfunc.cpython-36.pyc
--------------------------------------------------------------------------------
/pyGPGO/__pycache__/logger.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/__pycache__/logger.cpython-36.pyc
--------------------------------------------------------------------------------
/pyGPGO/__pycache__/version.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/__pycache__/version.cpython-36.pyc
--------------------------------------------------------------------------------
/pyGPGO/logger.py:
--------------------------------------------------------------------------------
1 | class bcolors:
2 | HEADER = '\033[95m'
3 | OKBLUE = '\033[94m'
4 | OKGREEN = '\033[92m'
5 | WARNING = '\033[93m'
6 | FAIL = '\033[91m'
7 | ENDC = '\033[0m'
8 | BOLD = '\033[1m'
9 | UNDERLINE = '\033[4m'
10 |
11 | class EventLogger:
12 | def __init__(self, gpgo):
13 | self.gpgo = gpgo
14 | self.header = 'Evaluation \t Proposed point \t Current eval. \t Best eval.'
15 | self.template = '{:6} \t {}. \t {:6} \t {:6}'
16 | print(self.header)
17 |
18 | def _printCurrent(self, gpgo):
19 | eval = str(len(gpgo.GP.y) - gpgo.init_evals)
20 | proposed = str(gpgo.best)
21 | curr_eval = str(gpgo.GP.y[-1])
22 | curr_best = str(gpgo.tau)
23 | if float(curr_eval) >= float(curr_best):
24 | curr_eval = bcolors.OKGREEN + curr_eval + bcolors.ENDC
25 | print(self.template.format(eval, proposed, curr_eval, curr_best))
26 |
27 | def _printInit(self, gpgo):
28 | for init_eval in range(gpgo.init_evals):
29 | print(self.template.format('init', gpgo.GP.X[init_eval], gpgo.GP.y[init_eval], gpgo.tau))
30 |
31 |
--------------------------------------------------------------------------------
/pyGPGO/surrogates/BoostedTrees.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.ensemble import GradientBoostingRegressor
3 |
4 | class BoostedTrees:
5 | def __init__(self, q1=.16, q2=.84,**params):
6 | """
7 | Gradient boosted trees as surrogate model for Bayesian Optimization.
8 | Uses quantile regression for an estimate of the 'posterior' variance.
9 | In practice, the std is computed as (`q2` - `q1`) / 2.
10 | Relies on `sklearn.ensemble.GradientBoostingRegressor`
11 |
12 | Parameters
13 | ----------
14 | q1: float
15 | First quantile.
16 | q2: float
17 | Second quantile
18 | params: dict, optional
19 | Extra parameters to pass to `GradientBoostingRegressor`
20 |
21 | """
22 | self.params = params
23 | self.q1 = q1
24 | self.q2 = q2
25 | self.eps = 1e-1
26 |
27 | def fit(self, X, y):
28 | """
29 | Fit a GBM model to data `X` and targets `y`.
30 |
31 | Parameters
32 | ----------
33 | X : array-like
34 | Input values.
35 | y: array-like
36 | Target values.
37 | """
38 | self.X = X
39 | self.y = y
40 | self.n = self.X.shape[0]
41 | self.modq1 = GradientBoostingRegressor(loss='quantile', alpha=self.q1, **self.params)
42 | self.modq2 = GradientBoostingRegressor(loss='quantile', alpha=self.q2, **self.params)
43 | self.mod = GradientBoostingRegressor(loss = 'ls', **self.params)
44 | self.modq1.fit(self.X, self.y)
45 | self.modq2.fit(self.X, self.y)
46 | self.mod.fit(self.X, self.y)
47 |
48 | def predict(self, Xstar, return_std = True):
49 | """
50 | Predicts 'posterior' mean and variance for the GBM model.
51 |
52 | Parameters
53 | ----------
54 | Xstar: array-like
55 | Input values.
56 | return_std: bool, optional
57 | Whether to return posterior variance estimates. Default is `True`.
60 |
61 |
62 | Returns
63 | -------
64 | array-like:
65 | Posterior predicted mean.
66 | array-like:
67 | Posterior predicted std
68 |
69 | """
70 | Xstar = np.atleast_2d(Xstar)
71 | ymean = self.mod.predict(Xstar)
72 | if return_std:
73 | q1pred = self.modq1.predict(Xstar)
74 | q2pred = self.modq2.predict(Xstar)
75 | ystd = (q2pred - q1pred) / 2 + self.eps
76 | return ymean, ystd
77 | return ymean
78 |
79 | def update(self, xnew, ynew):
80 | """
81 | Updates the internal GBM model with observations `xnew` and targets `ynew`.
82 |
83 | Parameters
84 | ----------
85 | xnew: array-like
86 | New observations.
87 | ynew: array-like
88 | New targets.
89 | """
90 | y = np.concatenate((self.y, ynew), axis=0)
91 | X = np.concatenate((self.X, xnew), axis=0)
92 | self.fit(X, y)
--------------------------------------------------------------------------------
/pyGPGO/surrogates/GaussianProcessMCMC.py:
--------------------------------------------------------------------------------
1 | import numpy as np
import matplotlib.pyplot as plt
2 | import theano.tensor as tt
3 | import pymc3 as pm
4 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
5 |
6 |
7 | covariance_equivalence = {'squaredExponential': pm.gp.cov.ExpQuad,
8 | 'matern52': pm.gp.cov.Matern52,
9 | 'matern32': pm.gp.cov.Matern32}
10 |
11 |
12 | class GaussianProcessMCMC:
13 | def __init__(self, covfunc, niter=2000, burnin=1000, init='ADVI', step=None):
14 | """
15 | Gaussian Process class using MCMC sampling of covariance function hyperparameters.
16 |
17 | Parameters
18 | ----------
19 | covfunc:
20 | Covariance function to use. Currently this instance only supports `squaredExponential`
21 | `matern32` and `matern52` kernels.
22 | niter: int
23 | Number of iterations to run MCMC.
24 | burnin: int
25 | Burn-in iterations to discard at trace.
26 | init: str
27 | Initialization method for NUTS. Check pyMC3 docs.
28 | step:
29 | pyMC3's step method for the process, (e.g. `pm.Slice`)
30 | """
31 | self.covfunc = covfunc
32 | self.niter = niter
33 | self.burnin = burnin
34 | self.init = init
35 | self.step = step
36 |
37 | def _extractParam(self, unittrace, covparams):
38 | d = {}
39 | for key, value in unittrace.items():
40 | if key in covparams:
41 | d[key] = value
42 | if 'v' in covparams:
43 | d['v'] = 5 / 2
44 | return d
45 |
46 | def fit(self, X, y):
47 | """
48 | Fits a Gaussian Process regressor using MCMC.
49 |
50 | Parameters
51 | ----------
52 | X: np.ndarray, shape=(nsamples, nfeatures)
53 | Training instances to fit the GP.
54 | y: np.ndarray, shape=(nsamples,)
55 | Corresponding continuous target values to `X`.
56 |
57 | """
58 | self.X = X
59 | self.n = self.X.shape[0]
60 | self.y = y
61 | self.model = pm.Model()
62 |
63 | with self.model as model:
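            # Priors: uniform on the length-scale l, and uniform on the log signal
            # and noise variances (i.e. log-uniform on the variances themselves).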
64 | l = pm.Uniform('l', 0, 10)
65 |
66 | log_s2_f = pm.Uniform('log_s2_f', lower=-7, upper=5)
67 | s2_f = pm.Deterministic('sigmaf', tt.exp(log_s2_f))
68 |
69 | log_s2_n = pm.Uniform('log_s2_n', lower=-7, upper=5)
70 | s2_n = pm.Deterministic('sigman', tt.exp(log_s2_n))
71 |
72 | f_cov = s2_f * covariance_equivalence[type(self.covfunc).__name__](1, l)
73 | Sigma = f_cov(self.X) + tt.eye(self.n) * s2_n ** 2
74 | y_obs = pm.MvNormal('y_obs', mu=np.zeros(self.n), cov=Sigma, observed=self.y)
75 | with self.model as model:
76 | if self.step is not None:
77 | self.trace = pm.sample(self.niter, step=self.step(), return_inferencedata=False)[self.burnin:]
78 | else:
79 | self.trace = pm.sample(self.niter, init=self.init, return_inferencedata=False)[self.burnin:]
80 |
81 | def posteriorPlot(self):
82 | """
83 | Plots sampled posterior distributions for hyperparameters.
84 | """
85 | with self.model as model:
86 | pm.traceplot(self.trace, var_names=['l', 'sigmaf', 'sigman'])
87 | plt.tight_layout()
88 | plt.show()
89 |
90 | def predict(self, Xstar, return_std=False, nsamples=10):
91 | """
92 | Returns mean and covariances for each posterior sampled Gaussian Process.
93 |
94 | Parameters
95 | ----------
96 | Xstar: np.ndarray, shape=((nsamples, nfeatures))
97 | Testing instances to predict.
98 | return_std: bool
99 | Whether to return the standard deviation of the posterior process. Otherwise,
100 | it returns the whole covariance matrix of the posterior process.
101 | nsamples:
102 | Number of posterior MCMC samples to consider.
103 |
104 | Returns
105 | -------
106 | np.ndarray
107 | Mean of the posterior process for each MCMC sample and `Xstar`.
108 | np.ndarray
109 | Covariance posterior process for each MCMC sample and `Xstar`.
110 | """
111 | chunk = list(self.trace)
112 | chunk = chunk[::-1][:nsamples]
113 | post_mean = []
114 | post_var = []
115 | for posterior_sample in chunk:
116 | params = self._extractParam(posterior_sample, self.covfunc.parameters)
117 | covfunc = self.covfunc.__class__(**params, bounds=self.covfunc.bounds)
118 | gp = GaussianProcess(covfunc)
119 | gp.fit(self.X, self.y)
120 | m, s = gp.predict(Xstar, return_std=return_std)
121 | post_mean.append(m)
122 | post_var.append(s)
123 | return np.array(post_mean), np.array(post_var)
124 |
125 | def update(self, xnew, ynew):
126 | """
127 | Updates the internal model with `xnew` and `ynew` instances.
128 |
129 | Parameters
130 | ----------
131 | xnew: np.ndarray, shape=((m, nfeatures))
132 | New training instances to update the model with.
133 | ynew: np.ndarray, shape=((m,))
134 | New training targets to update the model with.
135 | """
136 | y = np.concatenate((self.y, ynew), axis=0)
137 | X = np.concatenate((self.X, xnew), axis=0)
138 | self.fit(X, y)
139 |
--------------------------------------------------------------------------------
/pyGPGO/surrogates/RandomForest.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.ensemble import RandomForestRegressor, ExtraTreesRegressor
3 |
4 | class RandomForest:
5 | def __init__(self, **params):
6 | """
7 | Wrapper around sklearn's Random Forest implementation for pyGPGO.
8 | Random Forests can also be used for surrogate models in Bayesian Optimization.
9 | An estimate of 'posterior' variance can be obtained by using the `impurity`
10 | criterion value in each subtree.
11 |
12 | Parameters
13 | ----------
14 | params: tuple, optional
15 | Any parameters to pass to `RandomForestRegressor`. Defaults to sklearn's.
16 |
17 | """
18 | self.params = params
19 |
20 | def fit(self, X, y):
21 | """
22 | Fit a Random Forest model to data `X` and targets `y`.
23 |
24 | Parameters
25 | ----------
26 | X : array-like
27 | Input values.
28 | y: array-like
29 | Target values.
30 | """
31 | self.X = X
32 | self.y = y
33 | self.n = self.X.shape[0]
34 | self.model = RandomForestRegressor(**self.params)
35 | self.model.fit(X, y)
36 |
37 | def predict(self, Xstar, return_std = True, eps = 1e-6):
38 | """
39 | Predicts 'posterior' mean and variance for the RF model.
40 |
41 | Parameters
42 | ----------
43 | Xstar: array-like
44 | Input values.
45 |         return_std: bool, optional
46 |             Whether to also return the posterior standard deviation. Default is `True`.
47 |         eps: float, optional
48 |             Lower bound used to clip negative variance estimates. Default is `1e-6`.
49 |
50 |
51 | Returns
52 | -------
53 | array-like:
54 | Posterior predicted mean.
55 | array-like:
56 |             Posterior predicted standard deviation.
57 |
58 | """
59 | Xstar = np.atleast_2d(Xstar)
60 | ymean = self.model.predict(Xstar)
61 | if return_std:
62 | std = np.zeros(len(Xstar))
63 | trees = self.model.estimators_
64 |
65 | for tree in trees:
66 | var_tree = tree.tree_.impurity[tree.apply(Xstar)]
67 | var_tree = np.clip(var_tree, eps, np.inf)
68 | mean_tree = tree.predict(Xstar)
69 | std += var_tree + mean_tree ** 2
70 |
71 | std /= len(trees)
72 | std -= ymean ** 2
73 | std = np.sqrt(np.clip(std, eps, np.inf))
74 | return ymean, std
75 | return ymean
76 |
77 | def update(self, xnew, ynew):
78 | """
79 | Updates the internal RF model with observations `xnew` and targets `ynew`.
80 |
81 | Parameters
82 | ----------
83 | xnew: array-like
84 | New observations.
85 | ynew: array-like
86 | New targets.
87 | """
88 | y = np.concatenate((self.y, ynew), axis=0)
89 | X = np.concatenate((self.X, xnew), axis=0)
90 | self.fit(X, y)
91 |
92 |
93 | class ExtraForest:
94 | def __init__(self, **params):
95 | """
96 | Wrapper around sklearn's ExtraTreesRegressor implementation for pyGPGO.
97 |         Extremely randomized trees can also be used as surrogate models in Bayesian Optimization.
98 | An estimate of 'posterior' variance can be obtained by using the `impurity`
99 | criterion value in each subtree.
100 |
101 | Parameters
102 | ----------
103 |         params: dict, optional
104 |             Keyword arguments passed on to `ExtraTreesRegressor`. Defaults to sklearn's.
105 |
106 | """
107 | self.params = params
108 |
109 | def fit(self, X, y):
110 | """
111 |         Fit an Extra-Trees model to data `X` and targets `y`.
112 |
113 | Parameters
114 | ----------
115 | X : array-like
116 | Input values.
117 | y: array-like
118 | Target values.
119 | """
120 | self.X = X
121 | self.y = y
122 | self.n = self.X.shape[0]
123 | self.model = ExtraTreesRegressor(**self.params)
124 | self.model.fit(X, y)
125 |
126 | def predict(self, Xstar, return_std = True, eps = 1e-6):
127 | """
128 |         Predicts 'posterior' mean and variance for the Extra-Trees model.
129 |
130 | Parameters
131 | ----------
132 | Xstar: array-like
133 | Input values.
134 |         return_std: bool, optional
135 |             Whether to also return the posterior standard deviation. Default is `True`.
136 |         eps: float, optional
137 |             Lower bound used to clip negative variance estimates. Default is `1e-6`.
138 |
139 |
140 | Returns
141 | -------
142 | array-like:
143 | Posterior predicted mean.
144 | array-like:
145 |             Posterior predicted standard deviation.
146 |
147 | """
148 | Xstar = np.atleast_2d(Xstar)
149 | ymean = self.model.predict(Xstar)
150 | if return_std:
151 | std = np.zeros(len(Xstar))
152 | trees = self.model.estimators_
153 |
154 | for tree in trees:
155 | var_tree = tree.tree_.impurity[tree.apply(Xstar)]
156 | var_tree = np.clip(var_tree, eps, np.inf)
157 | mean_tree = tree.predict(Xstar)
158 | std += var_tree + mean_tree ** 2
159 |
160 | std /= len(trees)
161 | std -= ymean ** 2
162 | std = np.sqrt(np.clip(std, eps, np.inf))
163 | return ymean, std
164 | return ymean
165 |
166 | def update(self, xnew, ynew):
167 | """
168 |         Updates the internal Extra-Trees model with observations `xnew` and targets `ynew`.
169 |
170 | Parameters
171 | ----------
172 | xnew: array-like
173 | New observations.
174 | ynew: array-like
175 | New targets.
176 | """
177 | y = np.concatenate((self.y, ynew), axis=0)
178 | X = np.concatenate((self.X, xnew), axis=0)
179 | self.fit(X, y)
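
A short, self-contained sketch (toy data, not taken from the repository) showing the impurity-based uncertainty estimate exposed by the two wrappers above; any keyword argument such as `n_estimators` is simply forwarded to the underlying sklearn regressor:

    import numpy as np
    from pyGPGO.surrogates.RandomForest import RandomForest, ExtraForest

    rng = np.random.RandomState(0)
    X = rng.uniform(0, 5, 50)[:, np.newaxis]
    y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])

    rf = RandomForest(n_estimators=200)       # kwargs go to RandomForestRegressor
    rf.fit(X, y)
    Xstar = np.linspace(0, 5, 10)[:, np.newaxis]
    ymean, ystd = rf.predict(Xstar)           # return_std defaults to True

    ef = ExtraForest(n_estimators=200)        # same interface, ExtraTreesRegressor backend
    ef.fit(X, y)
    ymean_ef, ystd_ef = ef.predict(Xstar)

The per-point 'std' is a law-of-total-variance estimate: the average leaf impurity plus the variance of the per-tree means, clipped at `eps` before taking the square root.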
--------------------------------------------------------------------------------
/pyGPGO/surrogates/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josejimenezluna/pyGPGO/0ee19605ebdba61758906a9500399cc2b3530887/pyGPGO/surrogates/__init__.py
--------------------------------------------------------------------------------
/pyGPGO/surrogates/tStudentProcess.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from collections import OrderedDict
3 | from numpy.linalg import slogdet
4 | from scipy.linalg import inv
5 | from scipy.optimize import minimize
6 | from scipy.special import gammaln
7 |
8 |
9 | def logpdf(x, df, mu, Sigma):
10 | """
11 | Marginal log-likelihood of a Student-t Process
12 |
13 | Parameters
14 | ----------
15 | x: array-like
16 | Point to be evaluated
17 | df: float
18 | Degrees of freedom (>2.0)
19 | mu: array-like
20 | Mean of the process.
21 | Sigma: array-like
22 | Covariance matrix of the process.
23 |
24 | Returns
25 | -------
26 | logp: float
27 | log-likelihood
28 |
29 | """
30 | d = len(x)
31 | x = np.atleast_2d(x)
32 | xm = x - mu
33 | V = df * Sigma
34 | V_inv = np.linalg.inv(V)
35 | _, logdet = slogdet(np.pi * V)
36 |
37 |     logz = -gammaln(df / 2.0 + d / 2.0) + gammaln(df / 2.0) + 0.5 * logdet  # log normalising constant (log-gamma, not gamma)
38 | logp = -0.5 * (df + d) * np.log(1 + np.sum(np.dot(xm, V_inv) * xm, axis=1))
39 |
40 | logp = logp - logz
41 |
42 | return logp[0]
43 |
44 |
45 | class tStudentProcess:
46 | def __init__(self, covfunc, nu=3.0, optimize=False):
47 | """
48 |         Student-t Process regressor class.
49 | This class DOES NOT support gradients in ML estimation yet.
50 |
51 | Parameters
52 | ----------
53 | covfunc: instance from a class of covfunc module
54 | An instance from a class from the `covfunc` module.
55 |         nu: float
56 |             Degrees of freedom (>2.0).
57 |
58 | Attributes
59 | ----------
60 | covfunc: object
61 | Internal covariance function.
62 | nu: float
63 | Degrees of freedom.
64 | optimize: bool
65 | Whether to optimize covariance function hyperparameters.
66 |
67 | """
68 | self.covfunc = covfunc
69 | self.nu = nu
70 | self.optimize = optimize
71 |
72 | def getcovparams(self):
73 | """
74 | Returns current covariance function hyperparameters
75 |
76 | Returns
77 | -------
78 | dict
79 | Dictionary containing covariance function hyperparameters
80 | """
81 | d = {}
82 | for param in self.covfunc.parameters:
83 | d[param] = self.covfunc.__dict__[param]
84 | return d
85 |
86 | def _lmlik(self, param_vector, param_key):
87 | """
88 | Returns marginal negative log-likelihood for given covariance hyperparameters.
89 |
90 | Parameters
91 | ----------
92 | param_vector: list
93 | List of values corresponding to hyperparameters to query.
94 | param_key: list
95 | List of hyperparameter strings corresponding to `param_vector`.
96 |
97 | Returns
98 | -------
99 | float
100 | Negative log-marginal likelihood for chosen hyperparameters.
101 |
102 | """
103 | k_param = OrderedDict()
104 | for k, v in zip(param_key, param_vector):
105 | k_param[k] = v
106 | self.covfunc = self.covfunc.__class__(**k_param, bounds=self.covfunc.bounds)
107 |
108 |         # Temporarily disable optimization so that fit() does not call optHyp() recursively
109 | original_opt = self.optimize
110 | self.optimize = False
111 | self.fit(self.X, self.y)
112 | self.optimize = original_opt
113 |
114 | return (- self.logp)
115 |
116 | def optHyp(self, param_key, param_bounds, n_trials=5):
117 | """
118 | Optimizes the negative marginal log-likelihood for given hyperparameters and bounds.
119 | This is an empirical Bayes approach (or Type II maximum-likelihood).
120 |
121 | Parameters
122 | ----------
123 | param_key: list
124 | List of hyperparameters to optimize.
125 | param_bounds: list
126 | List containing tuples defining bounds for each hyperparameter to optimize over.
127 |         n_trials: int, optional. Number of random restarts for the optimizer (default 5).
128 |         """
129 |         xs = [[1] * len(param_key)]  # one initial value per hyperparameter being optimized
130 | fs = [self._lmlik(xs[0], param_key)]
131 | for trial in range(n_trials):
132 | x0 = []
133 | for param, bound in zip(param_key, param_bounds):
134 | x0.append(np.random.uniform(bound[0], bound[1], 1)[0])
135 |
136 |             res = minimize(self._lmlik, x0=x0, args=(param_key,), method='L-BFGS-B', bounds=param_bounds)
137 | xs.append(res.x)
138 | fs.append(res.fun)
139 |
140 | argmin = np.argmin(fs)
141 | opt_param = xs[argmin]
142 | k_param = OrderedDict()
143 | for k, x in zip(param_key, opt_param):
144 | k_param[k] = x
145 | self.covfunc = self.covfunc.__class__(**k_param, bounds=self.covfunc.bounds)
146 |
147 | def fit(self, X, y):
148 | """
149 |         Fits a Student-t Process regressor.
150 |
151 | Parameters
152 | ----------
153 | X: np.ndarray, shape=(nsamples, nfeatures)
154 | Training instances to fit the GP.
155 | y: np.ndarray, shape=(nsamples,)
156 | Corresponding continuous target values to `X`.
157 |
158 | """
159 | self.X = X
160 | self.y = y
161 | self.n1 = X.shape[0]
162 |
163 | if self.optimize:
164 | self.optHyp(param_key=self.covfunc.parameters, param_bounds=self.covfunc.bounds)
165 |
166 | self.K11 = self.covfunc.K(self.X, self.X)
167 | self.beta1 = np.dot(np.dot(self.y.T, inv(self.K11)), self.y)
168 | self.logp = logpdf(self.y, self.nu, mu=np.zeros(self.n1), Sigma=self.K11)
169 |
170 | def predict(self, Xstar, return_std=False):
171 | """
172 |         Returns the mean and covariance of the posterior Student-t process.
173 |
174 | Parameters
175 | ----------
176 | Xstar: np.ndarray, shape=((nsamples, nfeatures))
177 | Testing instances to predict.
178 | return_std: bool
179 | Whether to return the standard deviation of the posterior process. Otherwise,
180 | it returns the whole covariance matrix of the posterior process.
181 |
182 | Returns
183 | -------
184 | np.ndarray
185 | Mean of the posterior process for testing instances.
186 | np.ndarray
187 | Covariance of the posterior process for testing instances.
188 | """
189 | Xstar = np.atleast_2d(Xstar)
190 | self.K21 = self.covfunc.K(Xstar, self.X)
191 | self.K22 = self.covfunc.K(Xstar, Xstar)
192 | self.K12 = self.covfunc.K(self.X, Xstar)
193 | self.K22_tilde = self.K22 - np.dot(np.dot(self.K21, inv(self.K11)), self.K12)
194 |
195 | phi2 = np.dot(np.dot(self.K21, inv(self.K11)), self.y)
196 | cov = (self.nu + self.beta1 - 2) / (self.nu + self.n1 - 2) * self.K22_tilde
197 | if return_std:
198 | return phi2, np.diag(cov)
199 | return phi2, cov
200 |
201 | def update(self, xnew, ynew):
202 | """
203 | Updates the internal model with `xnew` and `ynew` instances.
204 |
205 | Parameters
206 | ----------
207 | xnew: np.ndarray, shape=((m, nfeatures))
208 | New training instances to update the model with.
209 | ynew: np.ndarray, shape=((m,))
210 | New training targets to update the model with.
211 | """
212 | y = np.concatenate((self.y, ynew), axis=0)
213 | X = np.concatenate((self.X, xnew), axis=0)
214 | self.fit(X, y)
215 |
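
A minimal sketch (toy data) of fitting the Student-t Process surrogate with empirical-Bayes hyperparameter selection and querying its predictive distribution:

    import numpy as np
    from pyGPGO.covfunc import squaredExponential
    from pyGPGO.surrogates.tStudentProcess import tStudentProcess

    rng = np.random.RandomState(0)
    X = rng.uniform(0, 5, 20)[:, np.newaxis]
    y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])

    tsp = tStudentProcess(squaredExponential(), nu=5.0, optimize=True)
    tsp.fit(X, y)                              # Type II ML over l, sigmaf, sigman
    mean, var = tsp.predict(np.array([[2.5]]), return_std=True)
    # with return_std=True the second value is the diagonal of the posterior covariance

Note that the predictive covariance is the GP posterior covariance rescaled by (nu + beta1 - 2) / (nu + n1 - 2), so the Student-t process approaches the plain GP as nu grows.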
--------------------------------------------------------------------------------
/pyGPGO/surrogates/tStudentProcessMCMC.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import theano.tensor as tt
3 | import pymc3 as pm
4 | import matplotlib.pyplot as plt  # used by posteriorPlot
5 | from pyGPGO.surrogates.tStudentProcess import tStudentProcess
6 | from pyGPGO.surrogates.GaussianProcessMCMC import covariance_equivalence
7 |
8 | class tStudentProcessMCMC:
9 | def __init__(self, covfunc, nu=3.0, niter=2000, burnin=1000, init='ADVI', step=None):
10 | """
11 |         Student-t Process class using MCMC sampling of covariance function hyperparameters.
12 |
13 | Parameters
14 | ----------
15 | covfunc:
16 |             Covariance function to use. Currently only `squaredExponential`
17 |             and `Matern` from the `covfunc` module are supported.
18 | nu: float
19 | Degrees of freedom (>2.0)
20 | niter: int
21 | Number of iterations to run MCMC.
22 | burnin: int
23 |             Burn-in iterations to discard from the trace.
24 |         init: str
25 |             Initialization method for NUTS. Check pyMC3 docs.
26 |         step: pyMC3 step method (e.g. `pm.Slice`), optional. Defaults to NUTS if `None`.
27 |         """
28 | self.covfunc = covfunc
29 | self.nu = nu
30 | self.niter = niter
31 | self.burnin = burnin
32 | self.init = init
33 | self.step = step
34 |
35 | def _extractParam(self, unittrace, covparams):
36 | d = {}
37 | for key, value in unittrace.items():
38 | if key in covparams:
39 | d[key] = value
40 | if 'v' in covparams:
41 | d['v'] = 5 / 2
42 | return d
43 |
44 | def fit(self, X, y):
45 | """
46 | Fits a Student-t regressor using MCMC.
47 |
48 | Parameters
49 | ----------
50 | X: np.ndarray, shape=(nsamples, nfeatures)
51 | Training instances to fit the GP.
52 | y: np.ndarray, shape=(nsamples,)
53 | Corresponding continuous target values to `X`.
54 |
55 | """
56 | self.X = X
57 | self.n = self.X.shape[0]
58 | self.y = y
59 | self.model = pm.Model()
60 |
61 | with self.model as model:
62 | l = pm.Uniform('l', 0, 10)
63 |
64 | log_s2_f = pm.Uniform('log_s2_f', lower=-7, upper=5)
65 | s2_f = pm.Deterministic('sigmaf', tt.exp(log_s2_f))
66 |
67 | log_s2_n = pm.Uniform('log_s2_n', lower=-7, upper=5)
68 | s2_n = pm.Deterministic('sigman', tt.exp(log_s2_n))
69 |
70 | f_cov = s2_f * covariance_equivalence[type(self.covfunc).__name__](1, l)
71 | Sigma = f_cov(self.X) + tt.eye(self.n) * s2_n ** 2
72 | y_obs = pm.MvStudentT('y_obs', nu=self.nu, mu=np.zeros(self.n), Sigma=Sigma, observed=self.y)
73 | with self.model as model:
74 | if self.step is not None:
75 | self.trace = pm.sample(self.niter, step=self.step(), return_inferencedata=False)[self.burnin:]
76 | else:
77 | self.trace = pm.sample(self.niter, init=self.init, return_inferencedata=False)[self.burnin:]
78 |
79 | def posteriorPlot(self):
80 | """
81 | Plots sampled posterior distributions for hyperparameters.
82 |
83 | """
84 | with self.model as model:
85 | pm.traceplot(self.trace, var_names=['l', 'sigmaf', 'sigman'])
86 | plt.tight_layout()
87 | plt.show()
88 |
89 | def predict(self, Xstar, return_std=False, nsamples=10):
90 | """
91 | Returns mean and covariances for each posterior sampled Student-t Process.
92 |
93 | Parameters
94 | ----------
95 | Xstar: np.ndarray, shape=((nsamples, nfeatures))
96 | Testing instances to predict.
97 | return_std: bool
98 | Whether to return the standard deviation of the posterior process. Otherwise,
99 | it returns the whole covariance matrix of the posterior process.
100 |         nsamples: int
101 | Number of posterior MCMC samples to consider.
102 |
103 | Returns
104 | -------
105 | np.ndarray
106 | Mean of the posterior process for each MCMC sample and Xstar.
107 | np.ndarray
108 |             Covariance of the posterior process for each MCMC sample and `Xstar`.
109 | """
110 | chunk = list(self.trace)
111 | chunk = chunk[::-1][:nsamples]
112 | post_mean = []
113 | post_var = []
114 | for posterior_sample in chunk:
115 | params = self._extractParam(posterior_sample, self.covfunc.parameters)
116 | covfunc = self.covfunc.__class__(**params, bounds=self.covfunc.bounds)
117 | gp = tStudentProcess(covfunc, nu=self.nu + self.n)
118 | gp.fit(self.X, self.y)
119 | m, s = gp.predict(Xstar, return_std=return_std)
120 | post_mean.append(m)
121 | post_var.append(s)
122 | return np.array(post_mean), np.array(post_var)
123 |
124 | def update(self, xnew, ynew):
125 | """
126 | Updates the internal model with `xnew` and `ynew` instances.
127 |
128 | Parameters
129 | ----------
130 | xnew: np.ndarray, shape=((m, nfeatures))
131 | New training instances to update the model with.
132 | ynew: np.ndarray, shape=((m,))
133 | New training targets to update the model with.
134 | """
135 | y = np.concatenate((self.y, ynew), axis=0)
136 | X = np.concatenate((self.X, xnew), axis=0)
137 | self.fit(X, y)
138 |
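
A sketch wiring this surrogate into the optimizer, in the style of `tests/test_GPGO.py`; the acquisition mode comes from the `modes_mcmc` list in `tests/test_acquisition.py`, and the assumption here is that `GPGO` treats the Student-t MCMC surrogate the same way it treats `GaussianProcessMCMC`:

    import numpy as np
    import pymc3 as pm
    from pyGPGO.covfunc import squaredExponential
    from pyGPGO.surrogates.tStudentProcessMCMC import tStudentProcessMCMC
    from pyGPGO.acquisition import Acquisition
    from pyGPGO.GPGO import GPGO

    def f(x):
        # negated Forrester-style 1D test function, as used in the tests
        return -((6 * x - 2) ** 2 * np.sin(12 * x - 4))

    np.random.seed(20)
    tsp = tStudentProcessMCMC(squaredExponential(), niter=200, burnin=100, step=pm.Slice)
    acq = Acquisition(mode='tIntegratedExpectedImprovement')
    gpgo = GPGO(tsp, acq, f, {'x': ('cont', (0, 1))})
    gpgo.run(max_iter=5)
    print(gpgo.getResult())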
--------------------------------------------------------------------------------
/pyGPGO/version.py:
--------------------------------------------------------------------------------
1 | __version__ = '0.5.1'
2 |
--------------------------------------------------------------------------------
/requirements_rtd.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | joblib
4 | scikit-learn
5 | pymc3
6 | theano
7 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | version = {}
4 | with open('pyGPGO/version.py') as fp:
5 | exec(fp.read(), version)
6 |
7 | def readme():
8 | with open('README.md') as f:
9 | return f.read()
10 |
11 | setup(name='pyGPGO',
12 | version=version['__version__'],
13 | description='Bayesian Optimization tools in Python',
14 | classifiers=[
15 | 'Development Status :: 2 - Pre-Alpha',
16 | 'License :: OSI Approved :: MIT License',
17 | 'Programming Language :: Python :: 3.6',
18 | 'Topic :: Scientific/Engineering :: Mathematics'
19 | ],
20 | keywords = ['machine-learning', 'optimization', 'bayesian'],
21 | url='https://github.com/hawk31/pyGPGO',
22 | author='Jose Jimenez',
23 | author_email='jose.jimenez@upf.edu',
24 | license='MIT',
25 | packages=find_packages(),
26 | install_requires=[
27 | 'numpy',
28 | 'mkl',
29 | 'scipy',
30 | 'joblib',
31 | 'scikit-learn',
32 | 'Theano-PyMC',
33 | 'pyMC3'
34 | ],
35 | zip_safe=False)
36 |
--------------------------------------------------------------------------------
/tests/test_GPGO.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pymc3 as pm
3 | from pyGPGO.covfunc import squaredExponential
4 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
5 | from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC
6 | from pyGPGO.surrogates.RandomForest import RandomForest
7 | from pyGPGO.acquisition import Acquisition
8 | from pyGPGO.GPGO import GPGO
9 |
10 |
11 | def f(x):
12 | return -((6 * x - 2) ** 2 * np.sin(12 * x - 4))
13 |
14 |
15 | def test_GPGO():
16 | np.random.seed(20)
17 | sexp = squaredExponential()
18 | gp = GaussianProcess(sexp)
19 | acq = Acquisition(mode='ExpectedImprovement')
20 | params = {'x': ('cont', (0, 1))}
21 | gpgo = GPGO(gp, acq, f, params)
22 | gpgo.run(max_iter=10)
23 | res = gpgo.getResult()[0]
24 | assert .6 < res['x'] < .8
25 |
26 |
27 | def test_GPGO_mcmc():
28 | np.random.seed(20)
29 | sexp = squaredExponential()
30 | gp = GaussianProcessMCMC(sexp, step=pm.Slice, niter=100)
31 | acq = Acquisition(mode='IntegratedExpectedImprovement')
32 | params = {'x': ('cont', (0, 1))}
33 | gpgo = GPGO(gp, acq, f, params)
34 | gpgo.run(max_iter=10)
35 | res = gpgo.getResult()[0]
36 | assert .6 < res['x'] < .8
37 |
38 |
39 | def test_GPGO_sk():
40 | np.random.seed(20)
41 | rf = RandomForest()
42 | acq = Acquisition(mode='ExpectedImprovement')
43 | params = {'x': ('cont', (0, 1))}
44 | gpgo = GPGO(rf, acq, f, params)
45 | gpgo.run(max_iter=10)
46 | res = gpgo.getResult()[0]
47 | assert .7 < res['x'] < .8
48 |
49 |
50 | if __name__ == '__main__':
51 | test_GPGO()
52 | test_GPGO_mcmc()
53 | test_GPGO_sk()
54 |
--------------------------------------------------------------------------------
/tests/test_acquisition.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from pyGPGO.acquisition import Acquisition
3 |
4 |
5 | modes = ['ExpectedImprovement', 'ProbabilityImprovement', 'UCB', 'Entropy',
6 | 'tExpectedImprovement']
7 |
8 | modes_mcmc = ['IntegratedExpectedImprovement', 'IntegratedProbabilityImprovement',
9 | 'IntegratedUCB', 'tIntegratedExpectedImprovement']
10 |
11 |
12 | tau = 1.96
13 | mean = np.array([0])
14 | std = np.array([1])
15 |
16 | extra_params = {'beta': 1.5}
17 |
18 | means = np.random.randn(1000)
19 | stds = np.random.uniform(0.8, 1.2, 1000)
20 |
21 |
22 | def test_acq():
23 | for mode in modes:
24 | acq = Acquisition(mode=mode)
25 | acq.eval(tau, mean, std)
26 |
27 |
28 | def test_acq_mcmc():
29 | for mode in modes_mcmc:
30 | acq = Acquisition(mode=mode)
31 | print(acq.eval(tau, means, stds))
32 |
33 |
34 | if __name__ == '__main__':
35 | test_acq()
36 | test_acq_mcmc()
37 |
--------------------------------------------------------------------------------
/tests/test_covfunc.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from pyGPGO.covfunc import squaredExponential, matern, matern32, matern52, \
3 | gammaExponential, rationalQuadratic, expSine, dotProd
4 |
5 |
6 | covfuncs = [squaredExponential(), matern(), matern32(), matern52(), gammaExponential(),
7 | rationalQuadratic(), expSine(), dotProd()]
8 |
9 | grad_enabled = [squaredExponential(), matern32(), matern52(), gammaExponential(),
10 | rationalQuadratic(), expSine()]
11 |
12 | # Some kernels do not have gradient computation enabled, as is the case
13 | # for the generalised Matérn kernel.
14 | #
15 | # All (but the dotProd kernel) have a characteristic length-scale l that
16 | # we test for here.
17 |
18 | covariance_classes = dict(squaredExponential=squaredExponential, matern=matern, matern32=matern32, matern52=matern52,
19 | gammaExponential=gammaExponential, rationalQuadratic=rationalQuadratic, dotProd=dotProd)
20 |
21 | hyperparameters_interval = dict(squaredExponential=dict(l=(0, 2.0), sigmaf=(0, 0.5), sigman=(0, 0.5)),
22 | matern=dict(l=(0, 2.0), sigmaf=(0, 0.5), sigman=(0, 0.5)),
23 | matern32=dict(l=(0, 2.0), sigmaf=(0, 0.5), sigman=(0, 0.5)),
24 | matern52=dict(l=(0, 2.0), sigmaf=(0, 0.5), sigman=(0, 0.5)),
25 | gammaExponential=dict(gamma=(0,2.0), l=(0, 2.0), sigmaf=(0, 0.5), sigman=(0, 0.5)),
26 | rationalQuadratic=dict(alpha=(0,2.0), l=(0, 2.0), sigmaf=(0, 0.5), sigman=(0, 0.5)),
27 | dotProd=dict(sigmaf=(0, 0.5), sigman=(0, 0.5)))
28 |
29 | def generate_hyperparameters(**hyperparameter_interval):
30 |     generated_hyperparameters = dict()
31 |     for hyperparameter, bound in hyperparameter_interval.items():
32 | generated_hyperparameters[hyperparameter] = np.random.uniform(bound[0], bound[1])
33 | return generated_hyperparameters
34 |
35 |
36 | def test_psd_covfunc():
37 | # Check if generated covariance functions are positive definite
38 | np.random.seed(0)
39 | for name in covariance_classes:
40 | for i in range(10):
41 | generated_hyperparameters = generate_hyperparameters(**hyperparameters_interval[name])
42 | cov = covariance_classes[name](**generated_hyperparameters)
43 | for j in range(100):
44 | X = np.random.randn(10, 2)
45 | eigvals = np.linalg.eigvals(cov.K(X,X))
46 | assert (eigvals > 0).all()
47 |
48 |
49 | def test_sim():
50 | rng = np.random.RandomState(0)
51 |     X = rng.randn(100, 3)  # draw from the seeded generator
52 | for cov in covfuncs:
53 | cov.K(X, X)
54 |
55 |
56 | def test_grad():
57 | rng = np.random.RandomState(0)
58 |     X = rng.randn(3, 3)  # draw from the seeded generator
59 | for cov in grad_enabled:
60 | cov.gradK(X, X, 'l')
61 |
62 |
63 | if __name__ == '__main__':
64 | test_sim()
65 | test_grad()
--------------------------------------------------------------------------------
/tests/test_surrogates.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from pyGPGO.surrogates.GaussianProcess import GaussianProcess
3 | from pyGPGO.surrogates.tStudentProcess import tStudentProcess
4 | from pyGPGO.covfunc import squaredExponential
5 |
6 |
7 | def test_GP():
8 | rng = np.random.RandomState(0)
9 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
10 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
11 |
12 | sexp = squaredExponential()
13 | gp = GaussianProcess(sexp)
14 | gp.fit(X, y)
15 |
16 |
17 | def test_GP_opt_nograd():
18 | rng = np.random.RandomState(0)
19 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
20 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
21 |
22 | sexp = squaredExponential()
23 | gp = GaussianProcess(sexp, optimize=True, usegrads=False)
24 | gp.fit(X, y)
25 |
26 | params = gp.getcovparams()
27 |
28 | assert 0.36 < params['l'] < 0.37
29 | assert 0.39 < params['sigmaf'] < 0.41
30 | assert 0.29 < params['sigman'] < 0.3
31 |
32 |
33 | def test_GP_opt_grad():
34 | rng = np.random.RandomState(0)
35 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
36 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
37 |
38 | sexp = squaredExponential()
39 | gp = GaussianProcess(sexp, optimize=True, usegrads=True)
40 | gp.fit(X, y)
41 |
42 | params = gp.getcovparams()
43 |
44 | assert 0.36 < params['l'] < 0.37
45 | assert 0.39 < params['sigmaf'] < 0.41
46 | assert 0.29 < params['sigman'] < 0.3
47 |
48 |
49 | def test_tSP():
50 | rng = np.random.RandomState(0)
51 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
52 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
53 |
54 | sexp = squaredExponential()
55 | tsp = tStudentProcess(sexp)
56 | tsp.fit(X, y)
57 |
58 |
59 | def test_tSP_opt_nograd():
60 | rng = np.random.RandomState(0)
61 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
62 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
63 |
64 | sexp = squaredExponential()
65 | tsp = tStudentProcess(sexp, optimize=True)
66 | tsp.fit(X, y)
67 | params = tsp.getcovparams()
68 |
69 | assert 0.3 < params['l'] < 0.5
70 | assert 0.3 < params['sigmaf'] < 0.6
71 | assert 0.2 < params['sigman'] < 0.4
72 |
73 |
74 | if __name__ == '__main__':
75 | test_GP()
76 | test_GP_opt_nograd()
77 | test_GP_opt_grad()
78 | test_tSP()
79 | test_tSP_opt_nograd()
80 |
--------------------------------------------------------------------------------
/tests/test_surrogates_mcmc.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pymc3 as pm
3 | from pyGPGO.surrogates.GaussianProcessMCMC import GaussianProcessMCMC
4 | from pyGPGO.surrogates.tStudentProcessMCMC import tStudentProcessMCMC
5 | from pyGPGO.covfunc import squaredExponential
6 |
7 |
8 | def test_GP():
9 | rng = np.random.RandomState(0)
10 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
11 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
12 |
13 | sexp = squaredExponential()
14 | gp = GaussianProcessMCMC(sexp, step=pm.Slice)
15 | gp.fit(X, y)
16 |
17 |
18 | def test_tSP():
19 | rng = np.random.RandomState(0)
20 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
21 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
22 |
23 | sexp = squaredExponential()
24 | tsp = tStudentProcessMCMC(sexp, step=pm.Slice, niter=100)
25 | tsp.fit(X, y)
26 |
27 |
28 | if __name__ == '__main__':
29 | test_GP()
30 | test_tSP()
31 |
--------------------------------------------------------------------------------
/tests/test_surrogates_sk.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from pyGPGO.surrogates.RandomForest import RandomForest, ExtraForest
3 | from pyGPGO.surrogates.BoostedTrees import BoostedTrees
4 |
5 |
6 | def f(x):
7 | return -((6 * x - 2) ** 2 * np.sin(12 * x - 4))
8 |
9 |
10 | def test_rf():
11 | rng = np.random.RandomState(0)
12 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
13 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
14 |
15 | rf = RandomForest()
16 | rf.fit(X, y)
17 |
18 |
19 | def test_ef():
20 | rng = np.random.RandomState(0)
21 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
22 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
23 |
24 | ef = ExtraForest()
25 | ef.fit(X, y)
26 |
27 |
28 | def test_bt():
29 | rng = np.random.RandomState(0)
30 | X = rng.uniform(0, 5, 20)[:, np.newaxis]
31 | y = 0.5 * np.sin(3 * X[:, 0]) + rng.normal(0, 0.5, X.shape[0])
32 |
33 | bt = BoostedTrees()
34 | bt.fit(X, y)
35 |
36 |
37 | if __name__ == '__main__':
38 | test_rf()
39 | test_ef()
40 | test_bt()
41 |
--------------------------------------------------------------------------------