├── .gitignore ├── LICENSE ├── README.md ├── bayesquad ├── __init__.py ├── _cache.py ├── _decorators.py ├── _kernel_gradients.py ├── _maths_helpers.py ├── _optimisation.py ├── _transformations.py ├── _util.py ├── acquisition_functions.py ├── batch_selection.py ├── gps.py ├── plotting.py ├── priors.py └── quadrature.py ├── docs ├── .nojekyll ├── Makefile ├── build │ ├── doctrees │ │ ├── bayesquad.acquisition_functions.doctree │ │ ├── bayesquad.batch_selection.doctree │ │ ├── bayesquad.doctree │ │ ├── bayesquad.gps.doctree │ │ ├── bayesquad.plotting.doctree │ │ ├── bayesquad.priors.doctree │ │ ├── bayesquad.quadrature.doctree │ │ ├── environment.pickle │ │ ├── index.doctree │ │ └── modules.doctree │ └── html │ │ ├── .buildinfo │ │ ├── _modules │ │ ├── bayesquad │ │ │ ├── acquisition_functions.html │ │ │ ├── batch_selection.html │ │ │ ├── gps.html │ │ │ ├── plotting.html │ │ │ ├── priors.html │ │ │ └── quadrature.html │ │ └── index.html │ │ ├── _sources │ │ ├── bayesquad.acquisition_functions.rst.txt │ │ ├── bayesquad.batch_selection.rst.txt │ │ ├── bayesquad.gps.rst.txt │ │ ├── bayesquad.plotting.rst.txt │ │ ├── bayesquad.priors.rst.txt │ │ ├── bayesquad.quadrature.rst.txt │ │ ├── bayesquad.rst.txt │ │ ├── index.rst.txt │ │ └── modules.rst.txt │ │ ├── _static │ │ ├── ajax-loader.gif │ │ ├── basic.css │ │ ├── classic.css │ │ ├── comment-bright.png │ │ ├── comment-close.png │ │ ├── comment.png │ │ ├── doctools.js │ │ ├── documentation_options.js │ │ ├── down-pressed.png │ │ ├── down.png │ │ ├── file.png │ │ ├── jquery-3.2.1.js │ │ ├── jquery.js │ │ ├── minus.png │ │ ├── plus.png │ │ ├── pygments.css │ │ ├── searchtools.js │ │ ├── sidebar.js │ │ ├── underscore-1.3.1.js │ │ ├── underscore.js │ │ ├── up-pressed.png │ │ ├── up.png │ │ └── websupport.js │ │ ├── bayesquad.acquisition_functions.html │ │ ├── bayesquad.batch_selection.html │ │ ├── bayesquad.gps.html │ │ ├── bayesquad.html │ │ ├── bayesquad.plotting.html │ │ ├── bayesquad.priors.html │ │ ├── bayesquad.quadrature.html │ │ ├── genindex.html │ │ ├── index.html │ │ ├── modules.html │ │ ├── objects.inv │ │ ├── py-modindex.html │ │ ├── search.html │ │ └── searchindex.js ├── index.html └── source │ ├── bayesquad.acquisition_functions.rst │ ├── bayesquad.batch_selection.rst │ ├── bayesquad.gps.rst │ ├── bayesquad.plotting.rst │ ├── bayesquad.priors.rst │ ├── bayesquad.quadrature.rst │ ├── bayesquad.rst │ ├── conf.py │ ├── index.rst │ └── modules.rst ├── examples ├── example_1d.py └── example_2d.py ├── make-docs.sh ├── requirements.txt └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | results 2 | 3 | # Don't want to enforce IDE settings in here 4 | .idea 5 | 6 | # Created by https://www.gitignore.io/api/osx,python,pycharm 7 | 8 | ### OSX ### 9 | # General 10 | .DS_Store 11 | .AppleDouble 12 | .LSOverride 13 | 14 | # Icon must end with two \r 15 | Icon 16 | 17 | # Thumbnails 18 | ._* 19 | 20 | # Files that might appear in the root of a volume 21 | .DocumentRevisions-V100 22 | .fseventsd 23 | .Spotlight-V100 24 | .TemporaryItems 25 | .Trashes 26 | .VolumeIcon.icns 27 | .com.apple.timemachine.donotpresent 28 | 29 | # Directories potentially created on remote AFP share 30 | .AppleDB 31 | .AppleDesktop 32 | Network Trash Folder 33 | Temporary Items 34 | .apdisk 35 | 36 | ### PyCharm ### 37 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 38 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 39 | 40 | 
# User-specific stuff 41 | .idea/**/workspace.xml 42 | .idea/**/tasks.xml 43 | .idea/**/usage.statistics.xml 44 | .idea/**/dictionaries 45 | .idea/**/shelf 46 | 47 | # Sensitive or high-churn files 48 | .idea/**/dataSources/ 49 | .idea/**/dataSources.ids 50 | .idea/**/dataSources.local.xml 51 | .idea/**/sqlDataSources.xml 52 | .idea/**/dynamic.xml 53 | .idea/**/uiDesigner.xml 54 | .idea/**/dbnavigator.xml 55 | 56 | # Gradle 57 | .idea/**/gradle.xml 58 | .idea/**/libraries 59 | 60 | # Gradle and Maven with auto-import 61 | # When using Gradle or Maven with auto-import, you should exclude module files, 62 | # since they will be recreated, and may cause churn. Uncomment if using 63 | # auto-import. 64 | # .idea/modules.xml 65 | # .idea/*.iml 66 | # .idea/modules 67 | 68 | # CMake 69 | cmake-build-*/ 70 | 71 | # Mongo Explorer plugin 72 | .idea/**/mongoSettings.xml 73 | 74 | # File-based project format 75 | *.iws 76 | 77 | # IntelliJ 78 | out/ 79 | 80 | # mpeltonen/sbt-idea plugin 81 | .idea_modules/ 82 | 83 | # JIRA plugin 84 | atlassian-ide-plugin.xml 85 | 86 | # Cursive Clojure plugin 87 | .idea/replstate.xml 88 | 89 | # Crashlytics plugin (for Android Studio and IntelliJ) 90 | com_crashlytics_export_strings.xml 91 | crashlytics.properties 92 | crashlytics-build.properties 93 | fabric.properties 94 | 95 | # Editor-based Rest Client 96 | .idea/httpRequests 97 | 98 | ### PyCharm Patch ### 99 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 100 | 101 | # *.iml 102 | # modules.xml 103 | # .idea/misc.xml 104 | # *.ipr 105 | 106 | # Sonarlint plugin 107 | .idea/sonarlint 108 | 109 | ### Python ### 110 | # Byte-compiled / optimized / DLL files 111 | __pycache__/ 112 | *.py[cod] 113 | *$py.class 114 | 115 | # C extensions 116 | *.so 117 | 118 | # Distribution / packaging 119 | .Python 120 | # build/ 121 | develop-eggs/ 122 | dist/ 123 | downloads/ 124 | eggs/ 125 | .eggs/ 126 | lib/ 127 | lib64/ 128 | parts/ 129 | sdist/ 130 | var/ 131 | wheels/ 132 | *.egg-info/ 133 | .installed.cfg 134 | *.egg 135 | MANIFEST 136 | 137 | # PyInstaller 138 | # Usually these files are written by a python script from a template 139 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
140 | *.manifest 141 | *.spec 142 | 143 | # Installer logs 144 | pip-log.txt 145 | pip-delete-this-directory.txt 146 | 147 | # Unit test / coverage reports 148 | htmlcov/ 149 | .tox/ 150 | .coverage 151 | .coverage.* 152 | .cache 153 | nosetests.xml 154 | coverage.xml 155 | *.cover 156 | .hypothesis/ 157 | .pytest_cache/ 158 | 159 | # Translations 160 | *.mo 161 | *.pot 162 | 163 | # Django stuff: 164 | *.log 165 | local_settings.py 166 | db.sqlite3 167 | 168 | # Flask stuff: 169 | instance/ 170 | .webassets-cache 171 | 172 | # Scrapy stuff: 173 | .scrapy 174 | 175 | # Sphinx documentation 176 | # docs/_build/ 177 | 178 | # PyBuilder 179 | target/ 180 | 181 | # Jupyter Notebook 182 | .ipynb_checkpoints 183 | 184 | # pyenv 185 | .python-version 186 | 187 | # celery beat schedule file 188 | celerybeat-schedule 189 | 190 | # SageMath parsed files 191 | *.sage.py 192 | 193 | # Environments 194 | .env 195 | .venv 196 | env/ 197 | venv/ 198 | ENV/ 199 | env.bak/ 200 | venv.bak/ 201 | 202 | # Spyder project settings 203 | .spyderproject 204 | .spyproject 205 | 206 | # Rope project settings 207 | .ropeproject 208 | 209 | # mkdocs documentation 210 | /site 211 | 212 | # mypy 213 | .mypy_cache/ 214 | 215 | ### Python Patch ### 216 | .venv/ 217 | 218 | ### Python.VirtualEnv Stack ### 219 | # Virtualenv 220 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 221 | [Bb]in 222 | [Ii]nclude 223 | [Ll]ib 224 | [Ll]ib64 225 | [Ll]ocal 226 | [Ss]cripts 227 | pyvenv.cfg 228 | pip-selfcheck.json 229 | 230 | 231 | # End of https://www.gitignore.io/api/osx,python,pycharm 232 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Edward Wagstaff 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Bayesian Quadrature 2 | 3 | This library provides code for evaluating the integral of non-negative functions using Bayesian Quadrature, both serially and in a batched mode. 
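As a quick, illustrative sketch of the intended workflow (the warped-GP construction below is an assumption; see `bayesquad/gps.py` and the scripts in the `examples` directory for the definitive setup):

```python
import GPy
import numpy as np

from bayesquad.batch_selection import select_batch
from bayesquad.gps import WsabiLGP
from bayesquad.priors import Gaussian
from bayesquad.quadrature import IntegrandModel


def integrand(x):
    """A non-negative, vectorised integrand: input shape (num_points, 1), output shape (num_points)."""
    return np.exp(-np.sum(x ** 2, axis=-1))


prior = Gaussian(mean=np.zeros(1), covariance=np.eye(1))

# Seed the model with a few initial evaluations. NB the exact way the warped GP is built here
# (and whether it expects raw or warped observations) is an assumption - check bayesquad/gps.py.
initial_x = prior.sample(5)
initial_y = integrand(initial_x)[:, None]
gpy_model = GPy.models.GPRegression(initial_x, initial_y, GPy.kern.RBF(input_dim=1))
warped_gp = WsabiLGP(gpy_model)

model = IntegrandModel(warped_gp, prior)

# Repeatedly select a batch of evaluation points and update the model with the results.
for _ in range(5):
    batch = select_batch(model, batch_size=2, batch_method="Local Penalisation")
    x = np.array(batch)
    model.update(x, integrand(x))

print(model.integral_mean())
```

The supported batch selection methods are listed in `bayesquad/batch_selection.py`: "Local Penalisation", "Kriging Believer" and "Kriging Optimist".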
4 | 5 | For some background on Bayesian quadrature, see: 6 | 7 | - [Introductory slides by David Duvenaud](https://www.cs.toronto.edu/~duvenaud/talks/intro_bq.pdf), or [these slides by Roman Garnett](http://probabilistic-numerics.org/assets/pdf/nips2015_probint/roman_talk.pdf) 8 | 9 | And for gory detail: 10 | 11 | - [Sampling for Inference in Probabilistic Models with Fast Bayesian Quadrature, Gunter et al. 2014](https://papers.nips.cc/paper/5483-sampling-for-inference-in-probabilistic-models-with-fast-bayesian-quadrature.pdf) for details on the warped Gaussian Process model implemented here ("WSABI") 12 | - [Batch Selection for Parallelisation of Bayesian Quadrature](https://arxiv.org/abs/1812.01553) for details on our batch selection process 13 | 14 | ## Installation 15 | 16 | Check out this repository and run `pip install .` in the root directory of the repository (i.e. in the directory containing setup.py). You should then be able to run the example scripts. 17 | 18 | ## Documentation 19 | 20 | Documentation is still a work in progress, but some docs are available at https://OxfordML.github.io/bayesquad 21 | 22 | -------------------------------------------------------------------------------- /bayesquad/__init__.py: -------------------------------------------------------------------------------- 1 | """A module for performing bayesian quadrature, supporting batch selection of points for evaluation and warped GP models 2 | """ 3 | -------------------------------------------------------------------------------- /bayesquad/_cache.py: -------------------------------------------------------------------------------- 1 | """Basic caching functionality.""" 2 | from functools import wraps 3 | 4 | _cache = {} 5 | 6 | 7 | def last_value_cache(func): 8 | """Cache the result of most recent invocation of this method. 9 | 10 | This decorator may be applied to a method which takes one argument (excluding `self`). If the method is called 11 | consecutively with the same argument, the method will immediately return the previous result rather than computing 12 | the result again. Note that by "the same argument" we mean the same object - two different but equal objects will 13 | not be regarded as the same by this decorator. 14 | 15 | The cache is not shared between different instances of the same class. 16 | 17 | Warnings 18 | -------- 19 | Instances of a class with at least one method using this decorator **must** have :func:`~clear_last_value_caches` 20 | called on them when they are destroyed (e.g. in the class's `__del__` method). If this is not done, a new instance 21 | with the same id may incorrectly share the destroyed instance's cache. 22 | 23 | Examples 24 | -------- 25 | >>> import numpy as np 26 | 27 | >>> class Foo: 28 | ... def __init__(self): 29 | ... self._count_invocations = 0 30 | ... 31 | ... def __del__(self): 32 | ... clear_last_value_caches(self) 33 | ... 34 | ... @last_value_cache 35 | ... def do_something_expensive(self, array): 36 | ... # Do something expensive here. 37 | ... 38 | ... self._count_invocations += 1 39 | ... 40 | ... def count_expensive_operations(self): 41 | ... 
return self._count_invocations 42 | 43 | >>> foo = Foo() 44 | >>> a = np.array(1) 45 | >>> b = np.array(1) 46 | 47 | `a` and `b` are distinct: 48 | 49 | >>> a is b 50 | False 51 | 52 | Passing `a` twice in succession will hit the cache: 53 | 54 | >>> foo.do_something_expensive(a) 55 | >>> foo.count_expensive_operations() 56 | 1 57 | 58 | >>> foo.do_something_expensive(a) 59 | >>> foo.count_expensive_operations() 60 | 1 61 | 62 | We get a cache miss when passing a different object: 63 | 64 | >>> foo.do_something_expensive(b) 65 | >>> foo.count_expensive_operations() 66 | 2 67 | 68 | Since only a single function call is cached, we get a cache miss when passing the original object again: 69 | 70 | >>> foo.do_something_expensive(a) 71 | >>> foo.count_expensive_operations() 72 | 3 73 | 74 | The cache is not shared between instances: 75 | 76 | >>> bar = Foo() 77 | >>> bar.do_something_expensive(a) 78 | >>> bar.count_expensive_operations() 79 | 1 80 | 81 | The following is a hack to stop PyCharm wrongly warning about unresolved references in this doctest. 82 | See https://youtrack.jetbrains.com/issue/PY-31517 83 | 84 | >>> self = Foo() 85 | """ 86 | @wraps(func) 87 | def transformed_function(self, x): 88 | cache_key = "{}_{}".format(id(self), id(func)) 89 | 90 | if cache_key not in _cache or x is not _cache[cache_key][0]: 91 | ret = func(self, x) 92 | 93 | _cache[cache_key] = [x, ret] 94 | 95 | return ret 96 | else: 97 | return _cache[cache_key][1] 98 | 99 | return transformed_function 100 | 101 | 102 | def clear_last_value_caches(obj): 103 | """Clear the :func:`~last_value_cache` of every method on the given object. 104 | 105 | See Also 106 | -------- 107 | :func:`~last_value_cache`""" 108 | obj_id = str(id(obj)) 109 | 110 | keys_to_delete = [] 111 | 112 | for key in _cache: 113 | if key.startswith(obj_id): 114 | keys_to_delete.append(key) 115 | 116 | for key in keys_to_delete: 117 | del _cache[key] 118 | -------------------------------------------------------------------------------- /bayesquad/_decorators.py: -------------------------------------------------------------------------------- 1 | """Decorators to modify function behaviour.""" 2 | from functools import wraps 3 | 4 | import numpy as np 5 | from numpy import ndarray 6 | 7 | 8 | def flexible_array_dimensions(func): 9 | """Modifies a function so that it can accept either 1D or 2D arrays, and return arrays of consistent dimension. 10 | 11 | This decorator allows a vectorised function to be evaluated at a single point, passed as a 1D array. It is intended 12 | to be applied to a function whose array arguments have first dimension ranging across data points, and whose return 13 | values are also arrays with first dimension ranging across data points. After this decorator has been applied, the 14 | function may be evaluated at a single point by passing a 1D array, and the trivial first axis will be removed from 15 | the returned arrays. Within a method which has this decorator applied, we may assume that all array arguments are 16 | 2D, with shape (num_points, num_dimensions). 17 | 18 | NB this means that a 1D array of length n will be interpreted as a single n-dimensional point, rather than n 19 | 1-dimensional points. 
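    Examples
    --------
    An illustrative sketch (the `sum_of_squares` helper below is hypothetical, not part of this package):

    >>> import numpy as np
    >>> @flexible_array_dimensions
    ... def sum_of_squares(x):
    ...     return np.sum(x ** 2, axis=-1)
    >>> sum_of_squares(np.array([[1.0, 2.0], [3.0, 4.0]])).shape   # Two 2-dimensional points -> a 1D result.
    (2,)
    >>> sum_of_squares(np.array([1.0, 2.0])).shape                 # A single 2-dimensional point -> a 0D result.
    ()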
20 | """ 21 | @wraps(func) 22 | def transformed_function(*args, **kwargs): 23 | new_args = tuple(np.atleast_2d(arg) if isinstance(arg, ndarray) 24 | else arg 25 | for arg in args) 26 | 27 | values = func(*new_args, **kwargs) 28 | 29 | if all([arg is new_arg for arg, new_arg in zip(args, new_args)]): 30 | return values 31 | 32 | if isinstance(values, tuple): 33 | return tuple(np.squeeze(value, axis=0) if isinstance(value, ndarray) and np.size(value, 0) == 1 34 | else value 35 | for value in values) 36 | elif isinstance(values, ndarray) and np.size(values, 0) == 1: 37 | return np.squeeze(values, axis=0) 38 | else: 39 | return values 40 | 41 | return transformed_function 42 | -------------------------------------------------------------------------------- /bayesquad/_kernel_gradients.py: -------------------------------------------------------------------------------- 1 | """Functions for computing the gradients of Gaussian Process kernels.""" 2 | 3 | import numpy as np 4 | 5 | from GPy.kern.src.kern import Kern 6 | from GPy.kern.src.rbf import RBF 7 | from GPy.kern.src.stationary import Stationary 8 | from numpy import ndarray, newaxis 9 | 10 | 11 | def jacobian(kernel: Kern, variable_points: ndarray, fixed_points: ndarray) -> ndarray: 12 | """Return the Jacobian of a kernel evaluated at all pairs from two sets of points. 13 | 14 | Given a kernel and two sets :math:`X, D` of points (`variable_points` and `fixed_points` respectively), this 15 | function will evaluate the Jacobian of the kernel at each pair of points :math:`(x, d)` with :math:`x \in X` and 16 | :math:`d \in D`. The derivative is taken with respect to the first argument, i.e. :math:`d` is regarded as a 17 | fixed quantity. Typically, :math:`D` will be the set of :math:`x` values in the data set of a Gaussian Process, and 18 | :math:`X` will be the set of :math:`x` values at which we wish to find the gradient of the posterior GP. 19 | 20 | Parameters 21 | ---------- 22 | kernel 23 | The kernel to be differentiated. Currently supported kernels are: 24 | - :class:`GPy.kern.src.rbf.RBF` 25 | variable_points 26 | A 2D array of points, with shape (num_variable_points, num_dimensions). 27 | fixed_points 28 | A 2D array of points, with shape (num_fixed_points, num_dimensions). 29 | 30 | Returns 31 | ------- 32 | ndarray 33 | A 3D array of shape (num_variable_points, num_fixed_points, num_dimensions), whose (i, j, k)-th element is the 34 | k-th component of the Jacobian of the kernel evaluated at the i-th point of :math:`X` and the j-th point of 35 | :math:`D`. 36 | 37 | Raises 38 | ------ 39 | NotImplementedError 40 | If the provided kernel type is not supported. See the parameters list for a list of supported kernels. 41 | """ 42 | if isinstance(kernel, RBF): 43 | lengthscale = kernel.lengthscale.values[0] 44 | k = kernel.K(variable_points, fixed_points) 45 | 46 | # The (i, j, k)-th element of this is the k-th component of X_i - D_j. 47 | differences = variable_points[:, newaxis, :] - fixed_points[newaxis, :, :] 48 | 49 | return -k[:, :, newaxis] * differences / (lengthscale ** 2) 50 | else: 51 | raise NotImplementedError 52 | 53 | 54 | def hessian(kernel: Kern, variable_points: ndarray, fixed_points: ndarray) -> ndarray: 55 | """Return the Hessian of a kernel evaluated at all pairs from two sets of points. 
56 | 
57 |     Given a kernel and two sets :math:`X, D` of points (`variable_points` and `fixed_points` respectively), this
58 |     function will evaluate the Hessian of the kernel at each pair of points :math:`(x, d)` with :math:`x \in X` and
59 |     :math:`d \in D`. The derivatives are taken with respect to the first argument, i.e. :math:`d` is regarded as a
60 |     fixed quantity. Typically, :math:`D` will be the set of :math:`x` values in the data set of a Gaussian Process, and
61 |     :math:`X` will be the set of :math:`x` values at which we wish to find the derivatives of the posterior GP.
62 | 
63 |     Parameters
64 |     ----------
65 |     kernel
66 |         The kernel to be differentiated. Currently supported kernels are:
67 |             - :class:`GPy.kern.src.rbf.RBF`
68 |     variable_points
69 |         A 2D array of points, with shape (num_variable_points, num_dimensions).
70 |     fixed_points
71 |         A 2D array of points, with shape (num_fixed_points, num_dimensions).
72 | 
73 |     Returns
74 |     -------
75 |     ndarray
76 |         A 4D array of shape (num_variable_points, num_fixed_points, num_dimensions, num_dimensions), whose
77 |         (i, j, k, l)-th element is the (k, l)-th mixed partial derivative of the kernel evaluated at the i-th point of
78 |         :math:`X` and the j-th point of :math:`D`.
79 | 
80 |     Raises
81 |     ------
82 |     NotImplementedError
83 |         If the provided kernel type is not supported. See the parameters list for a list of supported kernels.
84 |     """
85 |     if isinstance(kernel, RBF):
86 |         lengthscale = kernel.lengthscale.values[0]
87 |         k = kernel.K(variable_points, fixed_points)
88 | 
89 |         _, num_dimensions = variable_points.shape
90 | 
91 |         # The (i, j, k)-th element of this is the k-th component of X_i - D_j (i.e. (X_i - D_j)_k).
92 |         differences = variable_points[:, newaxis, :] - fixed_points[newaxis, :, :]
93 | 
94 |         # The (i, j, k, l)-th element of this is (X_i - D_j)_k * (X_i - D_j)_l. This can be viewed as a matrix of
95 |         # matrices, whose (i, j)-th matrix is the outer product of (X_i - D_j) with itself.
96 |         outer_products_of_differences = np.einsum('ijk,ijl->ijkl', differences, differences, optimize=True)
97 | 
98 |         transformed_outer_products = (outer_products_of_differences / lengthscale ** 2) - np.eye(num_dimensions)
99 | 
100 |         # Now multiply the (i, j)-th transformed outer product by K(X_i, D_j).
101 |         product = np.einsum('ij,ijkl->ijkl', k, transformed_outer_products, optimize=True)
102 | 
103 |         return product / (lengthscale ** 2)
104 |     else:
105 |         raise NotImplementedError
106 | 
107 | 
108 | def diagonal_hessian(kernel: Kern, x: ndarray) -> ndarray:
109 |     """Return the Hessian of a kernel considered as a function of one variable by constraining both inputs to be equal.
110 | 
111 |     Given a kernel :math:`K` and a set of points :math:`X`, this function will evaluate the Hessian of :math:`K(x, x)`
112 |     at each point :math:`x` of :math:`X`.
113 | 
114 |     Parameters
115 |     ----------
116 |     kernel
117 |         The kernel to be differentiated. Currently supported kernels are:
118 |             - All subclasses of :class:`GPy.kern.src.stationary.Stationary`
119 |     x
120 |         A 2D array of points, with shape (num_points, num_dimensions).
121 | 
122 |     Returns
123 |     -------
124 |     ndarray
125 |         A 3D array of shape (num_points, num_dimensions, num_dimensions).
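    Notes
    -----
    For a stationary kernel, :math:`K(x, x)` is a constant independent of :math:`x`, so this Hessian is identically
    zero. An illustrative sketch (shapes only; the kernel and points below are arbitrary examples, not taken from this
    package):

    >>> import numpy as np
    >>> from GPy.kern import RBF
    >>> diagonal_hessian(RBF(input_dim=2), np.zeros((3, 2))).shape
    (3, 2, 2)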
126 | """ 127 | if isinstance(kernel, Stationary): 128 | num_points, num_dimensions = x.shape 129 | 130 | return np.zeros((num_points, num_dimensions, num_dimensions)) 131 | else: 132 | raise NotImplementedError 133 | -------------------------------------------------------------------------------- /bayesquad/_maths_helpers.py: -------------------------------------------------------------------------------- 1 | """A home for mathematical operations which are used multiple times in this package.""" 2 | 3 | from numpy import ndarray, newaxis 4 | 5 | 6 | def jacobian_of_f_squared_times_g(*, 7 | f: ndarray, f_jacobian: ndarray, 8 | g: ndarray, g_jacobian: ndarray) -> ndarray: 9 | """Given two functions f and g, along with their Jacobians, returns the Jacobian of the function f^2 * g. 10 | 11 | Parameters 12 | ---------- 13 | f 14 | A 1D array whose :math:`i`-th element is the value of the function :math:`f` at point :math:`x_i`. 15 | f_jacobian 16 | A 2D array whose :math:`(i,j)`-th element is the :math:`j`-th component of the Jacobian of :math:`f` at point 17 | :math:`x_i`. 18 | g 19 | A 1D array whose :math:`i`-th element is the value of the function :math:`g` at point :math:`x_i`. 20 | g_jacobian 21 | A 2D array whose :math:`(i,j)`-th element is the :math:`j`-th component of the Jacobian of :math:`g` at point 22 | :math:`x_i`. 23 | 24 | Returns 25 | ------- 26 | jacobian : ndarray 27 | A 2D array of shape (num_points, num_dimensions). The :math:`(i, j)`-th element is the :math:`j`-th component of 28 | the Jacobian of :math:`f^2 g` at point :math:`x_i`. 29 | 30 | Notes 31 | ----- 32 | The required derivative is as follows: 33 | 34 | .. math:: 35 | 36 | \\frac{\\partial f^2 g}{\\partial x_j} = 2 f g \\frac{\\partial f}{\\partial x_j} 37 | + f^2 \\frac{\\partial g}{\\partial x_j} 38 | """ 39 | assert f.ndim == g.ndim == 1, "Function data must be a 1-dimensional array" 40 | assert f_jacobian.ndim == g_jacobian.ndim == 2, "Function Jacobian data must be a 2-dimensional array" 41 | 42 | # The Jacobian has dimensions (num_points, num_dimensions). For NumPy to broadcast the calculations 43 | # appropriately, we need to augment our 1D variables with a new axis. 44 | f, g = f[:, newaxis], g[:, newaxis] 45 | 46 | jacobian = 2 * f * g * f_jacobian + g_jacobian * f ** 2 47 | 48 | return jacobian 49 | 50 | 51 | def hessian_of_f_squared_times_g(*, 52 | f: ndarray, f_jacobian: ndarray, f_hessian: ndarray, 53 | g: ndarray, g_jacobian: ndarray, g_hessian: ndarray) -> ndarray: 54 | """Given two functions f and g, along with their Jacobian and Hessian, returns the Hessian of the function f^2 * g. 55 | 56 | Parameters 57 | ---------- 58 | f 59 | A 1D array whose :math:`i`-th element is the value of the function :math:`f` at point :math:`x_i`. 60 | f_jacobian 61 | A 2D array whose :math:`(i,j)`-th element is the :math:`j`-th component of the Jacobian of :math:`f` at point 62 | :math:`x_i`. 63 | f_hessian 64 | A 3D array whose :math:`(i,j,k)`-th element is the :math:`(j,k)`-th mixed partial derivative of :math:`f` at 65 | point :math:`x_i`. 66 | g 67 | A 1D array whose :math:`i`-th element is the value of the function :math:`g` at point :math:`x_i`. 68 | g_jacobian 69 | A 2D array whose :math:`(i,j)`-th element is the :math:`j`-th component of the Jacobian of :math:`g` at point 70 | :math:`x_i`. 71 | g_hessian 72 | A 3D array whose :math:`(i,j,k)`-th element is the :math:`(j,k)`-th mixed partial derivative of :math:`g` at 73 | point :math:`x_i`. 
74 | 
75 |     Returns
76 |     -------
77 |     hessian : ndarray
78 |         A 3D array of shape (num_points, num_dimensions, num_dimensions). The :math:`(i, j, k)`-th element is the
79 |         :math:`(j, k)`-th mixed partial derivative of :math:`f^2 g` at point :math:`x_i`.
80 | 
81 |     Notes
82 |     -----
83 |     The required derivatives are as follows:
84 | 
85 |     .. math::
86 | 
87 |         \\frac{\\partial f^2 g}{\\partial x_j} & = & 2 f g \\frac{\\partial f}{\\partial x_j}
88 |                                                      + f^2 \\frac{\\partial g}{\\partial x_j} \\\\
89 |         \\frac{\\partial^2 f^2 g}{\\partial x_j \\partial x_k} & = &
90 |             2 f \\left( g \\frac{\\partial^2 f}{\\partial x_j \\partial x_k}
91 |                 + \\frac{\\partial g}{\\partial x_j} \\frac{\\partial f}{\\partial x_k}
92 |                 + \\frac{\\partial f}{\\partial x_j} \\frac{\\partial g}{\\partial x_k} \\right) \\\\
93 |             & & + 2 g \\frac{\\partial f}{\\partial x_j} \\frac{\\partial f}{\\partial x_k}
94 |                 + f^2 \\frac{\\partial^2 g}{\\partial x_j \\partial x_k}
95 |     """
96 |     assert f.ndim == g.ndim == 1, "Function data must be a 1-dimensional array"
97 |     assert f_jacobian.ndim == g_jacobian.ndim == 2, "Function Jacobian data must be a 2-dimensional array"
98 |     assert f_hessian.ndim == g_hessian.ndim == 3, "Function Hessian data must be a 3-dimensional array"
99 | 
100 |     # The Hessian has dimensions (num_points, num_dimensions, num_dimensions). For NumPy to broadcast the calculations
101 |     # appropriately, we need to augment our 1D variables with new axes.
102 |     f, g = f[:, newaxis, newaxis], g[:, newaxis, newaxis]
103 | 
104 |     # The (i,j,k)-th element of these arrays is the j-th component of the Jacobian at x_i (the k axis has size 1).
105 |     f_jacobian_dxj, g_jacobian_dxj = f_jacobian[:, :, newaxis], g_jacobian[:, :, newaxis]
106 | 
107 |     # The (i,j,k)-th element of these arrays is the k-th component of the Jacobian at x_i (the j axis has size 1).
108 |     f_jacobian_dxk, g_jacobian_dxk = f_jacobian[:, newaxis, :], g_jacobian[:, newaxis, :]
109 | 
110 |     hessian = \
111 |         2 * f * (
112 |             f_hessian * g +
113 |             g_jacobian_dxj * f_jacobian_dxk +
114 |             f_jacobian_dxj * g_jacobian_dxk
115 |         ) + 2 * g * f_jacobian_dxj * f_jacobian_dxk \
116 |         + g_hessian * f ** 2
117 | 
118 |     return hessian
119 | 
--------------------------------------------------------------------------------
/bayesquad/_optimisation.py:
--------------------------------------------------------------------------------
1 | """Utility functions wrapping a scipy optimizer."""
2 | 
3 | from typing import Tuple, Callable
4 | 
5 | import numpy as np
6 | import scipy.optimize
7 | from numpy import ndarray
8 | 
9 | DEFAULT_GTOL = 1e-2
10 | 
11 | DEFAULT_MINIMIZER_KWARGS = {'method': 'BFGS',
12 |                             'jac': True,
13 |                             'options': {'gtol': DEFAULT_GTOL}}
14 | 
15 | 
16 | def multi_start_maximise(objective_function: Callable,
17 |                          initial_points: ndarray, **kwargs) -> Tuple[ndarray, float]:
18 |     """Run multi-start maximisation of the given objective function.
19 | 
20 |     Warnings
21 |     --------
22 |     This is a hack to take advantage of fast vectorised computation and avoid expensive python loops. There may be some
23 |     issues with this method!
24 | 
25 |     The objective function provided here must be a vectorised function. We take advantage of the fast computation of
26 |     vectorised functions to view a multi-start optimisation as a single pass of a higher-dimensional optimisation,
27 |     rather than several passes of a low-dimensional optimisation (which would require an expensive python loop).
We 28 | simply concatenate all the points where the function is to be evaluated into a single high-dimensional vector, give 29 | the function value as the sum of all the individual function values, and give the Jacobian as the concatenation of 30 | all the individual Jacobians. In this way we can essentially perform many optimisations in parallel. Note that 31 | there is an issue here with the stopping condition: we can only consider all optimisations together, so even if most 32 | have come very close to an optimum, the process will continue as long as one is far away. However, this does seem to 33 | perform well in practice. 34 | 35 | Parameters 36 | ---------- 37 | objective_function 38 | Function to be maximised. Must return both the function value and the Jacobian. Must also accept a 2D array of 39 | points, returning a 1D array and a 2D array for the function values and Jacobians respectively. 40 | initial_points 41 | Points at which to begin the optimisation, as a 2D array of shape (num_points, num_dimensions). 42 | **kwargs 43 | Keyword arguments will be included in the 'options' dict passed to the underlying scipy optimiser. 44 | 45 | Returns 46 | ------- 47 | ndarray 48 | The location of the found maximum. 49 | float 50 | The value of the objective function at the found maximum. 51 | """ 52 | minimizer_kwargs = DEFAULT_MINIMIZER_KWARGS.copy() 53 | minimizer_kwargs['options'] = {**minimizer_kwargs['options'], **kwargs} # This merges the two dicts. 54 | 55 | num_points, num_dims = np.shape(initial_points) 56 | 57 | def function_to_minimise(x, *inner_args, **inner_kwargs): 58 | x = np.reshape(x, (num_points, num_dims)) 59 | 60 | value, jacobian = objective_function(x, *inner_args, **inner_kwargs) 61 | combined_value, combined_jacobian = -value.sum(), -jacobian.ravel() 62 | 63 | if not np.isfinite(combined_value) or not np.all(np.isfinite(combined_jacobian)): 64 | raise FloatingPointError("Objective function for multi-start optimisation returned NaN or infinity.") 65 | 66 | return combined_value, combined_jacobian 67 | 68 | maximum = scipy.optimize.minimize(function_to_minimise, initial_points, **minimizer_kwargs) 69 | maxima = maximum.x.reshape(num_points, num_dims) 70 | 71 | values, _ = objective_function(maxima) 72 | max_index = np.argmax(values) 73 | 74 | optimal_x = maxima[max_index, :] 75 | optimal_y = values[max_index] 76 | 77 | return optimal_x, optimal_y 78 | 79 | 80 | def _indices_where(array: ndarray) -> Tuple: 81 | """Returns the indices where the elements of `array` are True.""" 82 | return np.nonzero(array) 83 | -------------------------------------------------------------------------------- /bayesquad/_transformations.py: -------------------------------------------------------------------------------- 1 | """Transform functions""" 2 | from functools import wraps 3 | from typing import Callable 4 | 5 | import numpy as np 6 | import numpy.ma as ma 7 | 8 | 9 | def log_of_function(original_function: Callable) -> Callable: 10 | """Given a function f and its Jacobian, return the log(f) and the Jacobian of log(f). 11 | 12 | f may evaluate to 0 in some places (e.g. due to numerical issues), so we set the log to be -1e10 where f is 0. 13 | Since it's not possible to evaluate the log Jacobian in this case, we set the Jacobian to be the Jacobian of the 14 | original function. 15 | 16 | Parameters 17 | ---------- 18 | original_function 19 | A function returning a tuple of arrays (f(x), Jac(f(x))). The second element of this tuple may be `None`. 
20 | 21 | Returns 22 | ------- 23 | log_function : Callable 24 | A function returning a tuple of arrays (log(f(x)), Jac(log(f(x)))). If `original_function` returns `None` for 25 | Jac(f(x)), then Jac(log(f(x))) will also be `None`. 26 | """ 27 | @wraps(original_function) 28 | def log_function(x, *args, **kwargs): 29 | value, jacobian = original_function(x, *args, **kwargs) 30 | masked_value = ma.masked_equal(value, 0) 31 | 32 | log_value = ma.log(masked_value) 33 | log_value = ma.filled(log_value, -1e10) 34 | 35 | if jacobian is not None: 36 | # We need expand_dims here because value is lower-dimensional than jacobian, but they must have the same 37 | # dimensionality for numpy broadcasting to work here. 38 | masked_value = ma.filled(masked_value, 1) 39 | log_jacobian = jacobian / np.expand_dims(masked_value, -1) 40 | else: 41 | log_jacobian = None 42 | 43 | return log_value, log_jacobian 44 | 45 | return log_function 46 | -------------------------------------------------------------------------------- /bayesquad/_util.py: -------------------------------------------------------------------------------- 1 | """Miscellaneous utility functions""" 2 | 3 | import numpy as np 4 | from numpy import ndarray 5 | 6 | 7 | def validate_dimensions(x: ndarray, expected_dimensionality: int): 8 | """Checks that `x` represents data of dimensionality `expected_dimensions`. 9 | 10 | Raises 11 | ------ 12 | ValueError 13 | If `x` is not a 2D array, or if the second dimension of `x` does not have size `expected_dimensions`. 14 | """ 15 | array_dimensions = np.ndim(x) 16 | 17 | if array_dimensions != 2: 18 | raise ValueError("Expected a 2-dimensional array, but got a {}-dimensional array.".format(array_dimensions)) 19 | 20 | actual_dimensionality = np.size(x, 1) 21 | 22 | if actual_dimensionality != expected_dimensionality: 23 | raise ValueError("Expected data in {} dimensions, but got data in {} dimensions." 24 | .format(expected_dimensionality, actual_dimensionality)) 25 | -------------------------------------------------------------------------------- /bayesquad/acquisition_functions.py: -------------------------------------------------------------------------------- 1 | """Acquisition functions and related functions.""" 2 | import numpy as np 3 | 4 | from .plotting import plottable 5 | from .quadrature import IntegrandModel 6 | 7 | 8 | def model_variance(integrand_model: IntegrandModel): 9 | 10 | @plottable("Model variance", default_plotting_parameters={'calculate_jacobian': False}) 11 | def f(x, *, calculate_jacobian=True): 12 | """Evaluate the variance, and the Jacobian of the variance, for the given `IntegrandModel` at a point, or a set 13 | of points. 14 | 15 | Given an array of shape (num_points, num_dimensions), returns an array of shape (num_points) containing the 16 | function values and an array of shape (num_points, num_dimensions) containing the function Jacobians. 17 | 18 | Given an array of shape (num_dimensions), returns a 0D array containing the function value and an array of shape 19 | (num_dimensions) containing the function Jacobian. 20 | 21 | If the Jacobian is not required (e.g. for plotting), the relevant calculations can be disabled by setting 22 | `calculate_jacobian=False`. 
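        For illustration, a hypothetical sketch of typical usage (cf. `batch_selection.py`, where the returned function
        is wrapped in `log_of_function` and passed to `multi_start_maximise`; `integrand_model` is assumed to exist):

        >>> acquisition = model_variance(integrand_model)       # doctest: +SKIP
        >>> variance, jacobian = acquisition(x)                  # doctest: +SKIP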
23 | """ 24 | _, variance = integrand_model.posterior_mean_and_variance(x) 25 | 26 | if calculate_jacobian: 27 | variance_jacobian = integrand_model.posterior_variance_jacobian(x) 28 | else: 29 | variance_jacobian = None 30 | 31 | return variance, variance_jacobian 32 | 33 | return f 34 | 35 | 36 | def model_variance_norm_of_gradient_squared(integrand_model: IntegrandModel): 37 | 38 | @plottable("Gradient squared", default_plotting_parameters={'calculate_jacobian': False}) 39 | def f(x, *, calculate_jacobian=True): 40 | """Evaluate the squared norm of the gradient of the variance, and the Jacobian of this quantity, for the given 41 | `IntegrandModel` at a point, or a set of points. 42 | 43 | Given an array of shape (num_points, num_dimensions), returns an array of shape (num_points) containing the 44 | function values and an array of shape (num_points, num_dimensions) containing the function Jacobians. 45 | 46 | Given an array of shape (num_dimensions), returns a 0D array containing the function value and an array of shape 47 | (num_dimensions) containing the function Jacobian. 48 | 49 | If the Jacobian is not required (e.g. for plotting), the relevant calculations can be disabled by setting 50 | `calculate_jacobian=False`. 51 | """ 52 | variance_jacobian = integrand_model.posterior_variance_jacobian(x) 53 | 54 | # Inner product of the Jacobian with itself, for each point. 55 | gradient_squared = np.einsum('...i,...i->...', variance_jacobian, variance_jacobian, optimize=True) 56 | 57 | if calculate_jacobian: 58 | variance_hessian = integrand_model.posterior_variance_hessian(x) 59 | 60 | # Matrix product of Hessian and Jacobian, for each point. 61 | gradient_squared_jacobian = 2 * np.einsum('...ij,...j->...i', 62 | variance_hessian, 63 | variance_jacobian, 64 | optimize=True) 65 | else: 66 | gradient_squared_jacobian = None 67 | 68 | return gradient_squared, gradient_squared_jacobian 69 | 70 | return f 71 | -------------------------------------------------------------------------------- /bayesquad/batch_selection.py: -------------------------------------------------------------------------------- 1 | """Methods for selecting a batch of points to evaluate for Bayesian quadrature.""" 2 | from abc import ABC, abstractmethod 3 | from math import sqrt 4 | from typing import List, Callable 5 | 6 | import numpy as np 7 | import numpy.ma as ma 8 | from numpy import ndarray 9 | 10 | from .acquisition_functions import model_variance, model_variance_norm_of_gradient_squared 11 | from ._optimisation import multi_start_maximise 12 | from ._transformations import log_of_function 13 | from .plotting import plottable 14 | from .quadrature import IntegrandModel 15 | 16 | LOCAL_PENALISATION = "Local Penalisation" 17 | KRIGING_BELIEVER = "Kriging Believer" 18 | KRIGING_OPTIMIST = "Kriging Optimist" 19 | 20 | 21 | def select_batch(integrand_model: IntegrandModel, 22 | batch_size: int, 23 | batch_method: str = LOCAL_PENALISATION) -> List[ndarray]: 24 | """Select a batch of points at which to evaluate the integrand. 25 | 26 | Parameters 27 | ---------- 28 | integrand_model 29 | The model with which we wish to perform Bayesian quadrature. 30 | batch_size 31 | The number of points to return in the new batch. 32 | batch_method 33 | The method by which to compute the new batch. Currently supported methods are: 34 | - "Local Penalisation" 35 | - "Kriging Believer" 36 | - "Kriging Optimist" 37 | 38 | Returns 39 | ------- 40 | list[ndarray] 41 | A list of arrays. Each array is a point of the new batch. 
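    Examples
    --------
    A hypothetical sketch (assuming an existing `IntegrandModel` called `integrand_model` and a vectorised,
    non-negative `integrand` function):

    >>> batch = select_batch(integrand_model, batch_size=4, batch_method="Local Penalisation")  # doctest: +SKIP
    >>> x = np.array(batch)                                                                     # doctest: +SKIP
    >>> integrand_model.update(x, integrand(x))                                                 # doctest: +SKIP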
42 |     """
43 |     if batch_method == LOCAL_PENALISATION:
44 |         method = _LocalPenalisation(integrand_model)
45 |     elif batch_method == KRIGING_BELIEVER:
46 |         method = _KrigingBeliever(integrand_model)
47 |     elif batch_method == KRIGING_OPTIMIST:
48 |         method = _KrigingOptimist(integrand_model)
49 |     else:
50 |         raise NotImplementedError("{} is not a supported batch method.".format(batch_method))
51 | 
52 |     return method.select_batch(batch_size)
53 | 
54 | 
55 | class _BatchSelectionMethod(ABC):
56 |     """A method for selecting a batch of points at which to evaluate an integrand, based on a model of the integrand.
57 | 
58 |     This selection will always be done on the basis of an acquisition function, which can be chosen or modified by
59 |     this class. In general, we sequentially select batch points by maximising the acquisition function (currently, this
60 |     is fixed to be the posterior variance of the integrand). In order to avoid selecting the same point (or
61 |     very nearby points) repeatedly, we update the acquisition function after selecting each point. Different methods
62 |     will perform this update in different ways."""
63 | 
64 |     def __init__(self, integrand_model: IntegrandModel):
65 |         self._integrand_model = integrand_model
66 |         self._batch: List[ndarray] = []
67 | 
68 |     def select_batch(self, batch_size: int) -> List[ndarray]:
69 |         """Select a batch containing the given number of points."""
70 |         while len(self._batch) < batch_size:
71 |             acquisition_function = self._get_acquisition_function()
72 |             initial_points = self._select_initial_points()
73 |             batch_point, _ = multi_start_maximise(acquisition_function, initial_points)
74 | 
75 |             self._batch.append(batch_point)
76 | 
77 |         self._cleanup()
78 | 
79 |         return self._batch
80 | 
81 |     @abstractmethod
82 |     def _get_acquisition_function(self) -> Callable:
83 |         """Based on the current state (including at least the current state of the `IntegrandModel` and the batch points
84 |         selected so far), return the acquisition function to be maximised to select the next batch point."""
85 | 
86 |     def _cleanup(self):
87 |         """After the full batch has been chosen, perform any necessary cleanup/finalisation.
By default, this method 88 | takes no action.""" 89 | return 90 | 91 | def _select_initial_points(self): 92 | """Select the initial points for multi-start maximisation of the acquisition function.""" 93 | num_initial_points = 10 * self._integrand_model.dimensions 94 | 95 | return self._integrand_model.prior.sample(num_initial_points) 96 | 97 | 98 | class _KrigingBeliever(_BatchSelectionMethod): 99 | """After each batch point is selected, this method updates the model with fictitious data generated by taking the 100 | posterior mean of the model at the batch point.""" 101 | 102 | def __init__(self, *args, **kwargs): 103 | self._fantasised_evaluations: List[ndarray] = [] 104 | super().__init__(*args, **kwargs) 105 | 106 | def _get_acquisition_function(self) -> Callable: 107 | if self._batch: 108 | batch_point = self._batch[-1] 109 | 110 | mean_y, _ = self._integrand_model.posterior_mean_and_variance(batch_point) 111 | self._fantasised_evaluations.append(mean_y) 112 | 113 | self._integrand_model.update(batch_point, mean_y) 114 | 115 | acquisition_function = model_variance(self._integrand_model) 116 | return log_of_function(acquisition_function) 117 | 118 | def _cleanup(self): 119 | points_with_fantasised_evaluations = self._batch[:-1] 120 | self._integrand_model.remove(points_with_fantasised_evaluations, self._fantasised_evaluations) 121 | 122 | 123 | class _KrigingOptimist(_BatchSelectionMethod): 124 | """After each batch point is selected, this method updates the model with fictitious data generated by taking the 125 | posterior mean of the model at the batch point, plus two posterior standard deviations.""" 126 | 127 | def __init__(self, *args, **kwargs): 128 | self._fantasised_evaluations: List[ndarray] = [] 129 | super().__init__(*args, **kwargs) 130 | 131 | def _get_acquisition_function(self) -> Callable: 132 | if self._batch: 133 | batch_point = self._batch[-1] 134 | 135 | mean_y, var_y = self._integrand_model.posterior_mean_and_variance(batch_point) 136 | optimistic_y = mean_y + 2 * np.sqrt(var_y) 137 | self._fantasised_evaluations.append(optimistic_y) 138 | 139 | self._integrand_model.update(batch_point, optimistic_y) 140 | 141 | acquisition_function = model_variance(self._integrand_model) 142 | return log_of_function(acquisition_function) 143 | 144 | def _cleanup(self): 145 | points_with_fantasised_evaluations = self._batch[:-1] 146 | self._integrand_model.remove(points_with_fantasised_evaluations, self._fantasised_evaluations) 147 | 148 | 149 | class _LocalPenalisation(_BatchSelectionMethod): 150 | """After each batch point is selected, this method directly penalises the acquisition function around the point. 151 | 152 | Notes 153 | ----- 154 | In this method, after choosing a batch point we maximise the norm of the gradient of the acquisition function in the 155 | neighbourhood of this point. We then take the penalised function to be the minimum of the original function and a 156 | cone with half of this maximal gradient around the selected point. We choose half the gradient since the true 157 | function must have zero gradient at the selected point and at the nearest maximum, so the average gradient here will 158 | not be as large as the maximum. 159 | 160 | Since this minimum of multiple functions will not be smooth, we use an approximation to the minimum (a "soft 161 | minimum"). This is done by taking the p-norm of all function values for a negative value of p. 
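    Concretely, writing :math:`v_1, \\dots, v_n` for the values of the acquisition function and of the penalising
    cones at a point, the soft minimum computed in `_get_soft_penalised_log_acquisition_function` below is the norm
    with negative exponent

    .. math::

        \\mathrm{softmin}(v_1, \\dots, v_n) = \\Big( \\sum_i v_i^{-q} \\Big)^{-1/q}, \\qquad q > 0,

    which is a smooth lower bound on :math:`\\min_i v_i` and approaches it as :math:`q \\to \\infty`. The
    implementation uses :math:`q = 6`, factors out the smallest value for numerical stability, and returns the log of
    this quantity.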
162 | """ 163 | 164 | def __init__(self, *args, **kwargs): 165 | self._penaliser_gradients: List[float] = [] 166 | super().__init__(*args, **kwargs) 167 | 168 | def _get_acquisition_function(self) -> Callable: 169 | if self._batch: 170 | max_gradient = self._find_max_gradient_near_latest_batch_point() 171 | 172 | self._penaliser_gradients.append(max_gradient / 2) 173 | 174 | acquisition_function = model_variance(self._integrand_model) 175 | 176 | softmin_penalised_log_acquisition_function = \ 177 | self._get_soft_penalised_log_acquisition_function(acquisition_function) 178 | 179 | return softmin_penalised_log_acquisition_function 180 | 181 | def _find_max_gradient_near_latest_batch_point(self) -> float: 182 | batch_point = self._batch[-1] 183 | 184 | num_local_initial_points = self._integrand_model.dimensions * 10 185 | local_initial_points = self._get_local_initial_points(batch_point, num_local_initial_points) 186 | 187 | _log_variance_gradient_squared_and_jacobian = \ 188 | log_of_function(model_variance_norm_of_gradient_squared(self._integrand_model)) 189 | 190 | _, log_max_gradient_squared = multi_start_maximise(_log_variance_gradient_squared_and_jacobian, 191 | local_initial_points) 192 | 193 | max_gradient_squared = np.exp(log_max_gradient_squared) 194 | return sqrt(max_gradient_squared) 195 | 196 | def _get_soft_penalised_log_acquisition_function(self, acquisition_function: Callable) -> Callable: 197 | """Create a function which will return the log of a soft minimum of the given acquisition function and the given 198 | penalisers at any point, or set of points. 199 | 200 | The soft minimisation is performed by taking the p-norm of all function values for a negative value of p. This 201 | gives a differentiable function which is approximately equal to the min of the given functions. 202 | 203 | If the Jacobian is not required (e.g. for plotting), the relevant calculations can be disabled by passing 204 | `calculate_jacobian=False` as a parameter to the returned function. 205 | """ 206 | penaliser_centres = self._batch 207 | penaliser_gradients = self._penaliser_gradients 208 | 209 | penalisers = [self._cone(centre, gradient) for centre, gradient in zip(penaliser_centres, penaliser_gradients)] 210 | p = 6 211 | 212 | @plottable("Soft penalised log acquisition function", default_plotting_parameters={'calculate_jacobian': False}) 213 | def penalised_acquisition_function(x, *, calculate_jacobian=True): 214 | function_evaluations = \ 215 | [acquisition_function(x, calculate_jacobian=calculate_jacobian)] + [f(x) for f in penalisers] 216 | function_values, function_jacobians = [np.array(ret) for ret in zip(*function_evaluations)] 217 | 218 | # This is necessary to ensure that function_values has the same dimensions as function_jacobians, so that we 219 | # can index into both arrays in a consistent manner. 220 | function_values = np.expand_dims(function_values, -1) 221 | 222 | # We want to avoid dividing by zero and taking the log of zero, so we mask out all zeroes. 223 | function_values = ma.masked_equal(function_values, 0) 224 | 225 | min_function_values = np.min(function_values, axis=0) 226 | min_function_values = ma.masked_array(min_function_values, mask=np.any(function_values.mask, axis=0)) 227 | 228 | # Any values more than roughly an order of magnitude from the minimum value will be irrelevant to the final 229 | # result, but might cause overflows, so we clip them here. 
230 | scaled_function_values = (function_values / min_function_values).clip(max=1e2) 231 | 232 | scaled_inverse_power_sum = (1 / (scaled_function_values ** p)).sum(axis=0) 233 | values = -ma.log(scaled_inverse_power_sum) / p + ma.log(min_function_values) 234 | values = ma.filled(values, -1e9) 235 | 236 | if calculate_jacobian: 237 | scaled_function_jacobians = (function_jacobians / min_function_values).clip(max=1e2, min=-1e2) 238 | jacobian_numerator = (1 / (scaled_function_values ** (p + 1)) * scaled_function_jacobians).sum(axis=0) 239 | jacobians = jacobian_numerator / scaled_inverse_power_sum 240 | jacobians = ma.filled(jacobians, np.random.randn()) 241 | else: 242 | jacobians = None 243 | 244 | return values.squeeze(), jacobians 245 | 246 | return penalised_acquisition_function 247 | 248 | @staticmethod 249 | def _get_local_initial_points(central_point: ndarray, num_points: int): 250 | """Get a set of points close to a given point.""" 251 | perturbations = 0.01 * np.random.randn(num_points, *central_point.shape) 252 | return central_point + perturbations 253 | 254 | @staticmethod 255 | def _cone(centre: ndarray, gradient: float): 256 | def f(x): 257 | """Evaluate a cone around the given centre with the given gradient, i.e. a function whose value increases 258 | linearly with distance from the centre. 259 | 260 | Given an array of shape (num_points, num_dimensions), returns an array of shape (num_points) containing the 261 | function values and an array of shape (num_points, num_dimensions) containing the function Jacobians. 262 | 263 | Given an array of shape (num_dimensions), returns a 0D array containing the function value and an array of 264 | shape (num_dimensions) containing the function Jacobian. 265 | """ 266 | distance = np.linalg.norm(x - centre, axis=-1) 267 | 268 | value = distance * gradient 269 | 270 | distance = np.expand_dims(distance, -1) 271 | distance = ma.masked_equal(distance, 0) # Avoid division by zero at the centre. 272 | 273 | jacobian = (x - centre) * gradient / distance 274 | 275 | # The Jacobian isn't defined at the centre of the cone but we must return a value. 276 | jacobian = ma.filled(jacobian, x) 277 | 278 | return value, jacobian 279 | 280 | return f 281 | -------------------------------------------------------------------------------- /bayesquad/plotting.py: -------------------------------------------------------------------------------- 1 | """Functions to allow plotting code to be decoupled from the rest of the code.""" 2 | 3 | from functools import wraps 4 | from types import MappingProxyType 5 | from typing import Callable 6 | 7 | 8 | class _Registry: 9 | def __init__(self): 10 | self._callback_registry = {} 11 | 12 | def trigger_callbacks(self, identifier: str, func: Callable): 13 | if identifier not in self._callback_registry: 14 | return 15 | 16 | for callback in self._callback_registry[identifier]: 17 | callback(func) 18 | 19 | def add_callback(self, identifier: str, callback: Callable): 20 | if identifier not in self._callback_registry: 21 | self._callback_registry[identifier] = [] 22 | 23 | self._callback_registry[identifier].append(callback) 24 | 25 | 26 | _function_registry = _Registry() 27 | 28 | # Using a mutable object (e.g. an empty dict) as a default parameter can lead to undesirable behaviour, so we use this 29 | # read-only proxy. 
30 | # 31 | # See: 32 | # The problem: https://stackoverflow.com/q/1132941 33 | # A solution: https://stackoverflow.com/a/30638022 34 | _EMPTY = MappingProxyType({}) 35 | 36 | 37 | def plottable(identifier: str, *, default_plotting_parameters=_EMPTY): 38 | def decorator(func: Callable): 39 | @wraps(func) 40 | def func_for_plotting(*args, **kwargs): 41 | # Merge default_plotting_parameters into kwargs 42 | kwargs = {**default_plotting_parameters, **kwargs} 43 | return func(*args, **kwargs) 44 | 45 | _function_registry.trigger_callbacks(identifier, func_for_plotting) 46 | 47 | return func 48 | return decorator 49 | 50 | 51 | def add_callback(identifier: str, callback: Callable): 52 | _function_registry.add_callback(identifier, callback) 53 | -------------------------------------------------------------------------------- /bayesquad/priors.py: -------------------------------------------------------------------------------- 1 | """Classes representing probability distributions, intended to be integrated against a likelihood.""" 2 | 3 | from abc import ABC, abstractmethod 4 | from typing import Tuple 5 | 6 | import numpy as np 7 | import scipy.stats 8 | from numpy import ndarray, newaxis 9 | 10 | from ._util import validate_dimensions 11 | 12 | 13 | class Prior(ABC): 14 | """A prior, providing methods for sampling, and for pointwise evaluation of the pdf and its derivatives.""" 15 | 16 | @abstractmethod 17 | def gradient(self, x: ndarray) -> Tuple[ndarray, ndarray]: 18 | """Compute the Jacobian and Hessian of the prior's pdf at the given set of points. 19 | 20 | Parameters 21 | ---------- 22 | x 23 | A 2D array of the points at which to evaluate the derivatives, with shape (num_points, num_dimensions). 24 | 25 | Returns 26 | ------- 27 | jacobian 28 | A 2D array of shape (num_points, num_dimensions), containing the value of the Jacobian at each point. 29 | hessian 30 | A 3D array of shape (num_points, num_dimensions, num_dimensions), whose (i, j, k)-th element is the 31 | (j, k)-th mixed partial derivative of the pdf at the i-th point of `x`. 32 | """ 33 | 34 | @abstractmethod 35 | def sample(self, num_points: int = 1) -> ndarray: 36 | """Sample `num_points` points independently from the prior. 37 | 38 | Returns 39 | ------- 40 | ndarray 41 | `num_points` samples from the prior, as a 2D array of shape (num_points, num_dimensions). 42 | """ 43 | 44 | @abstractmethod 45 | def logpdf(self, x: ndarray) -> ndarray: 46 | """Evaluate the prior's log pdf at the given set of points. 47 | 48 | Parameters 49 | ---------- 50 | x 51 | An array of shape (num_points, num_dimensions). 52 | 53 | Returns 54 | ------- 55 | ndarray 56 | A 1D array of shape (num_points). 57 | """ 58 | 59 | @abstractmethod 60 | def __call__(self, x: ndarray) -> ndarray: 61 | """Evaluate the prior's pdf at the given set of points. 62 | 63 | Parameters 64 | ---------- 65 | x 66 | An array of shape (num_points, num_dimensions). 67 | 68 | Returns 69 | ------- 70 | ndarray 71 | A 1D array of shape (num_points). 72 | """ 73 | 74 | 75 | class Gaussian(Prior): 76 | """A multivariate Gaussian prior. 77 | 78 | Parameters 79 | ---------- 80 | mean 81 | A 1D array of shape (num_dimensions). 82 | covariance 83 | A 2D array of shape (num_dimensions, num_dimensions). 84 | 85 | Attributes 86 | ---------- 87 | mean : ndarray 88 | A 1D array of shape (num_dimensions). 89 | covariance : ndarray 90 | A 2D array of shape (num_dimensions, num_dimensions). 91 | precision : ndarray 92 | The inverse of the covariance matrix. 
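    Examples
    --------
    A brief illustration (shapes only, since samples are random):

    >>> import numpy as np
    >>> prior = Gaussian(mean=np.zeros(2), covariance=np.eye(2))
    >>> prior.sample(5).shape
    (5, 2)
    >>> prior(np.zeros((1, 2))).shape
    (1,)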
93 | """ 94 | def __init__(self, mean: ndarray, covariance: ndarray): 95 | self.mean = mean 96 | self.covariance = covariance 97 | self.precision = np.linalg.inv(covariance) 98 | 99 | self._dimensions = np.size(mean) 100 | self._multivariate_normal = scipy.stats.multivariate_normal(mean=mean, cov=covariance) 101 | 102 | def sample(self, num_points: int = 1) -> ndarray: 103 | """See :func:`~Prior.sample`""" 104 | sample = self._multivariate_normal.rvs(size=num_points) 105 | 106 | if self._dimensions == 1: 107 | sample = sample[:, newaxis] 108 | 109 | return sample 110 | 111 | def gradient(self, x: ndarray) -> Tuple[ndarray, ndarray]: 112 | """See :func:`~Prior.gradient`""" 113 | validate_dimensions(x, self._dimensions) 114 | 115 | # The (i, j)-th element of this is (covariance^-1 (x_i - mean))_j, where x_i is the i-th point of x. 116 | cov_inv_x = np.einsum('jk,ik->ij', self.precision, x - self.mean, optimize=True) 117 | 118 | jacobian = -self(x)[:, newaxis] * cov_inv_x 119 | 120 | # The outer product of each row of cov_inv_x with itself. 121 | outer_products = cov_inv_x[:, newaxis, :] * cov_inv_x[:, :, newaxis] 122 | 123 | hessian = self(x)[:, newaxis, newaxis] * (outer_products - self.precision[newaxis, :, :]) 124 | 125 | return jacobian, hessian 126 | 127 | def logpdf(self, x: ndarray) -> ndarray: 128 | """See :func:`~Prior.logpdf`""" 129 | validate_dimensions(x, self._dimensions) 130 | return np.atleast_1d(self._multivariate_normal.logpdf(x)) 131 | 132 | def __call__(self, x: ndarray) -> ndarray: 133 | """See :func:`~Prior.__call__`""" 134 | validate_dimensions(x, self._dimensions) 135 | return np.atleast_1d(self._multivariate_normal.pdf(x)) 136 | -------------------------------------------------------------------------------- /bayesquad/quadrature.py: -------------------------------------------------------------------------------- 1 | """Provides a model of the integrand, with the capability to perform Bayesian quadrature.""" 2 | 3 | from typing import Tuple, Union, List 4 | 5 | import numpy as np 6 | from GPy.kern import Kern, RBF 7 | from multimethod import multimethod 8 | from numpy import ndarray, newaxis 9 | 10 | from ._decorators import flexible_array_dimensions 11 | from .gps import WarpedGP, WsabiLGP 12 | from ._maths_helpers import jacobian_of_f_squared_times_g, hessian_of_f_squared_times_g 13 | from .priors import Gaussian, Prior 14 | 15 | 16 | class IntegrandModel: 17 | """Represents the product of a warped Gaussian Process and a prior. 18 | 19 | Typically, this product is the function that we're interested in integrating.""" 20 | 21 | def __init__(self, warped_gp: WarpedGP, prior: Prior): 22 | self.warped_gp = warped_gp 23 | self.prior = prior 24 | self.dimensions = warped_gp.dimensions 25 | 26 | @flexible_array_dimensions 27 | def posterior_mean_and_variance(self, x: ndarray) -> Tuple[ndarray, ndarray]: 28 | """Get the posterior mean and variance of the product of warped GP and prior at a point, or a set of points. 29 | 30 | Parameters 31 | ---------- 32 | x 33 | The point(s) at which to evaluate the posterior mean and variance. A 2D array of shape 34 | (num_points, num_dimensions), or a 1D array of shape (num_dimensions). 35 | 36 | Returns 37 | ------- 38 | mean : ndarray 39 | A 1D array of shape (num_points) if the input was 2D, or a 0D array if the input was 1D. The :math:`i`-th 40 | element is the posterior mean at the :math:`i`-th point of `x`. 41 | variance : ndarray 42 | A 1D array of shape (num_points) if the input was 2D, or a 0D array if the input was 1D. 
The :math:`i`-th 43 | element is the posterior variance at the :math:`i`-th point of `x`. 44 | """ 45 | warped_gp_mean, warped_gp_variance = self.warped_gp.posterior_mean_and_variance(x) 46 | prior = self.prior(x) 47 | 48 | mean = warped_gp_mean * prior 49 | variance = warped_gp_variance * prior ** 2 50 | 51 | return mean, variance 52 | 53 | @flexible_array_dimensions 54 | def posterior_variance_jacobian(self, x: ndarray) -> ndarray: 55 | """Get the Jacobian of the posterior variance of the product of warped GP and prior at a point or set of points. 56 | 57 | Parameters 58 | ---------- 59 | x 60 | The point(s) at which to evaluate the Jacobian. A 2D array of shape (num_points, num_dimensions), or a 1D 61 | array of shape (num_dimensions). 62 | 63 | Returns 64 | ------- 65 | jacobian : ndarray 66 | A 2D array of shape (num_points, num_dimensions) if the input was 2D, or a 1D array of shape 67 | (num_dimensions) if the input was 1D. The :math:`(i, j)`-th element is the :math:`j`-th component of the 68 | Jacobian of the posterior variance at the :math:`i`-th point of `x`. 69 | 70 | Notes 71 | ----- 72 | Writing :math:`\\pi(x)` for the prior, and :math:`V(x)` for the posterior variance, the posterior variance of 73 | the product is :math:`\\pi(x)^2 V(x)`. 74 | """ 75 | _, gp_variance = self.warped_gp.posterior_mean_and_variance(x) 76 | gp_variance_jacobian = self.warped_gp.posterior_variance_jacobian(x) 77 | 78 | prior = self.prior(x) 79 | prior_jacobian, _ = self.prior.gradient(x) 80 | 81 | return jacobian_of_f_squared_times_g( 82 | f=prior, f_jacobian=prior_jacobian, 83 | g=gp_variance, g_jacobian=gp_variance_jacobian) 84 | 85 | @flexible_array_dimensions 86 | def posterior_variance_hessian(self, x: ndarray) -> ndarray: 87 | """Get the Hessian of the posterior variance of the product of warped GP and prior at a point, or set of points. 88 | 89 | Parameters 90 | ---------- 91 | x 92 | The point(s) at which to evaluate the Hessian. A 2D array of shape (num_points, num_dimensions), or a 1D 93 | array of shape (num_dimensions). 94 | 95 | Returns 96 | ------- 97 | hessian : ndarray 98 | A 3D array of shape (num_points, num_dimensions, num_dimensions) if the input was 2D, or a 2D array of shape 99 | (num_dimensions, num_dimensions) if the input was 1D. The :math:`(i, j, k)`-th element is the 100 | :math:`(j, k)`-th mixed partial derivative of the posterior variance at the :math:`i`-th point of `x`. 101 | 102 | Notes 103 | ----- 104 | Writing :math:`\\pi(x)` for the prior, and :math:`V(x)` for the posterior variance, the posterior variance of 105 | the product is :math:`\\pi(x)^2 V(x)`. 106 | """ 107 | _, gp_variance = self.warped_gp.posterior_mean_and_variance(x) 108 | gp_variance_jacobian = self.warped_gp.posterior_variance_jacobian(x) 109 | gp_variance_hessian = self.warped_gp.posterior_variance_hessian(x) 110 | 111 | prior = self.prior(x) 112 | prior_jacobian, prior_hessian = self.prior.gradient(x) 113 | 114 | return hessian_of_f_squared_times_g( 115 | f=prior, f_jacobian=prior_jacobian, f_hessian=prior_hessian, 116 | g=gp_variance, g_jacobian=gp_variance_jacobian, g_hessian=gp_variance_hessian) 117 | 118 | def update(self, x: ndarray, y: ndarray) -> None: 119 | """Add new data to the model. 120 | 121 | Parameters 122 | ---------- 123 | x 124 | A 2D array of shape (num_points, num_dimensions), or a 1D array of shape (num_dimensions). 125 | y 126 | A 1D array of shape (num_points). If X is 1D, this may also be a 0D array or float. 
127 | 128 | Raises 129 | ------ 130 | ValueError 131 | If the number of points in `x` does not equal the number of points in `y`. 132 | """ 133 | self.warped_gp.update(x, y) 134 | 135 | def remove(self, x: Union[ndarray, List[ndarray]], y: Union[ndarray, List[ndarray]]) -> None: 136 | """Remove data from the model. 137 | 138 | Parameters 139 | ---------- 140 | x 141 | A 2D array of shape (num_points, num_dimensions), or a 1D array of shape (num_dimensions), or a list of such 142 | arrays. 143 | y 144 | A 1D array of shape (num_points), or a list of such arrays. If `x` is 1D, this may also be a 0D array or 145 | float. Must be of the same type as `x`. 146 | 147 | Raises 148 | ------ 149 | ValueError 150 | If the number of points in `x` does not equal the number of points in `y`. 151 | If `x` is an array and `y` is a list, or vice versa. 152 | """ 153 | self.warped_gp.remove(x, y) 154 | 155 | def integral_mean(self) -> float: 156 | """Compute the mean of the integral of the function under this model.""" 157 | return _compute_mean(self.prior, self.warped_gp, self.warped_gp.kernel) 158 | 159 | 160 | @multimethod 161 | def _compute_mean(prior: Prior, gp: WarpedGP, kernel: Kern) -> float: 162 | """Compute the mean of the integral for the given prior, warped GP, and kernel. 163 | 164 | This method will delegate to other methods of the same name defined in this module, based on the type of the 165 | arguments. If no implementation is found for the provided types, this default implementation will raise an error.""" 166 | raise NotImplementedError("Integration is not supported for this combination of prior, warping and kernel.\n\n" 167 | "Prior was of type {}.\n" 168 | "Warped GP was of type {}.\n" 169 | "Kernel was of type {}." 170 | .format(type(prior), type(gp), type(kernel))) 171 | 172 | 173 | @multimethod 174 | def _compute_mean(prior: Gaussian, gp: WsabiLGP, kernel: RBF) -> float: 175 | """Compute the mean of the integral for a WSABI-L GP with a squared exponential kernel against a Gaussian prior.""" 176 | underlying_gp = gp.underlying_gp 177 | 178 | dimensions = gp.dimensions 179 | 180 | alpha = gp.alpha 181 | kernel_lengthscale = kernel.lengthscale.values[0] 182 | kernel_variance = kernel.variance.values[0] 183 | 184 | X_D = underlying_gp.X 185 | 186 | mu = prior.mean 187 | sigma = prior.covariance 188 | sigma_inv = prior.precision 189 | 190 | nu = (X_D[:, newaxis, :] + X_D[newaxis, :, :]) / 2 191 | A = underlying_gp.K_inv_Y 192 | 193 | L = np.exp(-(np.linalg.norm(X_D[:, newaxis, :] - X_D[newaxis, :, :], axis=2) ** 2)/(4 * kernel_lengthscale**2)) 194 | L = kernel_variance ** 2 * L 195 | L = np.linalg.det(2 * np.pi * sigma) ** (-1/2) * L 196 | 197 | C = sigma_inv + 2 * np.eye(dimensions) / kernel_lengthscale ** 2 198 | 199 | C_inv = np.linalg.inv(C) 200 | gamma_part = 2 * nu / kernel_lengthscale ** 2 + (sigma_inv @ mu)[newaxis, newaxis, :] 201 | 202 | gamma = np.einsum('kl,ijl->ijk', C_inv, gamma_part) 203 | 204 | k_1 = 2 * np.einsum('ijk,ijk->ij', nu, nu) / kernel_lengthscale ** 2 205 | k_2 = mu.T @ sigma_inv @ mu 206 | k_3 = np.einsum('ijk,kl,ijl->ij', gamma, C, gamma) 207 | 208 | k = k_1 + k_2 - k_3 209 | 210 | K = np.exp(-k/2) 211 | 212 | integral_mean = alpha + (np.linalg.det(2 * np.pi * np.linalg.inv(C)) ** 0.5)/2 * (A.T @ (K * L) @ A) 213 | 214 | return integral_mean.item() 215 | -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/.nojekyll -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = BatchBayesianquadrature 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.acquisition_functions.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.acquisition_functions.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.batch_selection.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.batch_selection.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.gps.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.gps.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.plotting.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.plotting.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.priors.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.priors.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/bayesquad.quadrature.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/bayesquad.quadrature.doctree -------------------------------------------------------------------------------- 
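The priors and quadrature modules shown above are the core of the public API. The sketch below shows how they are intended to fit together; it assumes that a WSABI-L warped GP over a GPy RBF kernel has already been constructed via bayesquad.gps (which is not part of this section), so warped_gp is left as a placeholder, and the toy integrand is invented purely for illustration.

import numpy as np

from bayesquad.priors import Gaussian
from bayesquad.quadrature import IntegrandModel


def integrand(x):
    # Toy non-negative integrand, purely illustrative.
    return np.exp(-np.sum(x ** 2, axis=-1))


# A 2-D Gaussian prior: 1D mean of shape (num_dimensions), 2D covariance.
prior = Gaussian(mean=np.zeros(2), covariance=np.eye(2))

# Placeholder: constructing the WSABI-L model requires bayesquad.gps and GPy,
# neither of which is shown in this section.
warped_gp = ...  # a bayesquad.gps.WsabiLGP instance

model = IntegrandModel(warped_gp, prior)

# Evaluate the integrand at points drawn from the prior and add the data to the model.
x = prior.sample(5)                                    # shape (5, 2)
model.update(x, integrand(x))                          # y has shape (5)

mean, variance = model.posterior_mean_and_variance(x)  # each of shape (5)
estimate = model.integral_mean()                       # posterior mean of the integral

For a Gaussian prior, a WSABI-L warping and an RBF kernel, integral_mean() dispatches to the closed-form _compute_mean implementation in quadrature.py above; for any other combination the default implementation raises NotImplementedError.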
/docs/build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/modules.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/doctrees/modules.doctree -------------------------------------------------------------------------------- /docs/build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: 36f55e483e252199e5ae2354fe16e6a8 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /docs/build/html/_modules/bayesquad/acquisition_functions.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | bayesquad.acquisition_functions — Batch Bayesian quadrature documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 33 | 34 |

Source code for bayesquad.acquisition_functions

 40 | """Acquisition functions and related functions."""
 41 | import numpy as np
 42 | 
 43 | from .plotting import plottable
 44 | from .quadrature import IntegrandModel
 45 | 
 46 | 
 47 | 
[docs]def model_variance(integrand_model: IntegrandModel): 48 | 49 | @plottable("Model variance", default_plotting_parameters={'calculate_jacobian': False}) 50 | def f(x, *, calculate_jacobian=True): 51 | """Evaluate the variance, and the Jacobian of the variance, for the given `IntegrandModel` at a point, or a set 52 | of points. 53 | 54 | Given an array of shape (num_points, num_dimensions), returns an array of shape (num_points) containing the 55 | function values and an array of shape (num_points, num_dimensions) containing the function Jacobians. 56 | 57 | Given an array of shape (num_dimensions), returns a 0D array containing the function value and an array of shape 58 | (num_dimensions) containing the function Jacobian. 59 | 60 | If the Jacobian is not required (e.g. for plotting), the relevant calculations can be disabled by setting 61 | `calculate_jacobian=False`. 62 | """ 63 | _, variance = integrand_model.posterior_mean_and_variance(x) 64 | 65 | if calculate_jacobian: 66 | variance_jacobian = integrand_model.posterior_variance_jacobian(x) 67 | else: 68 | variance_jacobian = None 69 | 70 | return variance, variance_jacobian 71 | 72 | return f
73 | 74 | 75 |
[docs]def model_variance_norm_of_gradient_squared(integrand_model: IntegrandModel): 76 | 77 | @plottable("Gradient squared", default_plotting_parameters={'calculate_jacobian': False}) 78 | def f(x, *, calculate_jacobian=True): 79 | """Evaluate the squared norm of the gradient of the variance, and the Jacobian of this quantity, for the given 80 | `IntegrandModel` at a point, or a set of points. 81 | 82 | Given an array of shape (num_points, num_dimensions), returns an array of shape (num_points) containing the 83 | function values and an array of shape (num_points, num_dimensions) containing the function Jacobians. 84 | 85 | Given an array of shape (num_dimensions), returns a 0D array containing the function value and an array of shape 86 | (num_dimensions) containing the function Jacobian. 87 | 88 | If the Jacobian is not required (e.g. for plotting), the relevant calculations can be disabled by setting 89 | `calculate_jacobian=False`. 90 | """ 91 | variance_jacobian = integrand_model.posterior_variance_jacobian(x) 92 | 93 | # Inner product of the Jacobian with itself, for each point. 94 | gradient_squared = np.einsum('...i,...i->...', variance_jacobian, variance_jacobian, optimize=True) 95 | 96 | if calculate_jacobian: 97 | variance_hessian = integrand_model.posterior_variance_hessian(x) 98 | 99 | # Matrix product of Hessian and Jacobian, for each point. 100 | gradient_squared_jacobian = 2 * np.einsum('...ij,...j->...i', 101 | variance_hessian, 102 | variance_jacobian, 103 | optimize=True) 104 | else: 105 | gradient_squared_jacobian = None 106 | 107 | return gradient_squared, gradient_squared_jacobian 108 | 109 | return f
133 | 146 | 150 | 151 | -------------------------------------------------------------------------------- /docs/build/html/_modules/bayesquad/plotting.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | bayesquad.plotting — Batch Bayesian quadrature documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 33 | 34 |

Source code for bayesquad.plotting

 40 | """Functions to allow plotting code to be decoupled from the rest of the code."""
 41 | 
 42 | from functools import wraps
 43 | from types import MappingProxyType
 44 | from typing import Callable
 45 | 
 46 | 
 47 | class _Registry:
 48 |     def __init__(self):
 49 |         self._callback_registry = {}
 50 | 
 51 |     def trigger_callbacks(self, identifier: str, func: Callable):
 52 |         if identifier not in self._callback_registry:
 53 |             return
 54 | 
 55 |         for callback in self._callback_registry[identifier]:
 56 |             callback(func)
 57 | 
 58 |     def add_callback(self, identifier: str, callback: Callable):
 59 |         if identifier not in self._callback_registry:
 60 |             self._callback_registry[identifier] = []
 61 | 
 62 |         self._callback_registry[identifier].append(callback)
 63 | 
 64 | 
 65 | _function_registry = _Registry()
 66 | 
 67 | # Using a mutable object (e.g. an empty dict) as a default parameter can lead to undesirable behaviour, so we use this
 68 | # read-only proxy.
 69 | #
 70 | # See:
 71 | #   The problem: https://stackoverflow.com/q/1132941
 72 | #   A solution:  https://stackoverflow.com/a/30638022
 73 | _EMPTY = MappingProxyType({})
 74 | 
 75 | 
 76 | 
[docs]def plottable(identifier: str, *, default_plotting_parameters=_EMPTY): 77 | def decorator(func: Callable): 78 | @wraps(func) 79 | def func_for_plotting(*args, **kwargs): 80 | # Merge default_plotting_parameters into kwargs 81 | kwargs = {**default_plotting_parameters, **kwargs} 82 | return func(*args, **kwargs) 83 | 84 | _function_registry.trigger_callbacks(identifier, func_for_plotting) 85 | 86 | return func 87 | return decorator
88 | 89 | 90 |
[docs]def add_callback(identifier: str, callback: Callable): 91 | _function_registry.add_callback(identifier, callback)
115 | 128 | 132 | 133 | -------------------------------------------------------------------------------- /docs/build/html/_modules/index.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | Overview: module code — Batch Bayesian quadrature documentation 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 32 | 33 |

All modules for which code is available

68 | 80 | 84 | 85 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.acquisition_functions.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad.acquisition\_functions module 2 | ======================================= 3 | 4 | .. automodule:: bayesquad.acquisition_functions 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.batch_selection.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad.batch\_selection module 2 | ================================= 3 | 4 | .. automodule:: bayesquad.batch_selection 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.gps.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad.gps module 2 | ==================== 3 | 4 | .. automodule:: bayesquad.gps 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.plotting.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad.plotting module 2 | ========================= 3 | 4 | .. automodule:: bayesquad.plotting 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.priors.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad.priors module 2 | ======================= 3 | 4 | .. automodule:: bayesquad.priors 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.quadrature.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad.quadrature module 2 | =========================== 3 | 4 | .. automodule:: bayesquad.quadrature 5 | :members: 6 | :private-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/build/html/_sources/bayesquad.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad package 2 | ================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | .. toctree:: 8 | 9 | bayesquad.acquisition_functions 10 | bayesquad.batch_selection 11 | bayesquad.gps 12 | bayesquad.plotting 13 | bayesquad.priors 14 | bayesquad.quadrature 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: bayesquad 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/build/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | .. Batch Bayesian quadrature documentation master file, created by 2 | sphinx-quickstart on Fri Aug 17 15:43:05 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Batch Bayesian quadrature's documentation! 
7 | ===================================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /docs/build/html/_sources/modules.rst.txt: -------------------------------------------------------------------------------- 1 | bayesquad 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | bayesquad 8 | -------------------------------------------------------------------------------- /docs/build/html/_static/ajax-loader.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/ajax-loader.gif -------------------------------------------------------------------------------- /docs/build/html/_static/classic.css: -------------------------------------------------------------------------------- 1 | /* 2 | * classic.css_t 3 | * ~~~~~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- classic theme. 6 | * 7 | * :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | @import url("basic.css"); 13 | 14 | /* -- page layout ----------------------------------------------------------- */ 15 | 16 | body { 17 | font-family: sans-serif; 18 | font-size: 100%; 19 | background-color: #11303d; 20 | color: #000; 21 | margin: 0; 22 | padding: 0; 23 | } 24 | 25 | div.document { 26 | background-color: #1c4e63; 27 | } 28 | 29 | div.documentwrapper { 30 | float: left; 31 | width: 100%; 32 | } 33 | 34 | div.bodywrapper { 35 | margin: 0 0 0 230px; 36 | } 37 | 38 | div.body { 39 | background-color: #ffffff; 40 | color: #000000; 41 | padding: 0 20px 30px 20px; 42 | } 43 | 44 | div.footer { 45 | color: #ffffff; 46 | width: 100%; 47 | padding: 9px 0 9px 0; 48 | text-align: center; 49 | font-size: 75%; 50 | } 51 | 52 | div.footer a { 53 | color: #ffffff; 54 | text-decoration: underline; 55 | } 56 | 57 | div.related { 58 | background-color: #133f52; 59 | line-height: 30px; 60 | color: #ffffff; 61 | } 62 | 63 | div.related a { 64 | color: #ffffff; 65 | } 66 | 67 | div.sphinxsidebar { 68 | } 69 | 70 | div.sphinxsidebar h3 { 71 | font-family: 'Trebuchet MS', sans-serif; 72 | color: #ffffff; 73 | font-size: 1.4em; 74 | font-weight: normal; 75 | margin: 0; 76 | padding: 0; 77 | } 78 | 79 | div.sphinxsidebar h3 a { 80 | color: #ffffff; 81 | } 82 | 83 | div.sphinxsidebar h4 { 84 | font-family: 'Trebuchet MS', sans-serif; 85 | color: #ffffff; 86 | font-size: 1.3em; 87 | font-weight: normal; 88 | margin: 5px 0 0 0; 89 | padding: 0; 90 | } 91 | 92 | div.sphinxsidebar p { 93 | color: #ffffff; 94 | } 95 | 96 | div.sphinxsidebar p.topless { 97 | margin: 5px 10px 10px 10px; 98 | } 99 | 100 | div.sphinxsidebar ul { 101 | margin: 10px; 102 | padding: 0; 103 | color: #ffffff; 104 | } 105 | 106 | div.sphinxsidebar a { 107 | color: #98dbcc; 108 | } 109 | 110 | div.sphinxsidebar input { 111 | border: 1px solid #98dbcc; 112 | font-family: sans-serif; 113 | font-size: 1em; 114 | } 115 | 116 | 117 | 118 | /* -- hyperlink styles ------------------------------------------------------ */ 119 | 120 | a { 121 | color: #355f7c; 122 | text-decoration: none; 123 | } 124 | 125 | a:visited { 126 | color: #355f7c; 127 | text-decoration: none; 128 | } 129 | 130 | a:hover { 131 | text-decoration: 
underline; 132 | } 133 | 134 | 135 | 136 | /* -- body styles ----------------------------------------------------------- */ 137 | 138 | div.body h1, 139 | div.body h2, 140 | div.body h3, 141 | div.body h4, 142 | div.body h5, 143 | div.body h6 { 144 | font-family: 'Trebuchet MS', sans-serif; 145 | background-color: #f2f2f2; 146 | font-weight: normal; 147 | color: #20435c; 148 | border-bottom: 1px solid #ccc; 149 | margin: 20px -20px 10px -20px; 150 | padding: 3px 0 3px 10px; 151 | } 152 | 153 | div.body h1 { margin-top: 0; font-size: 200%; } 154 | div.body h2 { font-size: 160%; } 155 | div.body h3 { font-size: 140%; } 156 | div.body h4 { font-size: 120%; } 157 | div.body h5 { font-size: 110%; } 158 | div.body h6 { font-size: 100%; } 159 | 160 | a.headerlink { 161 | color: #c60f0f; 162 | font-size: 0.8em; 163 | padding: 0 4px 0 4px; 164 | text-decoration: none; 165 | } 166 | 167 | a.headerlink:hover { 168 | background-color: #c60f0f; 169 | color: white; 170 | } 171 | 172 | div.body p, div.body dd, div.body li, div.body blockquote { 173 | text-align: justify; 174 | line-height: 130%; 175 | } 176 | 177 | div.admonition p.admonition-title + p { 178 | display: inline; 179 | } 180 | 181 | div.admonition p { 182 | margin-bottom: 5px; 183 | } 184 | 185 | div.admonition pre { 186 | margin-bottom: 5px; 187 | } 188 | 189 | div.admonition ul, div.admonition ol { 190 | margin-bottom: 5px; 191 | } 192 | 193 | div.note { 194 | background-color: #eee; 195 | border: 1px solid #ccc; 196 | } 197 | 198 | div.seealso { 199 | background-color: #ffc; 200 | border: 1px solid #ff6; 201 | } 202 | 203 | div.topic { 204 | background-color: #eee; 205 | } 206 | 207 | div.warning { 208 | background-color: #ffe4e4; 209 | border: 1px solid #f66; 210 | } 211 | 212 | p.admonition-title { 213 | display: inline; 214 | } 215 | 216 | p.admonition-title:after { 217 | content: ":"; 218 | } 219 | 220 | pre { 221 | padding: 5px; 222 | background-color: #eeffcc; 223 | color: #333333; 224 | line-height: 120%; 225 | border: 1px solid #ac9; 226 | border-left: none; 227 | border-right: none; 228 | } 229 | 230 | code { 231 | background-color: #ecf0f3; 232 | padding: 0 1px 0 1px; 233 | font-size: 0.95em; 234 | } 235 | 236 | th { 237 | background-color: #ede; 238 | } 239 | 240 | .warning code { 241 | background: #efc2c2; 242 | } 243 | 244 | .note code { 245 | background: #d6d6d6; 246 | } 247 | 248 | .viewcode-back { 249 | font-family: sans-serif; 250 | } 251 | 252 | div.viewcode-block:target { 253 | background-color: #f4debf; 254 | border-top: 1px solid #ac9; 255 | border-bottom: 1px solid #ac9; 256 | } 257 | 258 | div.code-block-caption { 259 | color: #efefef; 260 | background-color: #1c4e63; 261 | } -------------------------------------------------------------------------------- /docs/build/html/_static/comment-bright.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/comment-bright.png -------------------------------------------------------------------------------- /docs/build/html/_static/comment-close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/comment-close.png -------------------------------------------------------------------------------- /docs/build/html/_static/comment.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/comment.png -------------------------------------------------------------------------------- /docs/build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | }; 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s === 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * highlight a given string on a jquery object by wrapping it in 66 | * span elements with the given class name. 
67 | */ 68 | jQuery.fn.highlightText = function(text, className) { 69 | function highlight(node, addItems) { 70 | if (node.nodeType === 3) { 71 | var val = node.nodeValue; 72 | var pos = val.toLowerCase().indexOf(text); 73 | if (pos >= 0 && 74 | !jQuery(node.parentNode).hasClass(className) && 75 | !jQuery(node.parentNode).hasClass("nohighlight")) { 76 | var span; 77 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 78 | if (isInSVG) { 79 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 80 | } else { 81 | span = document.createElement("span"); 82 | span.className = className; 83 | } 84 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 85 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 86 | document.createTextNode(val.substr(pos + text.length)), 87 | node.nextSibling)); 88 | node.nodeValue = val.substr(0, pos); 89 | if (isInSVG) { 90 | var bbox = span.getBBox(); 91 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 92 | rect.x.baseVal.value = bbox.x; 93 | rect.y.baseVal.value = bbox.y; 94 | rect.width.baseVal.value = bbox.width; 95 | rect.height.baseVal.value = bbox.height; 96 | rect.setAttribute('class', className); 97 | var parentOfText = node.parentNode.parentNode; 98 | addItems.push({ 99 | "parent": node.parentNode, 100 | "target": rect}); 101 | } 102 | } 103 | } 104 | else if (!jQuery(node).is("button, select, textarea")) { 105 | jQuery.each(node.childNodes, function() { 106 | highlight(this, addItems); 107 | }); 108 | } 109 | } 110 | var addItems = []; 111 | var result = this.each(function() { 112 | highlight(this, addItems); 113 | }); 114 | for (var i = 0; i < addItems.length; ++i) { 115 | jQuery(addItems[i].parent).before(addItems[i].target); 116 | } 117 | return result; 118 | }; 119 | 120 | /* 121 | * backward compatibility for jQuery.browser 122 | * This will be supported until firefox bug is fixed. 123 | */ 124 | if (!jQuery.browser) { 125 | jQuery.uaMatch = function(ua) { 126 | ua = ua.toLowerCase(); 127 | 128 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 129 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 130 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 131 | /(msie) ([\w.]+)/.exec(ua) || 132 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 133 | []; 134 | 135 | return { 136 | browser: match[ 1 ] || "", 137 | version: match[ 2 ] || "0" 138 | }; 139 | }; 140 | jQuery.browser = {}; 141 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 142 | } 143 | 144 | /** 145 | * Small JavaScript module for the documentation. 146 | */ 147 | var Documentation = { 148 | 149 | init : function() { 150 | this.fixFirefoxAnchorBug(); 151 | this.highlightSearchWords(); 152 | this.initIndexTable(); 153 | 154 | }, 155 | 156 | /** 157 | * i18n support 158 | */ 159 | TRANSLATIONS : {}, 160 | PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, 161 | LOCALE : 'unknown', 162 | 163 | // gettext and ngettext don't access this so that the functions 164 | // can safely bound to a different name (_ = Documentation.gettext) 165 | gettext : function(string) { 166 | var translated = Documentation.TRANSLATIONS[string]; 167 | if (typeof translated === 'undefined') 168 | return string; 169 | return (typeof translated === 'string') ? translated : translated[0]; 170 | }, 171 | 172 | ngettext : function(singular, plural, n) { 173 | var translated = Documentation.TRANSLATIONS[singular]; 174 | if (typeof translated === 'undefined') 175 | return (n == 1) ? 
singular : plural; 176 | return translated[Documentation.PLURALEXPR(n)]; 177 | }, 178 | 179 | addTranslations : function(catalog) { 180 | for (var key in catalog.messages) 181 | this.TRANSLATIONS[key] = catalog.messages[key]; 182 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 183 | this.LOCALE = catalog.locale; 184 | }, 185 | 186 | /** 187 | * add context elements like header anchor links 188 | */ 189 | addContextElements : function() { 190 | $('div[id] > :header:first').each(function() { 191 | $('\u00B6'). 192 | attr('href', '#' + this.id). 193 | attr('title', _('Permalink to this headline')). 194 | appendTo(this); 195 | }); 196 | $('dt[id]').each(function() { 197 | $('\u00B6'). 198 | attr('href', '#' + this.id). 199 | attr('title', _('Permalink to this definition')). 200 | appendTo(this); 201 | }); 202 | }, 203 | 204 | /** 205 | * workaround a firefox stupidity 206 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 207 | */ 208 | fixFirefoxAnchorBug : function() { 209 | if (document.location.hash && $.browser.mozilla) 210 | window.setTimeout(function() { 211 | document.location.href += ''; 212 | }, 10); 213 | }, 214 | 215 | /** 216 | * highlight the search words provided in the url in the text 217 | */ 218 | highlightSearchWords : function() { 219 | var params = $.getQueryParameters(); 220 | var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; 221 | if (terms.length) { 222 | var body = $('div.body'); 223 | if (!body.length) { 224 | body = $('body'); 225 | } 226 | window.setTimeout(function() { 227 | $.each(terms, function() { 228 | body.highlightText(this.toLowerCase(), 'highlighted'); 229 | }); 230 | }, 10); 231 | $('') 233 | .appendTo($('#searchbox')); 234 | } 235 | }, 236 | 237 | /** 238 | * init the domain index toggle buttons 239 | */ 240 | initIndexTable : function() { 241 | var togglers = $('img.toggler').click(function() { 242 | var src = $(this).attr('src'); 243 | var idnum = $(this).attr('id').substr(7); 244 | $('tr.cg-' + idnum).toggle(); 245 | if (src.substr(-9) === 'minus.png') 246 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 247 | else 248 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 249 | }).css('display', ''); 250 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 251 | togglers.click(); 252 | } 253 | }, 254 | 255 | /** 256 | * helper function to hide the search marks again 257 | */ 258 | hideSearchWords : function() { 259 | $('#searchbox .highlight-link').fadeOut(300); 260 | $('span.highlighted').removeClass('highlighted'); 261 | }, 262 | 263 | /** 264 | * make the url absolute 265 | */ 266 | makeURL : function(relativeURL) { 267 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 268 | }, 269 | 270 | /** 271 | * get the current relative url 272 | */ 273 | getCurrentURL : function() { 274 | var path = document.location.pathname; 275 | var parts = path.split(/\//); 276 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 277 | if (this === '..') 278 | parts.pop(); 279 | }); 280 | var url = parts.join('/'); 281 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 282 | }, 283 | 284 | initOnKeyListeners: function() { 285 | $(document).keyup(function(event) { 286 | var activeElementType = document.activeElement.tagName; 287 | // don't navigate when in search box or textarea 288 | if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') { 289 | switch (event.keyCode) { 290 | case 37: // left 291 
| var prevHref = $('link[rel="prev"]').prop('href'); 292 | if (prevHref) { 293 | window.location.href = prevHref; 294 | return false; 295 | } 296 | case 39: // right 297 | var nextHref = $('link[rel="next"]').prop('href'); 298 | if (nextHref) { 299 | window.location.href = nextHref; 300 | return false; 301 | } 302 | } 303 | } 304 | }); 305 | } 306 | }; 307 | 308 | // quick alias for translations 309 | _ = Documentation.gettext; 310 | 311 | $(document).ready(function() { 312 | Documentation.init(); 313 | }); -------------------------------------------------------------------------------- /docs/build/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '', 4 | LANGUAGE: 'None', 5 | COLLAPSE_INDEX: false, 6 | FILE_SUFFIX: '.html', 7 | HAS_SOURCE: true, 8 | SOURCELINK_SUFFIX: '.txt' 9 | }; -------------------------------------------------------------------------------- /docs/build/html/_static/down-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/down-pressed.png -------------------------------------------------------------------------------- /docs/build/html/_static/down.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/down.png -------------------------------------------------------------------------------- /docs/build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/file.png -------------------------------------------------------------------------------- /docs/build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/minus.png -------------------------------------------------------------------------------- /docs/build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/plus.png -------------------------------------------------------------------------------- /docs/build/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | .highlight .hll { background-color: #ffffcc } 2 | .highlight { background: #eeffcc; } 3 | .highlight .c { color: #408090; font-style: italic } /* Comment */ 4 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 5 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */ 6 | .highlight .o { color: #666666 } /* Operator */ 7 | .highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ 8 | .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ 9 | .highlight .cp { color: #007020 } /* Comment.Preproc */ 10 | .highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ 11 | .highlight .c1 { color: #408090; font-style: italic 
} /* Comment.Single */ 12 | .highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ 13 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 14 | .highlight .ge { font-style: italic } /* Generic.Emph */ 15 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 16 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 17 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 18 | .highlight .go { color: #333333 } /* Generic.Output */ 19 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ 20 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 21 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 22 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 23 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ 24 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ 25 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ 26 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */ 27 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ 28 | .highlight .kt { color: #902000 } /* Keyword.Type */ 29 | .highlight .m { color: #208050 } /* Literal.Number */ 30 | .highlight .s { color: #4070a0 } /* Literal.String */ 31 | .highlight .na { color: #4070a0 } /* Name.Attribute */ 32 | .highlight .nb { color: #007020 } /* Name.Builtin */ 33 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ 34 | .highlight .no { color: #60add5 } /* Name.Constant */ 35 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ 36 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ 37 | .highlight .ne { color: #007020 } /* Name.Exception */ 38 | .highlight .nf { color: #06287e } /* Name.Function */ 39 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ 40 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ 41 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ 42 | .highlight .nv { color: #bb60d5 } /* Name.Variable */ 43 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ 44 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 45 | .highlight .mb { color: #208050 } /* Literal.Number.Bin */ 46 | .highlight .mf { color: #208050 } /* Literal.Number.Float */ 47 | .highlight .mh { color: #208050 } /* Literal.Number.Hex */ 48 | .highlight .mi { color: #208050 } /* Literal.Number.Integer */ 49 | .highlight .mo { color: #208050 } /* Literal.Number.Oct */ 50 | .highlight .sa { color: #4070a0 } /* Literal.String.Affix */ 51 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ 52 | .highlight .sc { color: #4070a0 } /* Literal.String.Char */ 53 | .highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ 54 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ 55 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */ 56 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ 57 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ 58 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ 59 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */ 60 | .highlight .sr { color: #235388 } /* Literal.String.Regex */ 61 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */ 62 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */ 
63 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ 64 | .highlight .fm { color: #06287e } /* Name.Function.Magic */ 65 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ 66 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ 67 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ 68 | .highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ 69 | .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/build/html/_static/sidebar.js: -------------------------------------------------------------------------------- 1 | /* 2 | * sidebar.js 3 | * ~~~~~~~~~~ 4 | * 5 | * This script makes the Sphinx sidebar collapsible. 6 | * 7 | * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds 8 | * in .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton 9 | * used to collapse and expand the sidebar. 10 | * 11 | * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden 12 | * and the width of the sidebar and the margin-left of the document 13 | * are decreased. When the sidebar is expanded the opposite happens. 14 | * This script saves a per-browser/per-session cookie used to 15 | * remember the position of the sidebar among the pages. 16 | * Once the browser is closed the cookie is deleted and the position 17 | * reset to the default (expanded). 18 | * 19 | * :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS. 20 | * :license: BSD, see LICENSE for details. 21 | * 22 | */ 23 | 24 | $(function() { 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | // global elements used by the functions. 34 | // the 'sidebarbutton' element is defined as global after its 35 | // creation, in the add_sidebar_button function 36 | var bodywrapper = $('.bodywrapper'); 37 | var sidebar = $('.sphinxsidebar'); 38 | var sidebarwrapper = $('.sphinxsidebarwrapper'); 39 | 40 | // for some reason, the document has no sidebar; do not run into errors 41 | if (!sidebar.length) return; 42 | 43 | // original margin-left of the bodywrapper and width of the sidebar 44 | // with the sidebar expanded 45 | var bw_margin_expanded = bodywrapper.css('margin-left'); 46 | var ssb_width_expanded = sidebar.width(); 47 | 48 | // margin-left of the bodywrapper and width of the sidebar 49 | // with the sidebar collapsed 50 | var bw_margin_collapsed = '.8em'; 51 | var ssb_width_collapsed = '.8em'; 52 | 53 | // colors used by the current theme 54 | var dark_color = $('.related').css('background-color'); 55 | var light_color = $('.document').css('background-color'); 56 | 57 | function sidebar_is_collapsed() { 58 | return sidebarwrapper.is(':not(:visible)'); 59 | } 60 | 61 | function toggle_sidebar() { 62 | if (sidebar_is_collapsed()) 63 | expand_sidebar(); 64 | else 65 | collapse_sidebar(); 66 | } 67 | 68 | function collapse_sidebar() { 69 | sidebarwrapper.hide(); 70 | sidebar.css('width', ssb_width_collapsed); 71 | bodywrapper.css('margin-left', bw_margin_collapsed); 72 | sidebarbutton.css({ 73 | 'margin-left': '0', 74 | 'height': bodywrapper.height() 75 | }); 76 | sidebarbutton.find('span').text('»'); 77 | sidebarbutton.attr('title', _('Expand sidebar')); 78 | document.cookie = 'sidebar=collapsed'; 79 | } 80 | 81 | function expand_sidebar() { 82 | bodywrapper.css('margin-left', bw_margin_expanded); 83 | sidebar.css('width', ssb_width_expanded); 84 | sidebarwrapper.show(); 85 | sidebarbutton.css({ 86 | 'margin-left': ssb_width_expanded-12, 87 | 'height': bodywrapper.height() 
88 | }); 89 | sidebarbutton.find('span').text('«'); 90 | sidebarbutton.attr('title', _('Collapse sidebar')); 91 | document.cookie = 'sidebar=expanded'; 92 | } 93 | 94 | function add_sidebar_button() { 95 | sidebarwrapper.css({ 96 | 'float': 'left', 97 | 'margin-right': '0', 98 | 'width': ssb_width_expanded - 28 99 | }); 100 | // create the button 101 | sidebar.append( 102 | '
«
' 103 | ); 104 | var sidebarbutton = $('#sidebarbutton'); 105 | light_color = sidebarbutton.css('background-color'); 106 | // find the height of the viewport to center the '<<' in the page 107 | var viewport_height; 108 | if (window.innerHeight) 109 | viewport_height = window.innerHeight; 110 | else 111 | viewport_height = $(window).height(); 112 | sidebarbutton.find('span').css({ 113 | 'display': 'block', 114 | 'margin-top': (viewport_height - sidebar.position().top - 20) / 2 115 | }); 116 | 117 | sidebarbutton.click(toggle_sidebar); 118 | sidebarbutton.attr('title', _('Collapse sidebar')); 119 | sidebarbutton.css({ 120 | 'color': '#FFFFFF', 121 | 'border-left': '1px solid ' + dark_color, 122 | 'font-size': '1.2em', 123 | 'cursor': 'pointer', 124 | 'height': bodywrapper.height(), 125 | 'padding-top': '1px', 126 | 'margin-left': ssb_width_expanded - 12 127 | }); 128 | 129 | sidebarbutton.hover( 130 | function () { 131 | $(this).css('background-color', dark_color); 132 | }, 133 | function () { 134 | $(this).css('background-color', light_color); 135 | } 136 | ); 137 | } 138 | 139 | function set_position_from_cookie() { 140 | if (!document.cookie) 141 | return; 142 | var items = document.cookie.split(';'); 143 | for(var k=0; k2;a== 12 | null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect= 13 | function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e= 14 | e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= 15 | function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a, 17 | c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var 
c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}}; 24 | b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments, 25 | 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)}; 26 | b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"}; 27 | b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.mixin=function(a){j(b.functions(a), 28 | function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+ 29 | u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]= 30 | function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return 
v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain= 31 | true;return this};m.prototype.value=function(){return this._wrapped}}).call(this); 32 | -------------------------------------------------------------------------------- /docs/build/html/_static/up-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/up-pressed.png -------------------------------------------------------------------------------- /docs/build/html/_static/up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/_static/up.png -------------------------------------------------------------------------------- /docs/build/html/bayesquad.acquisition_functions.html: --------------------------------------------------------------------------------
[Sphinx-built HTML page for the bayesquad.acquisition_functions module ("Acquisition functions and related functions."). It documents two functions:

    model_variance(integrand_model: bayesquad.quadrature.IntegrandModel)
    model_variance_norm_of_gradient_squared(integrand_model: bayesquad.quadrature.IntegrandModel)]
-------------------------------------------------------------------------------- /docs/build/html/bayesquad.batch_selection.html: --------------------------------------------------------------------------------
[Sphinx-built HTML page for the bayesquad.batch_selection module ("Methods for selecting a batch of points to evaluate for Bayesian quadrature."). It documents:

    select_batch(integrand_model: bayesquad.quadrature.IntegrandModel, batch_size: int, batch_method: str = 'Local Penalisation') -> List[numpy.ndarray]
        Select a batch of points at which to evaluate the integrand.
        Parameters:
            integrand_model -- the model with which we wish to perform Bayesian quadrature.
            batch_size -- the number of points to return in the new batch.
            batch_method -- the method by which to compute the new batch. Currently supported
                methods are "Local Penalisation", "Kriging Believer" and "Kriging Optimist".
        Returns:
            A list of arrays. Each array is a point of the new batch.]
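A minimal usage sketch for select_batch, condensed from examples/example_1d.py later in this listing. It assumes that model is an already-constructed IntegrandModel and that true_function is the user's integrand; LOCAL_PENALISATION is the constant exported by bayesquad.batch_selection and used in the examples:

    import numpy as np
    from bayesquad.batch_selection import select_batch, LOCAL_PENALISATION

    batch = select_batch(model, batch_size=3, batch_method=LOCAL_PENALISATION)
    X = np.array(batch)    # stack the returned points into a (batch_size, num_dimensions) array
    Y = true_function(X)   # evaluate the integrand at the new points
    model.update(X, Y)     # condition the warped GP on the new observations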
-------------------------------------------------------------------------------- /docs/build/html/bayesquad.html: --------------------------------------------------------------------------------
[Sphinx-built HTML page for the bayesquad package ("A module for performing bayesian quadrature, supporting batch selection of points for evaluation and warped GP models"). It links to the submodule pages for bayesquad.acquisition_functions, bayesquad.batch_selection, bayesquad.gps, bayesquad.plotting, bayesquad.priors and bayesquad.quadrature.]
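The bundled examples show the intended end-to-end workflow. A condensed sketch of the 1D example (examples/example_1d.py, reproduced in full later in this listing); true_function stands for the user's integrand, and the square-root transform of the initial observation follows that example:

    import GPy
    import numpy as np
    from bayesquad.gps import WsabiLGP
    from bayesquad.priors import Gaussian
    from bayesquad.quadrature import IntegrandModel

    initial_x = np.array([[-3.0]])
    initial_y = np.sqrt(2 * true_function(initial_x))  # square-root warping of the data, as in the example

    kernel = GPy.kern.RBF(1, variance=2, lengthscale=2)
    likelihood = GPy.likelihoods.Gaussian(variance=1e-10)
    gpy_gp = GPy.core.GP(initial_x, initial_y, kernel=kernel, likelihood=likelihood)

    prior = Gaussian(mean=np.array([0]), covariance=np.atleast_2d(2))
    model = IntegrandModel(WsabiLGP(gpy_gp), prior)
    print(model.integral_mean())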
-------------------------------------------------------------------------------- /docs/build/html/bayesquad.plotting.html: --------------------------------------------------------------------------------
[Sphinx-built HTML page for the bayesquad.plotting module ("Functions to allow plotting code to be decoupled from the rest of the code."). It documents:

    add_callback(identifier: str, callback: Callable)
    plottable(identifier: str, *, default_plotting_parameters=mappingproxy({}))]
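A sketch of how the callback hook is used, based on the add_callback signature above and the usage in examples/example_2d.py. The identifier string is taken from those examples; plotting_grid is a placeholder for whatever (num_points, num_dimensions) array the caller wants to evaluate on:

    import numpy as np
    import bayesquad.plotting as plotting

    def acquisition_callback(func):
        # The library passes the penalised log acquisition function to the callback;
        # exponentiating its first return value recovers the acquisition surface, as in the examples.
        values = np.exp(func(plotting_grid)[0])
        # ... plot or record values here ...

    plotting.add_callback("Soft penalised log acquisition function", acquisition_callback)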
-------------------------------------------------------------------------------- /docs/build/html/bayesquad.priors.html: --------------------------------------------------------------------------------
[Sphinx-built HTML page for the bayesquad.priors module ("Classes representing probability distributions, intended to be integrated against a likelihood."). It documents:

    class Gaussian(mean: numpy.ndarray, covariance: numpy.ndarray)
        Bases: bayesquad.priors.Prior. A multivariate Gaussian prior.
        Parameters:
            mean -- a 1D array of shape (num_dimensions).
            covariance -- a 2D array of shape (num_dimensions, num_dimensions).
        Attributes:
            mean and covariance, as above, plus precision -- the inverse of the covariance matrix.
        Methods: gradient(x), logpdf(x) and sample(num_points=1), as specified on Prior.

    class Prior
        Bases: abc.ABC. A prior, providing methods for sampling, and for pointwise evaluation of the
        pdf and its derivatives.

        gradient(x: numpy.ndarray) -> Tuple[numpy.ndarray, numpy.ndarray]
            Compute the Jacobian and Hessian of the prior's pdf at the given set of points.
            Parameters: x -- a 2D array of the points at which to evaluate the derivatives, with
                shape (num_points, num_dimensions).
            Returns: jacobian -- a 2D array of shape (num_points, num_dimensions) containing the
                value of the Jacobian at each point; hessian -- a 3D array of shape
                (num_points, num_dimensions, num_dimensions) whose (i, j, k)-th element is the
                (j, k)-th mixed partial derivative of the pdf at the i-th point of x.

        logpdf(x: numpy.ndarray) -> numpy.ndarray
            Evaluate the prior's log pdf at the given set of points.
            Parameters: x -- an array of shape (num_points, num_dimensions).
            Returns: a 1D array of shape (num_points).

        sample(num_points: int = 1) -> numpy.ndarray
            Sample num_points points independently from the prior.
            Returns: num_points samples from the prior, as a 2D array of shape
            (num_points, num_dimensions).]
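A short usage sketch based only on the interface documented above:

    import numpy as np
    from bayesquad.priors import Gaussian

    prior = Gaussian(mean=np.zeros(2), covariance=np.eye(2))
    points = prior.sample(5)                    # 2D array of shape (5, 2)
    log_density = prior.logpdf(points)          # 1D array of shape (5,)
    jacobian, hessian = prior.gradient(points)  # shapes (5, 2) and (5, 2, 2)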
-------------------------------------------------------------------------------- /docs/build/html/index.html: --------------------------------------------------------------------------------
[Sphinx-built landing page: "Welcome to Batch Bayesian quadrature's documentation!", with an "Indices and tables" section linking to the general index, the module index and the search page.]
-------------------------------------------------------------------------------- /docs/build/html/modules.html: --------------------------------------------------------------------------------
[Sphinx-built page titled "bayesquad", containing the top-level table of contents for the bayesquad package.]
-------------------------------------------------------------------------------- /docs/build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OxfordML/bayesquad/eeb56af5cdeba02a6bfc9e6036bf53f7f9ea3cd6/docs/build/html/objects.inv -------------------------------------------------------------------------------- /docs/build/html/py-modindex.html: --------------------------------------------------------------------------------
[Sphinx-built "Python Module Index" page, listing bayesquad and its submodules: acquisition_functions, batch_selection, gps, plotting, priors and quadrature.]
-------------------------------------------------------------------------------- /docs/build/html/search.html: --------------------------------------------------------------------------------
[Sphinx-built search page; standard search form and JavaScript notice, with no module-specific content.]
-------------------------------------------------------------------------------- /docs/build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | 
79 | 91 | 95 | 96 | -------------------------------------------------------------------------------- /docs/build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({docnames:["bayesquad","bayesquad.acquisition_functions","bayesquad.batch_selection","bayesquad.gps","bayesquad.plotting","bayesquad.priors","bayesquad.quadrature","index","modules"],envversion:53,filenames:["bayesquad.rst","bayesquad.acquisition_functions.rst","bayesquad.batch_selection.rst","bayesquad.gps.rst","bayesquad.plotting.rst","bayesquad.priors.rst","bayesquad.quadrature.rst","index.rst","modules.rst"],objects:{"":{bayesquad:[0,0,0,"-"]},"bayesquad.acquisition_functions":{model_variance:[1,1,1,""],model_variance_norm_of_gradient_squared:[1,1,1,""]},"bayesquad.batch_selection":{select_batch:[2,1,1,""]},"bayesquad.gps":{GP:[3,2,1,""],WarpedGP:[3,2,1,""],WsabiLGP:[3,2,1,""]},"bayesquad.gps.GP":{posterior_hessians:[3,3,1,""],posterior_jacobians:[3,3,1,""],posterior_mean_and_variance:[3,3,1,""],update:[3,3,1,""]},"bayesquad.gps.WarpedGP":{kernel:[3,4,1,""],posterior_mean_and_variance:[3,3,1,""],posterior_variance_hessian:[3,3,1,""],posterior_variance_jacobian:[3,3,1,""],remove:[3,3,1,""],update:[3,3,1,""]},"bayesquad.gps.WsabiLGP":{posterior_mean_and_variance:[3,3,1,""],posterior_variance_hessian:[3,3,1,""],posterior_variance_jacobian:[3,3,1,""],remove:[3,3,1,""],update:[3,3,1,""]},"bayesquad.plotting":{add_callback:[4,1,1,""],plottable:[4,1,1,""]},"bayesquad.priors":{Gaussian:[5,2,1,""],Prior:[5,2,1,""]},"bayesquad.priors.Gaussian":{covariance:[5,4,1,""],gradient:[5,3,1,""],logpdf:[5,3,1,""],mean:[5,4,1,""],precision:[5,4,1,""],sample:[5,3,1,""]},"bayesquad.priors.Prior":{gradient:[5,3,1,""],logpdf:[5,3,1,""],sample:[5,3,1,""]},"bayesquad.quadrature":{IntegrandModel:[6,2,1,""],_compute_mean:[6,4,1,""]},"bayesquad.quadrature.IntegrandModel":{integral_mean:[6,3,1,""],posterior_mean_and_variance:[6,3,1,""],posterior_variance_hessian:[6,3,1,""],posterior_variance_jacobian:[6,3,1,""],remove:[6,3,1,""],update:[6,3,1,""]},bayesquad:{acquisition_functions:[1,0,0,"-"],batch_selection:[2,0,0,"-"],gps:[3,0,0,"-"],plotting:[4,0,0,"-"],priors:[5,0,0,"-"],quadrature:[6,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","function","Python function"],"2":["py","class","Python class"],"3":["py","method","Python method"],"4":["py","attribute","Python 
attribute"]},objtypes:{"0":"py:module","1":"py:function","2":"py:class","3":"py:method","4":"py:attribute"},terms:{"class":[3,5,6],"default":6,"float":[3,6],"function":[1,3,4,6],"int":[2,5],"new":[2,3,6],"return":[2,3,5,6],The:[2,3,5,6],These:3,With:3,_clear_cach:3,_compute_mean:6,abc:[3,5],access:3,acquisit:1,acquisition_funct:[0,8],add:[3,6],add_callback:4,addit:3,advanc:3,again:3,against:5,all:3,allow:4,alpha:3,also:[3,6],ani:3,appli:3,approxim:3,arg:3,argument:[3,6],around:3,arrai:[2,3,5,6],base:[3,5,6],batch:[0,2],batch_method:2,batch_select:[0,8],batch_siz:2,bayesian:[0,2,3,6],been:3,believ:2,between:3,cach:3,calcul:3,call:3,callabl:4,callback:4,capabl:6,clear:3,code:[3,4],complic:3,compon:[3,6],comput:[2,3,5,6],consist:3,contain:5,content:8,conveni:3,core:3,correctli:3,covari:5,current:2,data:[3,6],data_dependent_hessian:3,deal:3,decoupl:4,default_plotting_paramet:4,defin:6,deleg:6,depend:3,deriv:[3,5,6],detail:3,diagon:3,diagonal_hessian:3,differ:3,dimens:3,dimension:3,directli:3,disabl:3,distribut:5,document:3,doe:[3,6],dub:3,duplic:3,each:[2,3,5],element:[3,5,6],equal:[3,6],error:6,evalu:[0,2,3,5,6],exactli:3,fast:3,follow:3,found:6,from:[3,4,5,6],further:3,gaussian:[3,5,6],get:[3,6],given:[3,5,6],gps:[0,6,8],gpy:3,gpy_gp:3,gradient:[3,5],gunter:3,has:3,have:3,hessian:[3,5,6],hit:3,identifi:4,immedi:3,implement:[3,6],includ:3,independ:[3,5],index:7,infer:3,inform:3,input:[3,6],instanc:3,integr:[5,6],integral_mean:6,integrand:[2,6],integrand_model:[1,2],integrandmodel:[1,2,6],intend:5,interest:6,introduc:3,invers:[3,5],its:[3,5],jacobian:[3,5,6],k_d_inv:3,k_star:3,kern:3,kernel:[3,6],kernel_hessian:3,kernel_jacobian:3,keyword:3,krige:2,kwarg:3,length:3,likelihood:5,line:3,linearis:3,list:[2,3,6],local:2,locat:3,log:5,logpdf:5,mai:[3,6],make:3,manual:3,mappingproxi:4,matrix:[3,5],mean:[3,5,6],mean_hessian:3,mean_jacobian:3,mechan:3,method:[2,3,5,6],mix:[3,5,6],model:[0,2,3,6],model_vari:1,model_variance_norm_of_gradient_squar:1,modifi:3,modul:[7,8],more:3,most:3,multivari:5,must:[3,6],name:6,ndarrai:[2,3,5,6],necessari:3,need:3,neural:3,none:[3,6],notat:3,note:[3,6],num_dimens:[3,5,6],num_point:[3,5,6],number:[2,3,6],numpi:[2,3,5,6],object:[3,6],observ:3,occur:3,one:3,onli:3,optimis:3,optimist:2,other:6,our:3,output:3,overrid:3,own:3,packag:8,page:7,pain:3,paramet:[2,3,5,6],part:3,partial:[3,5,6],pass:3,pdf:5,penalis:2,perform:[0,2,3,6],pleas:3,plot:[0,8],plottabl:4,point:[0,2,3,5,6],pointwis:5,posit:3,possibl:3,posterior:[3,6],posterior_hessian:3,posterior_jacobian:3,posterior_mean_and_vari:[3,6],posterior_variance_hessian:[3,6],posterior_variance_jacobian:[3,6],precis:5,predict:3,predictive_gradi:3,prevent:3,prior:[0,6,8],probabilist:3,probabl:5,process:[3,6],produc:3,product:6,properti:3,provid:[3,5,6],quadratur:[0,1,2,3,8],rais:[3,6],reason:3,recent:3,refer:3,relat:1,remain:3,remov:[3,6],repres:[3,5,6],respect:3,rest:4,result:3,root:3,same:[3,6],sampl:[3,5],search:7,see:[3,5],select:[0,2],select_batch:2,self:3,separ:3,set:[3,5,6],shape:[3,5,6],share:3,should:3,sinc:3,slightli:3,some:3,sourc:[1,2,3,4,5,6],space:3,squar:3,str:[2,4],straightforward:3,submodul:8,support:[0,2],system:3,tensor:3,term:3,thi:[3,6],through:3,tom:3,transform:3,tupl:[3,5,6],type:[2,3,5,6],typic:6,under:6,underli:3,union:[3,6],updat:[3,6],update_model:3,use:3,using:3,valu:[3,5],valueerror:[3,6],varianc:[3,6],variance_hessian:3,variance_jacobian:3,vector:3,versa:[3,6],vice:[3,6],warp:[0,3,6],warped_gp:6,warpedgp:[3,6],whenev:3,where:3,which:[2,3,5,6],whose:5,wish:2,work:3,would:3,wrap:3,wrapper:3,write:[3,6]
,wsabi:3,wsabilgp:3,x_d:3,y_d:3},titles:["bayesquad package","bayesquad.acquisition_functions module","bayesquad.batch_selection module","bayesquad.gps module","bayesquad.plotting module","bayesquad.priors module","bayesquad.quadrature module","Welcome to Batch Bayesian quadrature\u2019s documentation!","bayesquad"],titleterms:{acquisition_funct:1,batch:7,batch_select:2,bayesian:7,bayesquad:[0,1,2,3,4,5,6,8],content:0,document:7,gps:3,indic:7,modul:[0,1,2,3,4,5,6],packag:0,plot:4,prior:5,quadratur:[6,7],submodul:0,tabl:7,welcom:7}}) -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /docs/source/bayesquad.acquisition_functions.rst: -------------------------------------------------------------------------------- 1 | bayesquad.acquisition\_functions module 2 | ======================================= 3 | 4 | .. automodule:: bayesquad.acquisition_functions 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/bayesquad.batch_selection.rst: -------------------------------------------------------------------------------- 1 | bayesquad.batch\_selection module 2 | ================================= 3 | 4 | .. automodule:: bayesquad.batch_selection 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/bayesquad.gps.rst: -------------------------------------------------------------------------------- 1 | bayesquad.gps module 2 | ==================== 3 | 4 | .. automodule:: bayesquad.gps 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/bayesquad.plotting.rst: -------------------------------------------------------------------------------- 1 | bayesquad.plotting module 2 | ========================= 3 | 4 | .. automodule:: bayesquad.plotting 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/bayesquad.priors.rst: -------------------------------------------------------------------------------- 1 | bayesquad.priors module 2 | ======================= 3 | 4 | .. automodule:: bayesquad.priors 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/bayesquad.quadrature.rst: -------------------------------------------------------------------------------- 1 | bayesquad.quadrature module 2 | =========================== 3 | 4 | .. automodule:: bayesquad.quadrature 5 | :members: 6 | :private-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/bayesquad.rst: -------------------------------------------------------------------------------- 1 | bayesquad package 2 | ================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | .. toctree:: 8 | 9 | bayesquad.acquisition_functions 10 | bayesquad.batch_selection 11 | bayesquad.gps 12 | bayesquad.plotting 13 | bayesquad.priors 14 | bayesquad.quadrature 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. 
automodule:: bayesquad 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | 15 | import os 16 | import sys 17 | sys.path.insert(0, os.path.abspath('../../')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'Batch Bayesian quadrature' 23 | copyright = '2018, Ed Wagstaff' 24 | author = 'Ed Wagstaff' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | 'sphinx.ext.autodoc', 43 | 'sphinx.ext.doctest', 44 | 'sphinx.ext.coverage', 45 | 'sphinx.ext.mathjax', 46 | 'sphinx.ext.viewcode', 47 | 'sphinx.ext.napoleon', 48 | ] 49 | 50 | # Add any paths that contain templates here, relative to this directory. 51 | templates_path = ['_templates'] 52 | 53 | # The suffix(es) of source filenames. 54 | # You can specify multiple suffix as a list of string: 55 | # 56 | # source_suffix = ['.rst', '.md'] 57 | source_suffix = '.rst' 58 | 59 | # The master toctree document. 60 | master_doc = 'index' 61 | 62 | # The language for content autogenerated by Sphinx. Refer to documentation 63 | # for a list of supported languages. 64 | # 65 | # This is also used if you do content translation via gettext catalogs. 66 | # Usually you set "language" from the command line for these cases. 67 | language = None 68 | 69 | # List of patterns, relative to source directory, that match files and 70 | # directories to ignore when looking for source files. 71 | # This pattern also affects html_static_path and html_extra_path . 72 | exclude_patterns = [] 73 | 74 | # The name of the Pygments (syntax highlighting) style to use. 75 | pygments_style = 'sphinx' 76 | 77 | 78 | # -- Options for HTML output ------------------------------------------------- 79 | 80 | # The theme to use for HTML and HTML Help pages. See the documentation for 81 | # a list of builtin themes. 82 | # 83 | html_theme = 'classic' 84 | 85 | # Theme options are theme-specific and customize the look and feel of a theme 86 | # further. For a list of options available for each theme, see the 87 | # documentation. 88 | # 89 | # html_theme_options = {} 90 | 91 | # Add any paths that contain custom static files (such as style sheets) here, 92 | # relative to this directory. 
They are copied after the builtin static files, 93 | # so a file named "default.css" will overwrite the builtin "default.css". 94 | html_static_path = ['_static'] 95 | 96 | # Custom sidebar templates, must be a dictionary that maps document names 97 | # to template names. 98 | # 99 | # The default sidebars (for documents that don't match any pattern) are 100 | # defined by theme itself. Builtin themes are using these templates by 101 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 102 | # 'searchbox.html']``. 103 | # 104 | # html_sidebars = {} 105 | 106 | 107 | # -- Options for HTMLHelp output --------------------------------------------- 108 | 109 | # Output file base name for HTML help builder. 110 | htmlhelp_basename = 'BatchBayesianquadraturedoc' 111 | 112 | 113 | # -- Options for LaTeX output ------------------------------------------------ 114 | 115 | latex_elements = { 116 | # The paper size ('letterpaper' or 'a4paper'). 117 | # 118 | # 'papersize': 'letterpaper', 119 | 120 | # The font size ('10pt', '11pt' or '12pt'). 121 | # 122 | # 'pointsize': '10pt', 123 | 124 | # Additional stuff for the LaTeX preamble. 125 | # 126 | # 'preamble': '', 127 | 128 | # Latex figure (float) alignment 129 | # 130 | # 'figure_align': 'htbp', 131 | } 132 | 133 | # Grouping the document tree into LaTeX files. List of tuples 134 | # (source start file, target name, title, 135 | # author, documentclass [howto, manual, or own class]). 136 | latex_documents = [ 137 | (master_doc, 'BatchBayesianquadrature.tex', 'Batch Bayesian quadrature Documentation', 138 | 'Ed Wagstaff', 'manual'), 139 | ] 140 | 141 | 142 | # -- Options for manual page output ------------------------------------------ 143 | 144 | # One entry per manual page. List of tuples 145 | # (source start file, name, description, authors, manual section). 146 | man_pages = [ 147 | (master_doc, 'batchbayesianquadrature', 'Batch Bayesian quadrature Documentation', 148 | [author], 1) 149 | ] 150 | 151 | 152 | # -- Options for Texinfo output ---------------------------------------------- 153 | 154 | # Grouping the document tree into Texinfo files. List of tuples 155 | # (source start file, target name, title, author, 156 | # dir menu entry, description, category) 157 | texinfo_documents = [ 158 | (master_doc, 'BatchBayesianquadrature', 'Batch Bayesian quadrature Documentation', 159 | author, 'BatchBayesianquadrature', 'One line description of project.', 160 | 'Miscellaneous'), 161 | ] 162 | 163 | 164 | # -- Extension configuration ------------------------------------------------- 165 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. Batch Bayesian quadrature documentation master file, created by 2 | sphinx-quickstart on Fri Aug 17 15:43:05 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Batch Bayesian quadrature's documentation! 7 | ===================================================== 8 | 9 | .. 
toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /docs/source/modules.rst: -------------------------------------------------------------------------------- 1 | bayesquad 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | bayesquad 8 | -------------------------------------------------------------------------------- /examples/example_1d.py: -------------------------------------------------------------------------------- 1 | """Plot bayesian quadrature on a simple 1D test function.""" 2 | from typing import Dict, Any 3 | 4 | import GPy 5 | import matplotlib.pyplot as plt 6 | import numpy as np 7 | from matplotlib.axes import Axes 8 | from numpy import newaxis 9 | 10 | import bayesquad.plotting as plotting 11 | from bayesquad.batch_selection import select_batch, LOCAL_PENALISATION 12 | from bayesquad.gps import WsabiLGP 13 | from bayesquad.priors import Gaussian 14 | from bayesquad.quadrature import IntegrandModel 15 | 16 | 17 | PLOTTING_DELAY = 0.25 18 | BATCHES = 10 19 | BATCH_SIZE = 3 20 | BATCH_METHOD = LOCAL_PENALISATION 21 | 22 | 23 | # Set up test function and WSABI-L model. 24 | 25 | def true_function(x): 26 | return ((np.sin(x) + 0.5 * np.cos(3 * x))**2)/((x/2)**2+0.3) 27 | 28 | 29 | initial_x = np.array([[-3]]).T 30 | initial_y = np.sqrt(2 * true_function(initial_x)) 31 | 32 | k = GPy.kern.RBF(1, variance=2, lengthscale=2) 33 | lik = GPy.likelihoods.Gaussian(variance=1e-10) 34 | 35 | prior = Gaussian(mean=np.array([0]), covariance=np.atleast_2d(2)) 36 | 37 | gpy_gp = GPy.core.GP(initial_x, initial_y, kernel=k, likelihood=lik) 38 | warped_gp = WsabiLGP(gpy_gp) 39 | model = IntegrandModel(warped_gp, prior) 40 | 41 | 42 | def true_integrand(x): 43 | return true_function(x) * prior(x)[:, newaxis] 44 | 45 | 46 | # Set up plotting. 
47 | 48 | LOWER_LIMIT = -4 49 | UPPER_LIMIT = 4 50 | PLOTTING_RESOLUTION = 2000 51 | 52 | 53 | def get_plotting_domain(lower_limit, upper_limit, resolution): 54 | x = np.linspace(lower_limit, upper_limit, resolution) 55 | return np.atleast_2d(x).T 56 | 57 | 58 | figure = plt.figure(figsize=(18, 8)) 59 | 60 | axes: Dict[Any, Axes] = { 61 | "left": figure.add_subplot(121), 62 | "right": figure.add_subplot(122) 63 | } 64 | 65 | for subplot_index in axes: 66 | axes[subplot_index].set_ylim(-0.1, 1) 67 | 68 | PLOTTING_DOMAIN = get_plotting_domain(LOWER_LIMIT, UPPER_LIMIT, PLOTTING_RESOLUTION) 69 | 70 | 71 | def plot_data(data, subplot, title="", color=None): 72 | axis = axes[subplot] 73 | axis.set_title(title) 74 | 75 | return axis.plot(PLOTTING_DOMAIN, data, color=color) 76 | 77 | 78 | plot_elements = { 79 | "posterior_mean": None, 80 | "uncertainty_window": None, 81 | "uncertainty_upper_bound": None, 82 | "uncertainty_lower_bound": None, 83 | "evaluated_points": None 84 | } 85 | 86 | 87 | def plot_true_function(): 88 | z = true_integrand(PLOTTING_DOMAIN) 89 | plot_data(z, "right", "True Integrand") 90 | 91 | 92 | def compute_and_plot_integrand_posterior(integrand_model: IntegrandModel): 93 | global posterior_mean 94 | 95 | z = integrand_model.posterior_mean_and_variance(PLOTTING_DOMAIN)[0].T 96 | posterior_mean = z.T 97 | plot_elements["posterior_mean"], = plot_data(z, "left", title="Posterior Mean", color="tab:red") 98 | 99 | integral_mean = integrand_model.integral_mean() 100 | axes["left"].text( 101 | x=0.5, 102 | y=0.95, 103 | s="Integral Estimate: {:.4f}".format(integral_mean), 104 | verticalalignment="top", 105 | size=12, 106 | bbox={ 107 | "facecolor": "white", 108 | "edgecolor": "black" 109 | } 110 | ) 111 | 112 | print("Integral Estimate: {}".format(integral_mean)) 113 | 114 | 115 | def plot_uncertainty_window(func): 116 | variance = np.exp(func(PLOTTING_DOMAIN)[0]).T 117 | standard_deviation = np.sqrt(variance) 118 | 119 | if plot_elements["uncertainty_window"]: 120 | plot_elements["uncertainty_window"].remove() 121 | plot_elements["uncertainty_upper_bound"].remove() 122 | plot_elements["uncertainty_lower_bound"].remove() 123 | 124 | upper_uncertainty = (posterior_mean + 2 * standard_deviation).squeeze() 125 | lower_uncertainty = (posterior_mean - 2 * standard_deviation).squeeze() 126 | domain = PLOTTING_DOMAIN.squeeze() 127 | 128 | plot_elements["uncertainty_window"] = \ 129 | axes["left"].fill_between(domain, lower_uncertainty, upper_uncertainty, color=(.6, .7, 1)) 130 | 131 | plot_elements["uncertainty_lower_bound"], = axes["left"].plot(domain, lower_uncertainty, color="tab:blue") 132 | plot_elements["uncertainty_upper_bound"], = axes["left"].plot(domain, upper_uncertainty, color="tab:blue") 133 | 134 | plt.pause(PLOTTING_DELAY) 135 | 136 | 137 | plotting.add_callback("Soft penalised log acquisition function", plot_uncertainty_window) 138 | plot_true_function() 139 | plt.pause(PLOTTING_DELAY) 140 | 141 | 142 | # Run algorithm. 
143 | 144 | for i in range(BATCHES): 145 | if plot_elements["posterior_mean"]: 146 | plot_elements["posterior_mean"].remove() 147 | 148 | compute_and_plot_integrand_posterior(model) 149 | batch = select_batch(model, BATCH_SIZE, BATCH_METHOD) 150 | 151 | X = np.array(batch) 152 | Y = true_function(X) 153 | model.update(X, Y) 154 | 155 | Y = true_integrand(X) 156 | 157 | if plot_elements["evaluated_points"]: 158 | plot_elements["evaluated_points"].remove() 159 | 160 | plot_elements["evaluated_points"], = axes["left"].plot(X, Y, "xr", markersize=10, markeredgewidth=2) 161 | 162 | plt.pause(PLOTTING_DELAY) 163 | 164 | axes["left"].plot(X, Y, "xg", markersize=5, markeredgewidth=1) 165 | 166 | gpy_gp.optimize() 167 | 168 | 169 | plot_elements["posterior_mean"].remove() 170 | plot_elements["evaluated_points"].remove() 171 | 172 | compute_and_plot_integrand_posterior(model) 173 | 174 | select_batch(model, 1, BATCH_METHOD) 175 | plt.show() 176 | -------------------------------------------------------------------------------- /examples/example_2d.py: -------------------------------------------------------------------------------- 1 | """Plot bayesian quadrature on a simple 2D test function.""" 2 | 3 | from typing import Dict, Any 4 | 5 | import GPy 6 | import matplotlib.pyplot as plt 7 | import numpy as np 8 | from matplotlib.image import AxesImage 9 | 10 | import bayesquad.plotting as plotting 11 | from bayesquad.batch_selection import select_batch, LOCAL_PENALISATION 12 | from bayesquad.gps import WsabiLGP 13 | from bayesquad.priors import Gaussian 14 | from bayesquad.quadrature import IntegrandModel 15 | 16 | 17 | # Set up test function and WSABI-L model. 18 | 19 | def true_function(x): 20 | x = np.atleast_2d(x) 21 | return np.atleast_2d((((np.sin(x) + 0.5 * np.cos(3 * x))**2)/((x/2)**2+0.3)).prod(axis=1)) 22 | 23 | 24 | initial_x = np.array([[0, 0]]) 25 | initial_y = np.sqrt(2 * true_function(initial_x)) 26 | 27 | k = GPy.kern.RBF(2, variance=2, lengthscale=2) 28 | lik = GPy.likelihoods.Gaussian(variance=1e-10) 29 | 30 | prior = Gaussian(mean=np.array([0, 0]), covariance=2*np.eye(2)) 31 | 32 | gpy_gp = GPy.core.GP(initial_x, initial_y, kernel=k, likelihood=lik) 33 | warped_gp = WsabiLGP(gpy_gp) 34 | model = IntegrandModel(warped_gp, prior) 35 | 36 | 37 | def true_integrand(x): 38 | return true_function(x) * prior(x) 39 | 40 | 41 | # Set up plotting. 
42 | 43 | LOWER_LIMIT = -4 44 | UPPER_LIMIT = 4 45 | PLOTTING_RESOLUTION = 200 46 | COLOUR_MAP = 'summer' 47 | 48 | 49 | def get_plotting_domain(lower_limit, upper_limit, resolution): 50 | x = np.linspace(lower_limit, upper_limit, resolution) 51 | y = np.linspace(lower_limit, upper_limit, resolution) 52 | x_grid, y_grid = np.meshgrid(x, y) 53 | return np.concatenate(np.dstack([x_grid, y_grid])) 54 | 55 | 56 | figure = plt.figure(figsize=(18, 6)) 57 | images: Dict[Any, AxesImage] = {} 58 | PLOTTING_DOMAIN = get_plotting_domain(LOWER_LIMIT, UPPER_LIMIT, PLOTTING_RESOLUTION) 59 | 60 | 61 | def plot_data(data, subplot, title=""): 62 | data = data.reshape(PLOTTING_RESOLUTION, PLOTTING_RESOLUTION) 63 | 64 | if subplot in images: 65 | image = images[subplot] 66 | image.set_data(data) 67 | image.set_clim(vmin=data.min(), vmax=data.max()) 68 | else: 69 | axis = figure.add_subplot(subplot) 70 | image = axis.imshow(data, cmap=plt.get_cmap(COLOUR_MAP), vmin=data.min(), vmax=data.max(), 71 | extent=[LOWER_LIMIT, UPPER_LIMIT, LOWER_LIMIT, UPPER_LIMIT], 72 | interpolation='nearest', origin='lower') 73 | images[subplot] = image 74 | 75 | axis.set_title(title) 76 | 77 | plt.pause(0.01) 78 | 79 | 80 | def plot_true_function(): 81 | z = true_integrand(PLOTTING_DOMAIN) 82 | plot_data(z, 133, "True Integrand") 83 | 84 | 85 | def plot_integrand_posterior(integrand_model: IntegrandModel): 86 | z = integrand_model.posterior_mean_and_variance(PLOTTING_DOMAIN)[0] 87 | plot_data(z, 132, "Posterior Mean") 88 | 89 | 90 | def plotting_callback(func): 91 | z = np.exp(func(PLOTTING_DOMAIN)[0]) 92 | plot_data(z, 131, "Acquisition Function") 93 | 94 | 95 | plotting.add_callback("Soft penalised log acquisition function", plotting_callback) 96 | plot_true_function() 97 | 98 | 99 | # Run algorithm. 100 | 101 | BATCHES = 25 102 | BATCH_SIZE = 4 103 | BATCH_METHOD = LOCAL_PENALISATION 104 | 105 | for i in range(BATCHES): 106 | plot_integrand_posterior(model) 107 | batch = select_batch(model, BATCH_SIZE, BATCH_METHOD) 108 | 109 | X = np.array(batch) 110 | Y = true_function(X) 111 | model.update(X, Y) 112 | 113 | gpy_gp.optimize() 114 | 115 | print("Integral: {}".format(model.integral_mean())) 116 | 117 | plot_integrand_posterior(model) 118 | plt.show() 119 | -------------------------------------------------------------------------------- /make-docs.sh: -------------------------------------------------------------------------------- 1 | sphinx-apidoc --separate --force -o docs/source/ bayesquad 2 | cd docs 3 | make html 4 | 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | scipy 2 | numpy 3 | matplotlib 4 | GPy 5 | multimethod 6 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup 4 | 5 | setup(name='bayesquad', 6 | version='0.1', 7 | description='Bayesian Quadrature Library', 8 | author='Ed Wagstaff', 9 | author_email='ed@robots.ox.ac.uk', 10 | url='https://github.com/OxfordML/bayesquad', 11 | packages=['bayesquad'], 12 | install_requires=['scipy', 'numpy', 'matplotlib', 'GPy', 'multimethod'], 13 | python_requires='>=3.5' 14 | ) 15 | --------------------------------------------------------------------------------