├── .env ├── pytest.ini ├── docs ├── examples │ ├── similarity.rst │ ├── sigma.rst │ └── examples.rst ├── api │ ├── data.rst │ ├── sigma.rst │ ├── utils.rst │ ├── dist.rst │ ├── kernels.rst │ ├── dependence.rst │ ├── similarity.rst │ ├── api.rst │ └── gp.rst ├── intro.rst ├── notebooks │ └── notebooks.rst ├── algorithms.rst ├── Makefile ├── make.bat ├── index.rst └── conf.py ├── jaxkern ├── __init__.py ├── gp │ ├── __init__.py │ ├── mean.py │ ├── loss.py │ ├── utils.py │ ├── basic.py │ └── exact.py ├── utils.py ├── dist.py ├── density.py ├── data.py ├── similarity.py ├── sigma.py ├── kernels.py └── dependence.py ├── notebooks └── uncategorized │ ├── scale │ └── nystrom │ │ ├── satire.mat │ │ ├── nystrom_demo.ipynb │ │ └── randomized_nystrom_approximation.ipynb │ ├── derivative │ ├── nopython_failure.py │ └── derivatives_numba.py │ ├── eigenmap │ ├── Untitled.ipynb │ ├── approximate_nearest_neighbours.ipynb │ └── variational_nystrom.ipynb │ └── examples │ ├── gp_regression.ipynb │ └── scaling_demos.ipynb ├── .readthedocs.yml ├── setup.cfg ├── scripts ├── demo_rv.py ├── demo_sigma.py ├── demo_dhsic.py └── demo_hsic.py ├── tests ├── test_dists.py └── test_kernels.py ├── environment.yml ├── LICENSE ├── README.md ├── .gitignore ├── Makefile └── setup.py /.env: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | testpaths = tests/ -------------------------------------------------------------------------------- /docs/examples/similarity.rst: -------------------------------------------------------------------------------- 1 | Similarity 2 | ========== -------------------------------------------------------------------------------- /docs/examples/sigma.rst: -------------------------------------------------------------------------------- 1 | Estimating Sigma 2 | ================ -------------------------------------------------------------------------------- /jaxkern/__init__.py: -------------------------------------------------------------------------------- 1 | from jaxkern.gp import * 2 | 3 | __version__ = "0.0.1" 4 | -------------------------------------------------------------------------------- /jaxkern/gp/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["gp_prior"] 2 | 3 | from .exact import gp_prior 4 | -------------------------------------------------------------------------------- /docs/api/data.rst: -------------------------------------------------------------------------------- 1 | Data 2 | ==== 3 | 4 | .. automodule:: jaxkern.data 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/api/sigma.rst: -------------------------------------------------------------------------------- 1 | Sigma 2 | ===== 3 | 4 | .. automodule:: jaxkern.sigma 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/utils.rst: -------------------------------------------------------------------------------- 1 | Utils 2 | ===== 3 | 4 | .. 
automodule:: jaxkern.utils 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /jaxkern/gp/mean.py: -------------------------------------------------------------------------------- 1 | import jax.numpy as jnp 2 | 3 | 4 | def zero_mean(x): 5 | """Mean Function""" 6 | return jnp.zeros(x.shape[0]) 7 | -------------------------------------------------------------------------------- /notebooks/uncategorized/scale/nystrom/satire.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IPL-UV/jaxkern/HEAD/notebooks/uncategorized/scale/nystrom/satire.mat -------------------------------------------------------------------------------- /docs/api/dist.rst: -------------------------------------------------------------------------------- 1 | Distances 2 | ========== 3 | 4 | .. automodule:: jaxkern.dist 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/kernels.rst: -------------------------------------------------------------------------------- 1 | Kernels 2 | ======= 3 | 4 | .. automodule:: jaxkern.kernels 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/dependence.rst: -------------------------------------------------------------------------------- 1 | Dependence 2 | ========== 3 | 4 | .. automodule:: jaxkern.dependence 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/similarity.rst: -------------------------------------------------------------------------------- 1 | Similarity 2 | ========== 3 | 4 | .. automodule:: jaxkern.similarity 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/intro.rst: -------------------------------------------------------------------------------- 1 | Intro 2 | ===== 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | :caption: Table of Contents 7 | :hidden: 8 | 9 | A package that hosts all of the kernel methods we use in the lab. -------------------------------------------------------------------------------- /docs/examples/examples.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | :caption: Table of Contents 7 | 8 | sigma.rst 9 | similarity.rst 10 | 11 | .. .. toctree:: 12 | .. :maxdepth: 2 13 | .. :caption: Contents: 14 | 15 | -------------------------------------------------------------------------------- /docs/api/api.rst: -------------------------------------------------------------------------------- 1 | jaxkern API 2 | =========== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | data.rst 9 | dependence.rst 10 | dist.rst 11 | gp.rst 12 | kernels.rst 13 | sigma.rst 14 | similarity.rst 15 | utils.rst 16 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | image: latest 5 | 6 | python: 7 | version: 3.8 8 | install: 9 | - method: pip 10 | path: . 
11 | extra_requirements: 12 | - docs 13 | - all 14 | 15 | conda: 16 | environment: environment.yml 17 | 18 | formats: [] -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | 4 | [mypy] 5 | warn_unused_ignores = True 6 | warn_redundant_casts = True 7 | ignore_missing_imports = True 8 | 9 | [mypy-pdoc.test.example_pkg.*] 10 | ignore_errors = True 11 | 12 | [tool:pytest] 13 | testpaths = tests/ 14 | 15 | [pydocstyle] 16 | convention = numpy -------------------------------------------------------------------------------- /docs/notebooks/notebooks.rst: -------------------------------------------------------------------------------- 1 | Notebooks 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | :caption: Table of Contents 7 | 8 | demo_measures.ipynb 9 | hsic_measures.ipynb 10 | estimate_sigma.ipynb 11 | rv_coeff.ipynb 12 | kernel_derivatives.ipynb 13 | kde_scratch.ipynb 14 | demo_gp.ipynb 15 | 16 | .. .. toctree:: 17 | .. :maxdepth: 2 18 | .. :caption: Contents: 19 | 20 | -------------------------------------------------------------------------------- /docs/algorithms.rst: -------------------------------------------------------------------------------- 1 | Algorithms 2 | ========== 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | :caption: Table of Contents 7 | 8 | Distance Matrices 9 | ----------------- 10 | 11 | * Euclidean Distance 12 | * Squared Euclidean Distances 13 | 14 | Kernels 15 | -------------------- 16 | 17 | * RBF Kernel 18 | * Laplacian Kernel 19 | 20 | Similarity Metrics 21 | ------------------- 22 | 23 | * HSIC 24 | * Normalized HSIC 25 | * Kernel Alignment 26 | * Centered Correlation Analysis 27 | * Distance Correlation 28 | * Maximum Mean Discrepancy (MMD) 29 | 30 | -------------------------------------------------------------------------------- /jaxkern/utils.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as np 3 | 4 | _float_eps = np.finfo("float").eps 5 | 6 | 7 | def ensure_min_eps(x: jax.numpy.ndarray) -> jax.numpy.ndarray: 8 | return np.maximum(_float_eps, x) 9 | 10 | 11 | def centering(kernel_mat: jax.numpy.ndarray) -> jax.numpy.ndarray: 12 | """Centers the kernel matrix""" 13 | n_samples = np.shape(kernel_mat)[0] 14 | 15 | identity = np.eye(n_samples) 16 | 17 | H = identity - (1.0 / n_samples) * np.ones((n_samples, n_samples)) 18 | 19 | kernel_mat = np.einsum("ij,jk,kl->il", H, kernel_mat, H) 20 | 21 | return kernel_mat 22 | -------------------------------------------------------------------------------- /scripts/demo_rv.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as np 3 | import numpy as onp 4 | 5 | from jaxkern.similarity import rv_coeff, rv_coeff_features 6 | 7 | 8 | def main(): 9 | # generate some fake linear data 10 | X = onp.random.randn(1000, 2) 11 | Y = 2 * X + 0.05 * onp.random.randn(1000, 2) 12 | 13 | # t = centered_kernel_alignment(X, Y, linear_kernel, {}, {}) 14 | rv_coeff_value = rv_coeff(X, Y) 15 | print(rv_coeff_value) 16 | 17 | # t = centered_kernel_alignment(X, Y, linear_kernel, {}, {}) 18 | rv_coeff_value = rv_coeff_features(X, Y) 19 | print(rv_coeff_value) 20 | 21 | 22 | if __name__ == "__main__": 23 | main() 24 | --------------------------------------------------------------------------------
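A quick orientation before the build files: the demo scripts in this repo lean on a small core API — pairwise kernel matrices, kernel centering, and linear-kernel similarity. Below is a minimal sketch of that workflow, assuming only the `gram`, `centering`, and `rv_coeff` functions exercised elsewhere in this dump (see `tests/test_kernels.py` and `scripts/demo_rv.py`); the toy shapes are illustrative:

```python
import numpy as onp

from jaxkern.kernels import gram, rbf_kernel
from jaxkern.similarity import rv_coeff
from jaxkern.utils import centering

# toy paired datasets (shapes are illustrative)
X = onp.random.randn(100, 2)
Y = 2 * X + 0.05 * onp.random.randn(100, 2)

# pairwise RBF kernel matrix, then center it (same call pattern as the tests)
K = gram(rbf_kernel, {"gamma": 1.0}, X, X)  # (100, 100)
K = centering(K)

# linear-kernel similarity between the two datasets
print(rv_coeff(X, Y))
```

The same `gram(kernel_fn, params, X, Y)` pattern recurs throughout the tests and demo scripts below.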
/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/api/gp.rst: -------------------------------------------------------------------------------- 1 | Gaussian Processes 2 | ================== 3 | 4 | .. automodule:: jaxkern.gp 5 | :members: 6 | :inherited-members: 7 | :show-inheritance: 8 | 9 | Exact GP 10 | -------- 11 | 12 | .. automodule:: jaxkern.gp.exact 13 | :members: 14 | :inherited-members: 15 | :show-inheritance: 16 | 17 | Loss Functions 18 | -------------- 19 | 20 | .. automodule:: jaxkern.gp.loss 21 | :members: 22 | :inherited-members: 23 | :show-inheritance: 24 | 25 | Mean Functions 26 | -------------- 27 | 28 | .. automodule:: jaxkern.gp.mean 29 | :members: 30 | :inherited-members: 31 | :show-inheritance: 32 | 33 | GP Utils 34 | -------- 35 | 36 | .. automodule:: jaxkern.gp.utils 37 | :members: 38 | :inherited-members: 39 | :show-inheritance: 40 | 41 | -------------------------------------------------------------------------------- /tests/test_dists.py: -------------------------------------------------------------------------------- 1 | import jax.numpy as np 2 | import numpy as onp 3 | from scipy.spatial.distance import pdist, squareform 4 | from sklearn.metrics.pairwise import euclidean_distances 5 | 6 | from jaxkern.dist import distmat, pdist_squareform, sqeuclidean_distance 7 | 8 | onp.random.seed(123) 9 | 10 | 11 | def test_distmat(): 12 | 13 | X = onp.random.rand(100, 2) 14 | 15 | dist = euclidean_distances(X, X, squared=True) 16 | dist_ = distmat(sqeuclidean_distance, X, X) 17 | onp.testing.assert_array_almost_equal(dist, onp.array(dist_)) 18 | 19 | 20 | def test_pdist_squareform(): 21 | X = onp.random.randn(100, 2) 22 | 23 | dist = squareform(pdist(X, metric="sqeuclidean")) 24 | dist_ = pdist_squareform(X, X) 25 | onp.testing.assert_array_almost_equal(dist, onp.array(dist_), decimal=5) 26 | -------------------------------------------------------------------------------- /notebooks/uncategorized/derivative/nopython_failure.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numba import jit 3 | 4 | 5 | @jit(nopython=True) 6 | def rbf_full_numba(x_train, x_function, K, weights, gamma): 7 | 8 | n_test, d_dims = x_function.shape 9 | n_train, d_dims = x_train.shape 10 | 11 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 12 | 13 | constant = -2 * gamma 14 | for itest in range(n_test): 15 | for itrain in range(n_train): 16 | w = weights[itrain] 17 | k = K[itest, itrain] 18 | for idim in range(d_dims): 19 | derivative[itest, itrain, idim] = \ 20 | w \ 21 | * (x_function[itest, idim] - x_train[itrain, idim]) \ 22 | * k 23 | 24 | derivative *= constant 25 | 26 | return derivative 27 | 28 | if __name__ == "__main__":
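    # Hedged demo body (an illustrative addition; the toy shapes below are
    # assumptions, not from the original file): run the jitted derivative once
    # so the nopython-mode compilation is actually exercised.
    x_train = np.random.randn(50, 3)
    x_function = np.random.randn(20, 3)
    K = np.random.rand(20, 50)  # stand-in for a precomputed RBF kernel matrix
    weights = np.random.randn(50)
    deriv = rbf_full_numba(x_train, x_function, K, weights, 1.0)
    assert deriv.shape == (20, 50, 3)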
-------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: jaxkern 2 | channels: 3 | - defaults 4 | - conda-forge 5 | dependencies: 6 | - python=3.8 7 | # Standard Libraries 8 | - numpy>=1.19 9 | - scipy>=1.5 10 | - scikit-learn>=0.23 11 | - matplotlib>=3.3 12 | - seaborn>=0.11 13 | # Documentation 14 | - conda-forge::nbsphinx 15 | - conda-forge::sphinx 16 | - conda-forge::pandoc 17 | - conda-forge::ipykernel 18 | - conda-forge::ipython 19 | - pip 20 | - pip: 21 | # Jax framework 22 | - "git+https://github.com/google/jax.git" 23 | - jaxlib 24 | - "git+https://github.com/google/objax.git" 25 | - "git+https://github.com/deepmind/chex.git" 26 | # formatting 27 | - black 28 | - isort 29 | - mypy 30 | - pytest 31 | - flake8 32 | - pylint 33 | # logging 34 | - wandb 35 | # documentation 36 | - sphinx 37 | - sphinx-tabs 38 | - sphinx_rtd_theme 39 | - sphinx-autobuild -------------------------------------------------------------------------------- /notebooks/uncategorized/eigenmap/Untitled.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Eigenmaps" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [] 16 | } 17 | ], 18 | "metadata": { 19 | "kernelspec": { 20 | "display_name": "Python 3", 21 | "language": "python", 22 | "name": "python3" 23 | }, 24 | "language_info": { 25 | "codemirror_mode": { 26 | "name": "ipython", 27 | "version": 3 28 | }, 29 | "file_extension": ".py", 30 | "mimetype": "text/x-python", 31 | "name": "python", 32 | "nbconvert_exporter": "python", 33 | "pygments_lexer": "ipython3", 34 | "version": "3.7.2" 35 | } 36 | }, 37 | "nbformat": 4, 38 | "nbformat_minor": 2 39 | } 40 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /scripts/demo_sigma.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as np 3 | import numpy as onp 4 | 5 | from jaxkern.sigma import ( 6 | estimate_sigma_median, 7 | estimate_sigma_median_kth, 8 | scotts_factor, 9 | silvermans_factor, 10 | ) 11 | 12 | 13 | def main(): 14 | # generate some fake linear data 15 | onp.random.seed(123) 16 | X = onp.random.randn(1000, 2) 17 | 18 | # median heuristic 19 | sigma = estimate_sigma_median(X, X) # estimate sigma value 20 | print(f"Median: {sigma:.4f}") 21 | # median heuristic on the kth percent of distances 22 | percent = 0.4 23 | sigma = estimate_sigma_median_kth(X, X, percent) # estimate sigma value 24 | print(f"Median (percent={percent:.1f}): {sigma:.4f}") 25 | 26 | # Scotts Method 27 | sigma = scotts_factor(X) # estimate sigma value 28 | print(f"Scott: {sigma:.4f}") 29 | 30 | # Silvermans method 31 | sigma = silvermans_factor(X) # estimate sigma value 32 | print(f"Silverman: {sigma:.4f}") 33 | 34 | 35 | if __name__ == "__main__": 36 | main() 37 | -------------------------------------------------------------------------------- /jaxkern/gp/loss.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import Callable, Dict, Tuple 3 | 4 | import jax 5 | import jax.numpy as jnp 6 | 7 | 8 | @partial(jax.jit, static_argnums=(0)) 9 | def marginal_likelihood( 10 | prior_params: Tuple[Callable, Callable], 11 | params: Dict, 12 | Xtrain: jnp.ndarray, 13 | Ytrain: jnp.ndarray, 14 | ) -> float: 15 | 16 | # unpack params 17 | (mu_f, cov_f) = prior_params 18 | 19 | # ========================== 20 | # 1. GP Prior, mu(), cov(,) 21 | # ========================== 22 | mu_x = mu_f(Xtrain) 23 | Kxx = cov_f(params, Xtrain, Xtrain) 24 | 25 | # =========================== 26 | # 2. GP Likelihood 27 | # =========================== 28 | K_gp = Kxx + (params["likelihood_noise"] + 1e-6) * jnp.eye(Kxx.shape[0]) 29 | 30 | # =========================== 31 | # 3. Log Probability 32 | # =========================== 33 | log_prob = jax.scipy.stats.multivariate_normal.logpdf( 34 | x=Ytrain.T, mean=mu_x, cov=K_gp 35 | ) 36 | 37 | # Negative Marginal log-likelihood 38 | return -log_prob.sum() 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Juan Emmanuel Johnson 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software.
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /scripts/demo_dhsic.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as np 3 | import numpy as onp 4 | 5 | from jaxkern.kernels import covariance_matrix, gram, rbf_kernel 6 | from jaxkern.dependence import hsic, nhsic_cka, nhsic_ka 7 | from jaxkern.utils import centering 8 | from jaxkern.sigma import estimate_sigma_median, gamma_from_sigma 9 | 10 | # generate some fake linear data 11 | X = onp.random.randn(100, 2) 12 | Y = 2 * X + 0.05 * onp.random.randn(100, 2) 13 | 14 | # estimate the kernel parameter 15 | sigma = estimate_sigma_median(X, X) # estimate sigma value 16 | params = {"gamma": gamma_from_sigma(sigma)} 17 | 18 | # calculate hsic 19 | hsic_value = hsic(X, Y, rbf_kernel, params, params) 20 | print(hsic_value) 21 | 22 | 23 | # derivative of hsic 24 | dXhsic_value, dYhsic_value = jax.grad(hsic, argnums=(0, 1))( 25 | X, Y, rbf_kernel, params, params 26 | ) 27 | print(dXhsic_value.shape, dYhsic_value.shape) 28 | 29 | # calculate centered kernel alignment 30 | cka_value = nhsic_cka(X, Y, rbf_kernel, params, params) 31 | print(cka_value) 32 | 33 | # calculate kernel alignment 34 | ka_value = nhsic_ka(X, Y, rbf_kernel, params, params) 35 | print(ka_value) 36 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This repo is archived, as it is no longer maintained! Please see the [JaxKern](https://github.com/JaxGaussianProcesses/JaxKern) library for more up-to-date kernel functions. 2 | 3 | --- 4 | # Kernel Methods with Jax 5 | 6 | * Authors: J. Emmanuel Johnson, ISP-Lab 7 | * Repo: [github.com/IPL-UV/jaxkern](https://github.com/IPL-UV/jaxkern) 8 | * Website: [jaxkern.readthedocs.io](https://jaxkern.readthedocs.io/en/latest/) 9 | 10 | ## Description 11 | 12 | This repo contains some code that the ISP lab uses quite frequently. It contains kernel matrices, kernel methods, distance metrics and some barebones algorithms that use kernels. It almost exclusively uses the Python package `jax` because of the speed, auto-batch handling and the ability to use the CPU, GPU and TPU with little to no code changes. 13 | 14 | --- 15 | 16 | ## Installation 17 | 18 | 1. Make sure [miniconda] is installed. 19 | 2. Clone the git repository. 20 | 21 | ```bash 22 | git clone https://github.com/ipl-uv/jaxkern.git 23 | ``` 24 | 25 | 3. Create a new environment from the `.yml` file and activate.
26 | 27 | ```bash 28 | conda env create -f environment.yml 29 | conda activate jaxkern 30 | ``` 31 | -------------------------------------------------------------------------------- /jaxkern/dist.py: -------------------------------------------------------------------------------- 1 | import functools 2 | from typing import Callable 3 | 4 | import jax 5 | import jax.numpy as np 6 | 7 | 8 | # @jax.jit 9 | def sqeuclidean_distance(x: np.array, y: np.array) -> float: 10 | return np.sum((x - y) ** 2) 11 | 12 | 13 | # @jax.jit 14 | def euclidean_distance(x: np.array, y: np.array) -> float: 15 | return np.sqrt(sqeuclidean_distance(x, y)) 16 | 17 | 18 | # @jax.jit 19 | def manhattan_distance(x: np.array, y: np.array) -> float: 20 | return np.sum(np.abs(x - y)) 21 | 22 | 23 | # @functools.partial(jax.jit, static_argnums=(0)) 24 | def distmat(func: Callable, x: np.ndarray, y: np.ndarray) -> np.ndarray: 25 | """distance matrix""" 26 | return jax.vmap(lambda x1: jax.vmap(lambda y1: func(x1, y1))(y))(x) 27 | 28 | 29 | # pdist squareform 30 | # @jax.jit 31 | def pdist_squareform(x: np.ndarray, y: np.ndarray) -> np.ndarray: 32 | """squared euclidean distance matrix 33 | 34 | Notes 35 | ----- 36 | This is equivalent to the scipy commands 37 | 38 | >>> from scipy.spatial.distance import pdist, squareform 39 | >>> dists = squareform(pdist(X, metric='sqeuclidean')) 40 | """ 41 | return distmat(sqeuclidean_distance, x, y) 42 | -------------------------------------------------------------------------------- /jaxkern/density.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | import jax 3 | import jax.numpy as np 4 | 5 | 6 | def gaussian_kernel(x: np.ndarray) -> np.ndarray: 7 | return np.exp(-0.5 * x ** 2) / np.sqrt(2 * np.pi) 8 | 9 | 10 | def kde_pdf( 11 | x: np.ndarray, samples: np.ndarray, bandwidth: float, kernel: Callable 12 | ) -> np.ndarray: 13 | n_samples = samples.shape[0] 14 | dists = (x - samples) / bandwidth 15 | density = kernel(dists) 16 | return density.sum() / bandwidth / n_samples 17 | 18 | 19 | def kde_pdf_gaussian( 20 | x: np.ndarray, samples: np.ndarray, bandwidth: float 21 | ) -> np.ndarray: 22 | n_samples = samples.shape[0] 23 | dists = (x - samples) / bandwidth 24 | density = gaussian_kernel(dists) 25 | return density.sum() / bandwidth / n_samples 26 | 27 | 28 | def kde_cdf_gaussian( 29 | x: np.ndarray, samples: np.ndarray, bandwidth: float 30 | ) -> np.ndarray: 31 | n_samples = samples.shape[0] 32 | 33 | # normalize samples 34 | low = (-np.inf - samples) / bandwidth 35 | x = (x - samples) / bandwidth 36 | 37 | # evaluate integral 38 | integral = jax.scipy.special.ndtr(x) - jax.scipy.special.ndtr(low) 39 | 40 | # normalize distribution 41 | x_cdf = integral.sum() / n_samples 42 | 43 | return x_cdf 44 | -------------------------------------------------------------------------------- /scripts/demo_hsic.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as np 3 | import numpy as onp 4 | 5 | from jaxkern.kernels import covariance_matrix, gram, rbf_kernel 6 | from jaxkern.dependence import nhsic_cka, hsic, nhsic_ka, nhsic_cca 7 | from jaxkern.utils import centering 8 | from jaxkern.sigma import estimate_sigma_median, gamma_from_sigma 9 | 10 | 11 | def main(): 12 | # generate some fake linear data 13 | onp.random.seed(123) 14 | X = onp.random.randn(1000, 2) 15 | Y = 2 * X + 0.05 * onp.random.randn(1000, 2) 16 | 17 | # estimate
the kernel parameters 18 | sigma_x = estimate_sigma_median(X, X) # estimate sigma value 19 | params_x = {"gamma": gamma_from_sigma(sigma_x)} 20 | sigma_y = estimate_sigma_median(Y, Y) # estimate sigma value 21 | params_y = {"gamma": gamma_from_sigma(sigma_y)} 22 | 23 | # calculate hsic 24 | 25 | hsic_value = hsic(X, Y, rbf_kernel, params_x, params_y) 26 | print(f"HSIC: {hsic_value:.4f}") 27 | 28 | # calculate centered kernel alignment 29 | cka_value = nhsic_cka(X, Y, rbf_kernel, params_x, params_y) 30 | print(f"nHSIC (CKA): {cka_value:.4f}") 31 | 32 | nhsic_cca_value = nhsic_cca(X, Y, rbf_kernel, params_x, params_y) 33 | print(f"nHSIC (CCA): {nhsic_cca_value:.4f}") 34 | # calculate kernel alignment 35 | ka_value = nhsic_ka(X, Y, rbf_kernel, params_x, params_y) 36 | print(f"nHSIC (KA): {ka_value:.4f}") 37 | 38 | 39 | if __name__ == "__main__": 40 | main() 41 | -------------------------------------------------------------------------------- /jaxkern/gp/utils.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import Callable, Dict, Tuple 3 | 4 | import jax 5 | import jax.numpy as jnp 6 | 7 | 8 | def cholesky_factorization(K: jnp.ndarray, Y: jnp.ndarray) -> Tuple[Tuple[jnp.ndarray, bool], jnp.ndarray]: 9 | """Cholesky Factorization""" 10 | # factor the kernel matrix (cho_factor returns (c, lower)) 11 | L = jax.scipy.linalg.cho_factor(K, lower=True) 12 | 13 | # solve for the weights 14 | weights = jax.scipy.linalg.cho_solve(L, Y) 15 | 16 | return L, weights 17 | 18 | 19 | def saturate(params): 20 | """Softplus transform on the params""" 21 | return {ikey: jax.nn.softplus(ivalue) for (ikey, ivalue) in params.items()} 22 | 23 | 24 | # @partial(jax.jit, static_argnums=(0, 1, 2, 3)) 25 | def get_factorizations( 26 | params: Dict, 27 | prior_params: Tuple[Callable, Callable], 28 | X: jnp.ndarray, 29 | Y: jnp.ndarray, 30 | X_new: jnp.ndarray, 31 | ) -> Tuple[Tuple[jnp.ndarray, bool], jnp.ndarray]: 32 | """Cholesky Factorization""" 33 | (mu_func, cov_func) = prior_params 34 | 35 | # ========================== 36 | # 1. GP PRIOR 37 | # ========================== 38 | mu_x = mu_func(X) 39 | Kxx = cov_func(params, X, X) 40 | 41 | # =========================== 42 | # 2. CHOLESKY FACTORIZATION 43 | # =========================== 44 | L, alpha = cholesky_factorization( 45 | Kxx + (params["likelihood_noise"] + 1e-7) * jnp.eye(Kxx.shape[0]), 46 | Y - mu_x.reshape(-1, 1), 47 | ) 48 | 49 | return L, alpha 50 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to jax-kern's documentation! 2 | ==================================== 3 | 4 | .. note:: 5 | Still a work in progress! 6 | 7 | - **Git repository:** http://github.com/IPL-UV/jaxkern 8 | - **Documentation:** http://jaxkern.readthedocs.io 9 | - **Bug reports:** http://github.com/IPL-UV/jaxkern/issues 10 | 11 | 12 | .. toctree:: 13 | :maxdepth: 1 14 | :caption: Table of Contents 15 | 16 | intro.rst 17 | algorithms.rst 18 | api/api.rst 19 | notebooks/notebooks.rst 20 | 21 | 22 | 23 | 24 | 25 | Description 26 | ----------- 27 | 28 | This repo contains some code that the ISP lab uses quite frequently. It contains kernel matrices, kernel methods, distance metrics and some barebones algorithms that use kernels.
It almost exclusively uses the Python package `jax` because of the speed, auto-batch handling and the ability to use the CPU, GPU and TPU with little to no code changes. 29 | 30 | 31 | Installation 32 | ------------ 33 | 34 | .. tabs:: 35 | 36 | .. group-tab:: pip 37 | 38 | We can just install it using pip. 39 | 40 | .. code-block:: bash 41 | 42 | pip install "git+https://github.com/ipl-uv/jaxkern.git" 43 | 44 | .. group-tab:: git 45 | 46 | This is more if you want to contribute. 47 | 48 | 1. Make sure [miniconda] is installed. 49 | 2. Clone the git repository. 50 | 51 | .. code-block:: bash 52 | 53 | git clone https://github.com/ipl-uv/jaxkern.git 54 | 55 | 3. Create a new environment from the `.yml` file and activate. 56 | 57 | .. code-block:: bash 58 | 59 | conda env create -f environment.yml 60 | conda activate jaxkern 61 | -------------------------------------------------------------------------------- /jaxkern/data.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | import jax.numpy as np 4 | import numpy as onp 5 | from sklearn.utils import check_random_state 6 | 7 | 8 | def get_data( 9 | N: int = 30, 10 | input_noise: float = 0.15, 11 | output_noise: float = 0.15, 12 | N_test: int = 400, 13 | ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, None]: 14 | """Generates a simple non-linear function""" 15 | onp.random.seed(0) 16 | X = np.linspace(-1, 1, N) 17 | Y = X + 0.2 * np.power(X, 3.0) + 0.5 * np.power(0.5 + X, 2.0) * np.sin(4.0 * X) 18 | Y += output_noise * onp.random.randn(N) 19 | Y -= np.mean(Y) 20 | Y /= np.std(Y) 21 | 22 | X += input_noise * onp.random.randn(N) 23 | 24 | assert X.shape == (N,) 25 | assert Y.shape == (N,) 26 | 27 | X_test = np.linspace(-1.2, 1.2, N_test) 28 | 29 | return X[:, None], Y[:, None], X_test[:, None], None 30 | 31 | 32 | def near_square_wave( 33 | n_train: int = 80, 34 | input_noise: float = 0.15, 35 | output_noise: float = 0.3, 36 | n_test: int = 400, 37 | random_state: int = 123, 38 | ): 39 | """Generates a near-square wave""" 40 | # function 41 | f = lambda x: np.sin(1.0 * np.pi / 1.6 * np.cos(5 + 0.5 * x)) 42 | 43 | # create clean inputs 44 | x_mu = np.linspace(-10, 10, n_train) 45 | 46 | # clean outputs 47 | y = f(x_mu) 48 | 49 | # generate noise 50 | x_rng = check_random_state(random_state) 51 | y_rng = check_random_state(random_state + 1) 52 | 53 | # noisy inputs 54 | x = x_mu + input_noise * x_rng.randn(x_mu.shape[0]) 55 | 56 | # noisy outputs 57 | y = f(x_mu) + output_noise * y_rng.randn(x_mu.shape[0]) 58 | 59 | # test points 60 | x_test = np.linspace(-12, 12, n_test) + x_rng.randn(n_test) 61 | y_test = f(np.linspace(-12, 12, n_test)) 62 | x_test = np.sort(x_test) 63 | 64 | return x[:, None], y[:, None], x_test[:, None], y_test -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | \.vscode/ 3 | \scripts/*.png 4 | \wandb/ 5 | \.eggs/ 6 | \.pytest_cache/ 7 | 8 | # vim 9 | *.swp 10 | *.vim 11 | 12 | # Byte-compiled / optimized / DLL files 13 | __pycache__/ 14 | *.py[cod] 15 | *$py.class 16 | 17 | # C extensions 18 | *.so 19 | 20 | # Distribution / packaging 21 | .Python 22 | build/ 23 | develop-eggs/ 24 | dist/ 25 | downloads/ 26 | eggs/ 27 | .eggs/ 28 | lib/ 29 | lib64/ 30 | parts/ 31 | sdist/ 32 | var/ 33 | wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | MANIFEST 38 | 39 | # PyInstaller 40 | # Usually these files are written by
a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .coverage 53 | .coverage.* 54 | .cache 55 | nosetests.xml 56 | coverage.xml 57 | *.cover 58 | .hypothesis/ 59 | .pytest_cache/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Django stuff: 66 | *.log 67 | local_settings.py 68 | db.sqlite3 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | \docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # pyenv 87 | .python-version 88 | 89 | # celery beat schedule file 90 | celerybeat-schedule 91 | 92 | # SageMath parsed files 93 | *.sage.py 94 | 95 | # Environments 96 | .env 97 | .venv 98 | env/ 99 | venv/ 100 | ENV/ 101 | env.bak/ 102 | venv.bak/ 103 | 104 | # Spyder project settings 105 | .spyderproject 106 | .spyproject 107 | 108 | # Rope project settings 109 | .ropeproject 110 | 111 | # mkdocs documentation 112 | /site 113 | 114 | # mypy 115 | .mypy_cache/ -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: conda format style types black test lint check notebooks 2 | .DEFAULT_GOAL = help 3 | 4 | PYTHON = python 5 | VERSION = 3.8 6 | NAME = jaxkern 7 | ROOT = ./ 8 | PIP = pip 9 | CONDA = conda 10 | SHELL = bash 11 | PKGROOT = jaxkern 12 | 13 | help: ## Display this help 14 | @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) 15 | 16 | ##@ Install Environments 17 | 18 | conda: ## setup a conda environment 19 | $(info Installing the environment) 20 | @printf "Creating conda environment...\n" 21 | ${CONDA} env create -f environment.yml 22 | @printf "\n\nConda environment created! \033[1;34mRun \`conda activate ${NAME}\` to activate it.\033[0m\n\n\n" 23 | 24 | ##@ Update Environments 25 | envupdate: ## update conda environment 26 | @printf "Updating conda environment...\n" 27 | ${CONDA} env update -f environment.yml --prune 28 | @printf "Conda environment updated!" 29 | 30 | ##@ Formatting 31 | black: ## Format code in-place using black. 32 | black ${PKGROOT}/ tests/ -l 79 33 | 34 | format: ## Code styling - black, isort 35 | black ${PKGROOT}/ tests; 36 | @printf "\033[1;34mBlack passes!\033[0m\n\n" 37 | isort ${PKGROOT}/ tests; 38 | @printf "\033[1;34misort passes!\033[0m\n\n" 39 | 40 | ##@ Styling 41 | style: ## Code linting - flake8, pydocstyle 42 | @printf "Checking code style with flake8...\n" 43 | flake8 ${PKGROOT}/ 44 | @printf "\033[1;34mFlake8 passes!\033[0m\n\n" 45 | @printf "Checking code style with pydocstyle...\n" 46 | pydocstyle ${PKGROOT}/ 47 | @printf "\033[1;34mpydocstyle passes!\033[0m\n\n" 48 | 49 | lint: format style types ## Lint code using black, isort, flake8, pydocstyle and mypy. 50 | check: lint test # Both lint and test code. Runs `make lint` followed by `make test`.
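# Hedged usage note (illustrative comments, not from the original Makefile):
# a typical local workflow with the targets in this file, assuming the conda
# environment is active:
#
#   make format   # black + isort
#   make style    # flake8 + pydocstyle
#   make types    # mypy
#   make test     # pytest
#   make check    # lint + test in one shot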
51 | 52 | ##@ Type Checking 53 | types: ## Type checking with mypy 54 | @printf "Checking code type signatures with mypy...\n" 55 | python -m mypy ${PKGROOT}/ 56 | @printf "\033[1;34mMypy passes!\033[0m\n\n" 57 | 58 | ##@ Testing 59 | test: ## Test code using pytest. 60 | @printf "\033[1;34mRunning tests with pytest...\033[0m\n\n" 61 | pytest -v jaxkern tests 62 | @printf "\033[1;34mPyTest passes!\033[0m\n\n" 63 | 64 | ##@ Notebooks 65 | notebooks_to_docs: ## Move notebooks to docs notebooks directory 66 | @printf "\033[1;34mCreating notebook directory...\033[0m\n" 67 | mkdir -p docs/notebooks 68 | @printf "\033[1;34mRemoving old notebooks...\033[0m\n" 69 | rm -rf docs/notebooks/*.ipynb 70 | @printf "\033[1;34mCopying Notebooks to directory...\033[0m\n" 71 | cp notebooks/docs/*.ipynb docs/notebooks 72 | @printf "\033[1;34mDone!\033[0m\n" 73 | 74 | ##@ Documentation 75 | livehtml: notebooks_to_docs ## Build mkdocs documentation live 76 | @printf "\033[1;34mStarting live docs with sphinx-autobuild...\033[0m\n" 77 | sphinx-autobuild ${ROOT}/docs ${ROOT}/docs/_build/html --port 8802 78 | 79 | -------------------------------------------------------------------------------- /tests/test_kernels.py: -------------------------------------------------------------------------------- 1 | import jax.numpy as np 2 | import numpy as onp 3 | 4 | from jaxkern.kernels import rbf_kernel, covariance_matrix, gram 5 | from jaxkern.utils import centering 6 | from sklearn.metrics.pairwise import rbf_kernel as rbf_sklearn 7 | from sklearn.preprocessing import KernelCenterer 8 | 9 | onp.random.seed(123) 10 | 11 | 12 | def test_rbf_kernel_gram_1d(): 13 | 14 | rng = onp.random.RandomState(123) 15 | n_samples = 100 16 | 17 | X = rng.rand(n_samples) 18 | 19 | # X 20 | K_sk = rbf_sklearn(X[:, np.newaxis], X[:, np.newaxis], gamma=1.0) 21 | 22 | K = gram(rbf_kernel, {"gamma": 1.0}, X, X) 23 | 24 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 25 | 26 | Y = 10 * X + 0.1 * rng.randn(n_samples) 27 | 28 | # Y 29 | K_sk = rbf_sklearn(Y[:, np.newaxis], Y[:, np.newaxis], gamma=1.0) 30 | 31 | K = gram(rbf_kernel, {"gamma": 1.0}, Y, Y) 32 | 33 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 34 | 35 | # X AND Y 36 | K_sk = rbf_sklearn(X[:, np.newaxis], Y[:, np.newaxis], gamma=1.0) 37 | 38 | K = gram(rbf_kernel, {"gamma": 1.0}, X, Y) 39 | 40 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 41 | 42 | 43 | def test_rbf_kernel_gram_2d(): 44 | 45 | rng = onp.random.RandomState(123) 46 | n_samples, n_features = 100, 2 47 | X = onp.random.rand(n_samples, n_features) 48 | 49 | # sklearn rbf_kernel 50 | K_sk = rbf_sklearn(X, X, gamma=1.0) 51 | 52 | K = covariance_matrix(rbf_kernel, {"gamma": 1.0}, X, X) 53 | 54 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 55 | 56 | Y = 10 * X + 0.1 * rng.randn(n_samples, n_features) 57 | 58 | # sklearn rbf_kernel 59 | K_sk = rbf_sklearn(Y, Y, gamma=1.0) 60 | 61 | K = gram(rbf_kernel, {"gamma": 1.0}, Y, Y) 62 | 63 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 64 | 65 | 66 | def test_rbf_kernel_cov_1d(): 67 | 68 | X = onp.random.rand(100) 69 | 70 | # sklearn rbf_kernel 71 | K_sk = rbf_sklearn(X[:, np.newaxis], X[:, np.newaxis], gamma=1.0) 72 | 73 | K = covariance_matrix(rbf_kernel, {"gamma": 1.0}, X, X) 74 | 75 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 76 | 77 | 78 | def test_rbf_kernel_cov_2d(): 79 | 80 | X = onp.random.rand(100, 2) 81 | 82 | # sklearn rbf_kernel 83 | K_sk = rbf_sklearn(X, X, gamma=1.0) 84 | 85 | K = 
gram(rbf_kernel, {"gamma": 1.0}, X, X) 86 | 87 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) 88 | 89 | 90 | def test_centering(): 91 | 92 | n_samples = 100 93 | 94 | X = onp.random.rand(n_samples) 95 | 96 | # sklearn rbf_kernel 97 | K_sk = rbf_sklearn(X[:, np.newaxis], X[:, np.newaxis], gamma=1.0) 98 | 99 | K_sk = KernelCenterer().fit_transform(K_sk) 100 | 101 | K = gram(rbf_kernel, {"gamma": 1.0}, X, X) 102 | # H = np.eye(n_samples) - (1.0 / n_samples) * np.ones((n_samples, n_samples)) 103 | # K = np.einsum("ij,jk,kl->il", H, K, H) 104 | # K = np.dot(H, np.dot(K, H)) 105 | K = centering(K) 106 | 107 | onp.testing.assert_array_almost_equal(K_sk, onp.array(K)) -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | 16 | sys.path.insert(0, os.path.abspath("..")) 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "jax-kern" 22 | copyright = "2020, J. Emmanuel Johnson" 23 | author = "J. Emmanuel Johnson" 24 | 25 | # The full version, including alpha/beta/rc tags 26 | release = "0.0.1" 27 | 28 | 29 | # -- General configuration --------------------------------------------------- 30 | # Do not syntax-highlight code blocks by default. 31 | highlight_language = "none" 32 | 33 | # Add any Sphinx extension module names here, as strings. They can be 34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 35 | # ones. 36 | extensions = [ 37 | "sphinx_rtd_theme", 38 | "sphinx.ext.autodoc", 39 | "sphinx.ext.napoleon", 40 | "sphinx.ext.todo", 41 | "sphinx.ext.mathjax", 42 | "sphinx.ext.viewcode", 43 | "sphinx.ext.githubpages", 44 | "sphinx_tabs.tabs", 45 | "nbsphinx", 46 | ] 47 | 48 | # Napoleon settings 49 | napoleon_google_docstring = False 50 | napoleon_numpy_docstring = True 51 | 52 | # Add any paths that contain templates here, relative to this directory. 53 | templates_path = ["_templates"] 54 | 55 | # List of patterns, relative to source directory, that match files and 56 | # directories to ignore when looking for source files. 57 | # This pattern also affects html_static_path and html_extra_path. 58 | exclude_patterns = ["_build", "**.ipynb_checkpoints"] 59 | 60 | 61 | # -- Options for HTML output ------------------------------------------------- 62 | 63 | # The theme to use for HTML and HTML Help pages. See the documentation for 64 | # a list of builtin themes. 65 | # 66 | 67 | # Required theme setup 68 | html_theme = "sphinx_rtd_theme" 69 | 70 | # Set link name generated in the top bar. 71 | html_title = "Jaxkern" 72 | 73 | # Material theme options (see theme.conf for more information) 74 | html_theme_options = { 75 | # Set the name of the project to appear in the navigation.
76 | "nav_title": "jaxkern", 77 | # Set your GA account ID to enable tracking 78 | "google_analytics_account": "UA-XXXXX", 79 | # Specify a base_url used to generate sitemap.xml. If not 80 | # specified, then no sitemap will be built. 81 | "base_url": "https://project.github.io/project", 82 | # Set the color and the accent color 83 | "color_primary": "black", 84 | "color_accent": "black", 85 | # Set the repo location to get a badge with stats 86 | "repo_url": "https://github.com/ipl-uv/jaxkern/", 87 | "repo_name": "jaxkern", 88 | # Visible levels of the global TOC; -1 means unlimited 89 | "globaltoc_depth": 3, 90 | # If False, expand all TOC entries 91 | "globaltoc_collapse": False, 92 | # If True, show hidden TOC entries 93 | "globaltoc_includehidden": False, 94 | } 95 | 96 | # Add any paths that contain custom static files (such as style sheets) here, 97 | # relative to this directory. They are copied after the builtin static files, 98 | # so a file named "default.css" will overwrite the builtin "default.css". 99 | html_static_path = ["_static"] 100 | -------------------------------------------------------------------------------- /jaxkern/gp/basic.py: -------------------------------------------------------------------------------- 1 | import functools 2 | from typing import Callable, Dict, Tuple 3 | 4 | import jax 5 | import jax.numpy as jnp 6 | import matplotlib.pyplot as plt 7 | import numpy as onp 8 | import tqdm 9 | from jax.experimental import optimizers 10 | 11 | from jaxkern.data import get_data 12 | from jaxkern.gp.exact import posterior 13 | from jaxkern.kernels import gram, rbf_kernel 14 | from jaxkern.gp.loss import marginal_likelihood 15 | from jaxkern.gp.mean import zero_mean 16 | from jaxkern.gp.utils import cholesky_factorization, get_factorizations, saturate 17 | 18 | plt.style.use(["seaborn-talk"]) 19 | 20 | 21 | def main(): 22 | X, y, Xtest, ytest = get_data(50) 23 | 24 | # PRIOR FUNCTIONS (mean, covariance) 25 | mu_f = zero_mean 26 | cov_f = functools.partial(gram, rbf_kernel) 27 | gp_priors = (mu_f, cov_f) 28 | 29 | # Kernel, Likelihood parameters 30 | params = { 31 | "gamma": 2.0, 32 | # 'length_scale': 1.0, 33 | # 'var_f': 1.0, 34 | "likelihood_noise": 1.0, 35 | } 36 | # saturate parameters with softplus 37 | params = saturate(params) 38 | 39 | # LOSS FUNCTION 40 | mll_loss = jax.jit(functools.partial(marginal_likelihood, gp_priors)) 41 | 42 | # GRADIENT LOSS FUNCTION 43 | dloss = jax.jit(jax.grad(mll_loss)) 44 | 45 | # STEP FUNCTION 46 | @jax.jit 47 | def step(params, X, y, opt_state): 48 | # calculate loss 49 | loss = mll_loss(params, X, y) 50 | 51 | # calculate gradient of loss 52 | grads = dloss(params, X, y) 53 | 54 | # update optimizer state 55 | opt_state = opt_update(0, grads, opt_state) 56 | 57 | # update params 58 | params = get_params(opt_state) 59 | 60 | return params, opt_state, loss 61 | 62 | # TRAINING PARAMETERS 63 | n_epochs = 500 64 | learning_rate = 0.01 65 | losses = list() 66 | 67 | # initialize optimizer 68 | opt_init, opt_update, get_params = optimizers.rmsprop(step_size=learning_rate) 69 | 70 | # initialize parameters 71 | opt_state = opt_init(params) 72 | 73 | # get initial parameters 74 | params = get_params(opt_state) 75 | 76 | postfix = {} 77 | 78 | with tqdm.trange(n_epochs) as bar: 79 | 80 | for i in bar: 81 | # 1 step - optimize function 82 | params, opt_state, value = step(params, X, y, opt_state) 83 | 84 | # update progress-bar postfix with current params 85 | postfix = {} 86 | for ikey in params.keys(): 87 | postfix[ikey] =
f"{jax.nn.softplus(params[ikey]):.2f}" 88 | 89 | # save loss values 90 | losses.append(value.mean()) 91 | 92 | # update progress bar 93 | postfix["Loss"] = f"{onp.array(losses[-1]):.2f}" 94 | bar.set_postfix(postfix) 95 | # saturate params 96 | params = saturate(params) 97 | 98 | # Posterior Predictions 99 | mu_y, var_y = posterior(params, gp_priors, X, y, Xtest, True, False) 100 | 101 | # Uncertainty 102 | uncertainty = 1.96 * jnp.sqrt(var_y.squeeze()) 103 | 104 | fig, ax = plt.subplots(ncols=2, figsize=(10, 5)) 105 | ax[0].scatter(X, y, c="red", label="Training Data") 106 | ax[0].plot( 107 | Xtest.squeeze(), 108 | mu_y.squeeze(), 109 | label=r"Predictive Mean", 110 | color="black", 111 | linewidth=3, 112 | ) 113 | ax[0].fill_between( 114 | Xtest.squeeze(), 115 | mu_y.squeeze() + uncertainty, 116 | mu_y.squeeze() - uncertainty, 117 | alpha=0.3, 118 | color="darkorange", 119 | label=f"Predictive Std (95% Confidence)", 120 | ) 121 | ax[0].legend(fontsize=12) 122 | ax[1].plot(losses, label="losses") 123 | plt.tight_layout() 124 | fig.savefig("figures/jaxgp/examples/1d_example.png") 125 | plt.show() 126 | 127 | 128 | if __name__ == "__main__": 129 | main() 130 | -------------------------------------------------------------------------------- /jaxkern/similarity.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as np 3 | 4 | from jaxkern.dependence import nhsic_cka 5 | from jaxkern.dist import distmat, sqeuclidean_distance 6 | from jaxkern.kernels import linear_kernel 7 | 8 | 9 | def rv_coeff(X: jax.numpy.ndarray, Y: jax.numpy.ndarray) -> float: 10 | """Calculates the RV coefficient 11 | 12 | This stands for the rho-Vector component and it is a non-linear 13 | extension to the Pearson correlation coefficient. 14 | 15 | .. math:: 16 | :nowrap: 17 | 18 | \\begin{equation} 19 | \\rho V(\mathbf{x,y}) = \\ 20 | \\frac{\\text{Tr}\left( \mathbf{xx}^\\top \\ 21 | \mathbf{yy}^\\top \\right)}{\\ 22 | \sqrt{\\text{Tr}\left( \\ 23 | \mathbf{xx}^\\top \\right)^2\\ 24 | \\text{Tr}\\left( \mathbf{yy}^\\top \\ 25 | \\right)^2}} 26 | \\end{equation} 27 | 28 | where 29 | :math:`\mathbf{x},\mathbf{y} \in \mathbb{R}^{N \\times D}` 30 | 31 | Parameters 32 | ---------- 33 | X : jax.numpy.ndarray 34 | the input array, (n_samples, n_features) 35 | 36 | Y : jax.numpy.ndarray 37 | the input array, (n_samples, m_features) 38 | 39 | Returns 40 | ------- 41 | coeff : float 42 | the rv coefficient 43 | 44 | Notes 45 | ----- 46 | 47 | This is simply the HSIC method but with a linear kernel. 48 | 49 | References 50 | ---------- 51 | 52 | .. [1] Josse & Holmes, *Measuring Multivariate Association and Beyond*, 53 | Statistics Surveys, 2016, Volume 10, pg. 132-167 54 | 55 | """ 56 | return nhsic_cka(X, Y, linear_kernel, {}, {}) 57 | 58 | 59 | def rv_coeff_features(X, Y): 60 | """Calculates the RV coefficient in the feature space 61 | 62 | This stands for the rho-Vector component and it is a non-linear 63 | extension to the Pearson correlation coefficient. 64 | 65 | .. 
math:: 66 | :nowrap: 67 | 68 | \\begin{equation} 69 | \\rho V(\mathbf{x,y}) = \\ 70 | \\frac{\\text{Tr}\left( \mathbf{x^\\top x} \\ 71 | \mathbf{y^\\top y} \\right)}{\\ 72 | \sqrt{\\text{Tr}\left( \\ 73 | \mathbf{x^\\top x} \\right)^2\\ 74 | \\text{Tr}\\left( \mathbf{y^\\top y} \\ 75 | \\right)^2}} 76 | \\end{equation} 77 | 78 | where 79 | :math:`\mathbf{x},\mathbf{y} \in \mathbb{R}^{N \\times D}` 80 | 81 | Parameters 82 | ---------- 83 | X : jax.numpy.ndarray 84 | the input array, (n_samples, n_features) 85 | 86 | Y : jax.numpy.ndarray 87 | the input array, (n_samples, m_features) 88 | 89 | Returns 90 | ------- 91 | coeff : float 92 | the rv coefficient 93 | 94 | Notes 95 | ----- 96 | 97 | Sometimes this can be more efficient/effective if the 98 | number of features is greater than the number of samples. 99 | 100 | References 101 | ---------- 102 | 103 | .. [1] Josse & Holmes, *Measuring Multivariate Association and Beyond*, 104 | Statistics Surveys, 2016, Volume 10, pg. 132-167 105 | 106 | """ 107 | return nhsic_cka(X.T, Y.T, linear_kernel, {}, {}) 108 | 109 | 110 | def distance_corr(X: jax.numpy.ndarray, sigma=1.0) -> float: 111 | """Distance correlation""" 112 | X = distmat(sqeuclidean_distance, X, X) 113 | X = np.exp(-X / (2 * sigma ** 2)) 114 | return np.mean(X) 115 | 116 | 117 | def energy_distance(X: np.ndarray, Y: np.ndarray) -> float: 118 | """Energy distance""" 119 | n_samples, m_samples = X.shape[0], Y.shape[0] 120 | a00 = -1.0 / (n_samples * n_samples) 121 | a11 = -1.0 / (m_samples * m_samples) 122 | a01 = 1.0 / (n_samples * m_samples) 123 | 124 | # sums of pairwise euclidean distances 125 | dist_x = distmat(euclidean_distance, X, X).sum() 126 | dist_y = distmat(euclidean_distance, Y, Y).sum() 127 | dist_xy = distmat(euclidean_distance, X, Y).sum() 128 | 129 | return 2 * a01 * dist_xy + a00 * dist_x + a11 * dist_y -------------------------------------------------------------------------------- /jaxkern/gp/exact.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import Callable, Dict, Tuple 3 | 4 | import jax 5 | import jax.numpy as jnp 6 | 7 | from jaxkern.gp.utils import get_factorizations 8 | 9 | 10 | def gp_prior( 11 | params: Dict, mu_f: Callable, cov_f: Callable, x: jnp.ndarray 12 | ) -> Tuple[jnp.ndarray, jnp.ndarray]: 13 | return mu_f(x), cov_f(params, x, x) 14 | 15 | 16 | @partial(jax.jit, static_argnums=(1, 5, 6)) 17 | def posterior( 18 | params: Dict, 19 | prior_params: Tuple[Callable, Callable], 20 | X: jnp.ndarray, 21 | Y: jnp.ndarray, 22 | X_new: jnp.ndarray, 23 | likelihood_noise: bool = False, 24 | return_cov: bool = False, 25 | ) -> Tuple[jnp.ndarray, jnp.ndarray]: 26 | (mu_func, cov_func) = prior_params 27 | 28 | # ============================== 29 | # Get Factorizations (L, alpha) 30 | # ============================== 31 | L, alpha = get_factorizations( 32 | params=params, 33 | prior_params=prior_params, 34 | X=X, 35 | Y=Y, 36 | X_new=X_new, 37 | ) 38 | 39 | # ================================ 40 | # 4. PREDICTIVE MEAN DISTRIBUTION 41 | # ================================ 42 | 43 | # calculate transform kernel 44 | KxX = cov_func(params, X_new, X) 45 | 46 | # Calculate the Mean 47 | mu_y = jnp.dot(KxX, alpha) 48 | 49 | # ===================================== 50 | # 5.
PREDICTIVE COVARIANCE DISTRIBUTION 51 | # ===================================== 52 | v = jax.scipy.linalg.cho_solve(L, KxX.T) 53 | 54 | # Calculate kernel matrix for inputs 55 | Kxx = cov_func(params, X_new, X_new) 56 | 57 | cov_y = Kxx - jnp.dot(KxX, v) 58 | 59 | # Likelihood Noise 60 | if likelihood_noise is True: 61 | cov_y += params["likelihood_noise"] 62 | 63 | # return variance (diagonals of covariance) 64 | if return_cov is not True: 65 | cov_y = jnp.diag(cov_y) 66 | 67 | return mu_y, cov_y 68 | 69 | 70 | @partial(jax.jit, static_argnums=(1,)) 71 | def predictive_mean( 72 | params: Dict, 73 | prior_params: Tuple[Callable, Callable], 74 | X: jnp.ndarray, 75 | Y: jnp.ndarray, 76 | X_new: jnp.ndarray, 77 | ) -> jnp.ndarray: 78 | 79 | (_, cov_func) = prior_params 80 | 81 | # ============================== 82 | # Get Factorizations (L, alpha) 83 | # ============================== 84 | L, alpha = get_factorizations( 85 | params=params, 86 | prior_params=prior_params, 87 | X=X, 88 | Y=Y, 89 | X_new=X_new, 90 | ) 91 | 92 | # ================================ 93 | # 4. PREDICTIVE MEAN DISTRIBUTION 94 | # ================================ 95 | 96 | # calculate transform kernel 97 | KxX = cov_func(params, X_new, X) 98 | 99 | # Calculate the Mean 100 | mu_y = jnp.dot(KxX, alpha) 101 | 102 | return mu_y.squeeze() 103 | 104 | 105 | @partial(jax.jit, static_argnums=(1, 5, 6)) 106 | def predictive_variance( 107 | params: Dict, 108 | prior_params: Tuple[Callable, Callable], 109 | X: jnp.ndarray, 110 | Y: jnp.ndarray, 111 | X_new: jnp.ndarray, 112 | likelihood_noise: bool = False, 113 | return_cov: bool = False, 114 | ) -> jnp.ndarray: 115 | 116 | (mu_func, cov_func) = prior_params 117 | 118 | # ============================== 119 | # Get Factorizations (L, alpha) 120 | # ============================== 121 | L, alpha = get_factorizations( 122 | params=params, 123 | prior_params=prior_params, 124 | X=X, 125 | Y=Y, 126 | X_new=X_new, 127 | ) 128 | 129 | # ===================================== 130 | # 5. PREDICTIVE COVARIANCE DISTRIBUTION 131 | # ===================================== 132 | 133 | # calculate transform kernel 134 | KxX = cov_func(params, X_new, X) 135 | 136 | v = jax.scipy.linalg.cho_solve(L, KxX.T) 137 | 138 | # Calculate kernel matrix for inputs 139 | Kxx = cov_func(params, X_new, X_new) 140 | 141 | cov_y = Kxx - jnp.dot(KxX, v) 142 | 143 | # Likelihood Noise 144 | if likelihood_noise is True: 145 | cov_y += params["likelihood_noise"] 146 | 147 | # return variance (diagonals of covariance) 148 | if return_cov is not True: 149 | cov_y = jnp.diag(cov_y) 150 | return cov_y.squeeze() 151 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Note: To use the 'upload' functionality of this file, you must: 5 | # $ pipenv install twine --dev 6 | 7 | import io 8 | import os 9 | import sys 10 | from shutil import rmtree 11 | 12 | from setuptools import Command, find_packages, setup 13 | 14 | # Package meta-data. 15 | NAME = "jaxkern" 16 | DESCRIPTION = "Python similarity measures." 17 | URL = "https://github.com/ipl-uv/jaxkern" 18 | EMAIL = "jemanjohnson34@gmail.com" 19 | AUTHOR = "J. Emmanuel Johnson" 20 | REQUIRES_PYTHON = ">=3.7.0" 21 | VERSION = "0.0.1" 22 | 23 | # What packages are required for this module to be executed?
-------------------------------------------------------------------------------- /setup.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | # Note: To use the 'upload' functionality of this file, you must:
5 | #   $ pipenv install twine --dev
6 | 
7 | import io
8 | import os
9 | import sys
10 | from shutil import rmtree
11 | 
12 | from setuptools import Command, find_packages, setup
13 | 
14 | # Package meta-data.
15 | NAME = "jaxkern"
16 | DESCRIPTION = "Python similarity measures."
17 | URL = "https://github.com/ipl-uv/jaxkern"
18 | EMAIL = "jemanjohnson34@gmail.com"
19 | AUTHOR = "J. Emmanuel Johnson"
20 | REQUIRES_PYTHON = ">=3.7.0"
21 | VERSION = "0.0.1"
22 | 
23 | # What packages are required for this module to be executed?
24 | REQUIRED = [
25 |     "jax",
26 |     "jaxlib",
27 |     "numpy",
28 | ]
29 | 
30 | # What packages are optional?
31 | EXTRAS = {
32 |     "dev": ["black", "isort", "mypy"],
33 |     "tests": ["pytest", "scikit-learn"],
34 |     "extras": ["matplotlib"],
35 |     "docs": ["sphinx_rtd_theme", "sphinx", "sphinx-tabs", "nbsphinx", "pandoc"],
36 | }
37 | 
38 | # The rest you shouldn't have to touch too much :)
39 | # ------------------------------------------------
40 | # Except, perhaps the License and Trove Classifiers!
41 | # If you do change the License, remember to change the Trove Classifier for that!
42 | 
43 | here = os.path.abspath(os.path.dirname(__file__))
44 | 
45 | # Import the README and use it as the long-description.
46 | # Note: this will only work if 'README.md' is present in your MANIFEST.in file!
47 | try:
48 |     with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
49 |         long_description = "\n" + f.read()
50 | except FileNotFoundError:
51 |     long_description = DESCRIPTION
52 | 
53 | # Load the package's __version__.py module as a dictionary.
54 | about = {}
55 | if not VERSION:
56 |     project_slug = NAME.lower().replace("-", "_").replace(" ", "_")
57 |     with open(os.path.join(here, project_slug, "__version__.py")) as f:
58 |         exec(f.read(), about)
59 | else:
60 |     about["__version__"] = VERSION
61 | 
62 | 
63 | class UploadCommand(Command):
64 |     """Support setup.py upload."""
65 | 
66 |     description = "Build and publish the package."
67 |     user_options = []
68 | 
69 |     @staticmethod
70 |     def status(s):
71 |         """Prints things in bold."""
72 |         print("\033[1m{0}\033[0m".format(s))
73 | 
74 |     def initialize_options(self):
75 |         pass
76 | 
77 |     def finalize_options(self):
78 |         pass
79 | 
80 |     def run(self):
81 |         try:
82 |             self.status("Removing previous builds…")
83 |             rmtree(os.path.join(here, "dist"))
84 |         except OSError:
85 |             pass
86 | 
87 |         self.status("Building Source and Wheel (universal) distribution…")
88 |         os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable))
89 | 
90 |         self.status("Uploading the package to PyPI via Twine…")
91 |         os.system("twine upload dist/*")
92 | 
93 |         self.status("Pushing git tags…")
94 |         os.system("git tag v{0}".format(about["__version__"]))
95 |         os.system("git push --tags")
96 | 
97 |         sys.exit()
98 | 
99 | 
100 | # Where the magic happens:
101 | setup(
102 |     name=NAME,
103 |     version=about["__version__"],
104 |     description=DESCRIPTION,
105 |     long_description=long_description,
106 |     long_description_content_type="text/markdown",
107 |     author=AUTHOR,
108 |     author_email=EMAIL,
109 |     python_requires=REQUIRES_PYTHON,
110 |     url=URL,
111 |     packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
112 |     # If your package is a single module, use this instead of 'packages':
113 |     # py_modules=['mypackage'],
114 |     # entry_points={
115 |     #     'console_scripts': ['mycli=mymodule:cli'],
116 |     # },
117 |     setup_requires=["setuptools-yaml"],
118 |     metadata_yaml="environment.yml",
119 |     install_requires=REQUIRED,
120 |     extras_require=EXTRAS,
121 |     include_package_data=True,
122 |     license="MIT",
123 |     classifiers=[
124 |         # Trove classifiers
125 |         # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
126 |         "License :: OSI Approved :: MIT License",
127 |         "Programming Language :: Python",
128 |         "Programming Language :: Python :: 3",
129 |         "Programming Language :: Python :: 3.8",
130 |         "Programming Language :: Python :: Implementation :: CPython",
131 |         "Programming Language :: Python :: Implementation :: PyPy",
132 |     ],
133 |     # $ setup.py publish 
support.
134 |     cmdclass={"upload": UploadCommand},
135 | )
136 | 
-------------------------------------------------------------------------------- /notebooks/uncategorized/eigenmap/approximate_nearest_neighbours.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stdout",
10 | "output_type": "stream",
11 | "text": [
12 | "The autoreload extension is already loaded. To reload it, use:\n",
13 | "  %reload_ext autoreload\n"
14 | ]
15 | }
16 | ],
17 | "source": [
18 | "import numpy as np\n",
19 | "\n",
20 | "\n",
21 | "%load_ext autoreload\n",
22 | "%autoreload 2"
23 | ]
24 | },
25 | {
26 | "cell_type": "markdown",
27 | "metadata": {},
28 | "source": [
29 | "## K-Nearest Neighbours Class"
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": null,
35 | "metadata": {},
36 | "outputs": [],
37 | "source": [
38 | "class ApproximateNearestNeighbors(object):\n",
39 | "    \"\"\"ApproximateNearestNeighbors implements some nearest neighbor algorithms\n",
40 | "\n",
41 | "    Parameters\n",
42 | "    ----------\n",
43 | "    n_neighbors : int, default = 2\n",
44 | "        number of nearest neighbors\n",
45 | "\n",
46 | "    radius : float, default = 1.5\n",
47 | "        length of the radius for the neighbors in distance\n",
48 | "\n",
49 | "    algorithm : str, default = 'brute'\n",
50 | "        ['brute'|'annoy'|'cyflann']\n",
51 | "        algorithm to find the k-nearest or radius-nearest neighbors\n",
52 | "\n",
53 | "    algorithm_kwargs : dict, default = None\n",
54 | "        a dictionary of key word values for specific arguments on each algorithm\n",
55 | "\n",
56 | "    References\n",
57 | "    ----------\n",
58 | "    * sklearn: brute, kd_tree, ball_tree\n",
59 | "        https://goo.gl/2noI11\n",
60 | "    * annoy\n",
61 | "        https://github.com/spotify/annoy\n",
62 | "    * nmslib (TODO)\n",
63 | "        https://github.com/nmslib/nmslib\n",
64 | "    * pynndescent (TODO)\n",
65 | "        https://github.com/lmcinnes/pynndescent\n",
66 | "    * cyflann (TODO)\n",
67 | "        https://github.com/dougalsutherland/cyflann\n",
68 | "\n",
69 | "    Information\n",
70 | "    -----------\n",
71 | "    Author : J. 
Emmanuel Johnson\n", 72 | " Date : 5th February, 2017\n", 73 | " Email : emanjohnson91@gmail.com\n", 74 | " \"\"\"\n", 75 | " def __init__(self, n_neighbors=2, radius=1.5, method='knn', algorithm='brute',\n", 76 | " random_state=123, algorithm_kwargs=None):\n", 77 | " self.n_neighbours = n_neighbours\n", 78 | " self.radius = radius\n", 79 | " self.method = method\n", 80 | " self.algorithm = algorithm\n", 81 | " self.algorithm_kwargs = algorithm_kwargs\n", 82 | " self.random_state = random_state\n", 83 | " \n", 84 | " " 85 | ] 86 | } 87 | ], 88 | "metadata": { 89 | "kernelspec": { 90 | "display_name": "Python 3", 91 | "language": "python", 92 | "name": "python3" 93 | }, 94 | "language_info": { 95 | "codemirror_mode": { 96 | "name": "ipython", 97 | "version": 3 98 | }, 99 | "file_extension": ".py", 100 | "mimetype": "text/x-python", 101 | "name": "python", 102 | "nbconvert_exporter": "python", 103 | "pygments_lexer": "ipython3", 104 | "version": "3.7.2" 105 | }, 106 | "toc": { 107 | "base_numbering": 1, 108 | "nav_menu": {}, 109 | "number_sections": true, 110 | "sideBar": true, 111 | "skip_h1_title": false, 112 | "title_cell": "Table of Contents", 113 | "title_sidebar": "Contents", 114 | "toc_cell": false, 115 | "toc_position": {}, 116 | "toc_section_display": true, 117 | "toc_window_display": false 118 | }, 119 | "varInspector": { 120 | "cols": { 121 | "lenName": 16, 122 | "lenType": 16, 123 | "lenVar": 40 124 | }, 125 | "kernels_config": { 126 | "python": { 127 | "delete_cmd_postfix": "", 128 | "delete_cmd_prefix": "del ", 129 | "library": "var_list.py", 130 | "varRefreshCmd": "print(var_dic_list())" 131 | }, 132 | "r": { 133 | "delete_cmd_postfix": ") ", 134 | "delete_cmd_prefix": "rm(", 135 | "library": "var_list.r", 136 | "varRefreshCmd": "cat(var_dic_list()) " 137 | } 138 | }, 139 | "types_to_exclude": [ 140 | "module", 141 | "function", 142 | "builtin_function_or_method", 143 | "instance", 144 | "_Feature" 145 | ], 146 | "window_display": false 147 | } 148 | }, 149 | "nbformat": 4, 150 | "nbformat_minor": 2 151 | } 152 | -------------------------------------------------------------------------------- /notebooks/uncategorized/scale/nystrom/nystrom_demo.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "ModuleNotFoundError", 10 | "evalue": "No module named 'nystrom'", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", 15 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0msklearn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmetrics\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpairwise\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mpairwise_kernels\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mnystrom\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mgenerate_nystrom_data\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnystrom_kernel\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m 
\u001b[0mwarnings\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfilterwarnings\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'ignore'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 16 | "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'nystrom'" 17 | ] 18 | } 19 | ], 20 | "source": [ 21 | "import numpy as np\n", 22 | "import warnings \n", 23 | "from scipy.spatial.distance import pdist\n", 24 | "from sklearn.metrics.pairwise import pairwise_kernels\n", 25 | "import os\n", 26 | "from nystrom import generate_nystrom_data, nystrom_kernel\n", 27 | "\n", 28 | "warnings.filterwarnings('ignore')" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "metadata": {}, 34 | "source": [ 35 | "### Get Data" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "data = generate_nystrom_data()" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "### Kernel Nystrom Approximation" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 3, 57 | "metadata": {}, 58 | "outputs": [ 59 | { 60 | "name": "stdout", 61 | "output_type": "stream", 62 | "text": [ 63 | "Error (randomized): 4.915\n", 64 | "Error (arpack): 3.620\n" 65 | ] 66 | } 67 | ], 68 | "source": [ 69 | "sigma = np.mean(pdist(data, metric='euclidean'))\n", 70 | "gamma = 1 / (2 * sigma**2)\n", 71 | "n_jobs = 1\n", 72 | "kernel = 'rbf'\n", 73 | "\n", 74 | "K = pairwise_kernels(data, metric=kernel, n_jobs=n_jobs, gamma=gamma)\n", 75 | "\n", 76 | "# -------------------\n", 77 | "# Randomized\n", 78 | "# -------------------\n", 79 | "\n", 80 | "n_col_indices = 200 # number of columns to sample\n", 81 | "n_components = 100 # rank\n", 82 | "random_state = 123 # reproducibility\n", 83 | "svd = 'randomized' # svd algorithm\n", 84 | "\n", 85 | "U_approx, D_approx, C = nystrom_kernel(\n", 86 | " K, n_col_indices, n_components=n_components, \n", 87 | " random_state=random_state, \n", 88 | " svd=svd)\n", 89 | "\n", 90 | "K_approx = U_approx.dot(D_approx).dot(U_approx.T)\n", 91 | "\n", 92 | "err = np.linalg.norm(K - K_approx, 'fro')\n", 93 | "print('Error ({}): {:.3f}'.format(svd, err))\n", 94 | "\n", 95 | "# --------------------\n", 96 | "# ARPACK\n", 97 | "# --------------------\n", 98 | "\n", 99 | "svd = 'arpack'\n", 100 | "\n", 101 | "U_approx, D_approx, C = nystrom_kernel(\n", 102 | " K, n_col_indices, n_components=None, \n", 103 | " random_state=random_state, \n", 104 | " svd=svd)\n", 105 | "\n", 106 | "K_approx = U_approx.dot(D_approx).dot(U_approx.T)\n", 107 | "\n", 108 | "err = np.linalg.norm(K - K_approx, 'fro')\n", 109 | "print('Error ({}): {:.3f}'.format(svd, err))" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 4, 115 | "metadata": {}, 116 | "outputs": [ 117 | { 118 | "name": "stdout", 119 | "output_type": "stream", 120 | "text": [ 121 | "ARPACK Version\n", 122 | "53.2 ms ± 964 µs per loop (mean ± std. dev. of 7 runs, 10 loops each)\n", 123 | "Randomized Version\n", 124 | "53.1 ms ± 1.48 ms per loop (mean ± std. dev. 
of 7 runs, 10 loops each)\n"
125 | ]
126 | }
127 | ],
128 | "source": [
129 | "print('ARPACK Version')\n",
130 | "%timeit nystrom_kernel(K, n_col_indices, n_components=None, random_state=random_state, svd='arpack')\n",
131 | "\n",
132 | "print('Randomized Version')\n",
133 | "%timeit nystrom_kernel(K, n_col_indices, n_components=None, random_state=random_state, svd='randomized')"
134 | ]
135 | },
136 | {
137 | "cell_type": "code",
138 | "execution_count": null,
139 | "metadata": {},
140 | "outputs": [],
141 | "source": []
142 | }
143 | ],
144 | "metadata": {
145 | "kernelspec": {
146 | "display_name": "Python 3",
147 | "language": "python",
148 | "name": "python3"
149 | },
150 | "language_info": {
151 | "codemirror_mode": {
152 | "name": "ipython",
153 | "version": 3
154 | },
155 | "file_extension": ".py",
156 | "mimetype": "text/x-python",
157 | "name": "python",
158 | "nbconvert_exporter": "python",
159 | "pygments_lexer": "ipython3",
160 | "version": "3.7.2"
161 | }
162 | },
163 | "nbformat": 4,
164 | "nbformat_minor": 2
165 | }
166 | 
-------------------------------------------------------------------------------- /jaxkern/sigma.py: --------------------------------------------------------------------------------
1 | import jax
2 | import jax.numpy as np
3 | 
4 | from jaxkern.dist import pdist_squareform
5 | from jaxkern.utils import ensure_min_eps
6 | 
7 | 
8 | def estimate_sigma_median(X: np.ndarray, Y: np.ndarray) -> float:
9 |     """Estimate sigma using the median distance
10 | 
11 |     Parameters
12 |     ----------
13 |     X : jax.numpy.ndarray
14 |         input data (n_samples, n_features)
15 |     Y : jax.numpy.ndarray
16 |         input data (n_samples, n_features)
17 | 
18 |     Returns
19 |     -------
20 |     sigma : float
21 |         the estimated sigma
22 |     """
23 |     # compute distance matrix
24 |     dists = pdist_squareform(X, Y)
25 | 
26 |     # get the median of the non-zero distances
27 |     sigma = np.median(dists[np.nonzero(dists)])
28 | 
29 |     return sigma
30 | 
31 | 
32 | def estimate_sigma_mean_kth(
33 |     X: np.ndarray, Y: np.ndarray, percent: float = 0.3
34 | ) -> float:
35 |     """Estimates the sigma using the mean kth distance
36 | 
37 |     This calculates the sigma value using the kth percent
38 |     of the distances. The mean of those distances is the
39 |     new sigma value.
40 | 
41 |     Parameters
42 |     ----------
43 |     X : jax.numpy.ndarray
44 |         input data (n_samples, n_features)
45 | 
46 |     Y : jax.numpy.ndarray
47 |         input data (n_samples, n_features)
48 | 
49 |     percent : float
50 |         the fraction of the sorted distances defining the kth
51 |         neighbour (default=0.3)
52 | 
53 |     Returns
54 |     -------
55 |     sigma : float
56 |         the estimated sigma
57 |     """
58 | 
59 |     # find the kth distance
60 |     dists = _estimate_sigma_kth(X=X, Y=Y, percent=percent)
61 | 
62 |     # mean of the non-zero distances
63 |     sigma = np.mean(dists[np.nonzero(dists)])
64 |     return sigma
65 | 
66 | 
67 | def estimate_sigma_median_kth(
68 |     X: np.ndarray, Y: np.ndarray, percent: float = 0.3
69 | ) -> float:
70 |     """Estimates the sigma using the median kth distance
71 | 
72 |     This calculates the sigma value using the kth percent
73 |     of the distances. The median of those distances is the
74 |     new sigma value.
75 | 
76 |     Parameters
77 |     ----------
78 |     X : jax.numpy.ndarray
79 |         input data (n_samples, n_features)
80 | 
81 |     Y : jax.numpy.ndarray
82 |         input data (n_samples, n_features)
83 | 
84 |     percent : float
85 |         the fraction of the sorted distances defining the kth
86 |         neighbour (default=0.3)
87 | 
88 |     Returns
89 |     -------
90 |     sigma : float
91 |         the estimated sigma
92 |     """
93 | 
94 |     # find the kth distance
95 |     dists = _estimate_sigma_kth(X=X, Y=Y, percent=percent)
96 | 
97 |     # median of the non-zero distances
98 |     sigma = np.median(dists[np.nonzero(dists)])
99 |     return sigma
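100 | 
101 | 
102 | # Usage sketch (illustrative, not part of the original API): all three
103 | # estimators take two (n_samples, n_features) arrays; the toy data here
104 | # is assumed for demonstration only.
105 | def _demo_sigma_estimators():
106 |     X = jax.random.normal(jax.random.PRNGKey(0), (100, 3))
107 | 
108 |     sigma_median = estimate_sigma_median(X, X)
109 |     sigma_mean_kth = estimate_sigma_mean_kth(X, X, percent=0.3)
110 |     sigma_median_kth = estimate_sigma_median_kth(X, X, percent=0.3)
111 |     return sigma_median, sigma_mean_kth, sigma_median_kth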
112 | 
113 | 
114 | def _estimate_sigma_kth(
115 |     X: np.ndarray, Y: np.ndarray, percent: float = 0.3
116 | ) -> np.ndarray:
117 |     """Private function to compute the kth percent distances."""
118 |     # compute distance matrix
119 |     dists = pdist_squareform(X, Y)
120 | 
121 |     # find the kth distance
122 |     sigma = kth_percent_distance(dists=dists, k=percent)
123 |     return sigma
124 | 
125 | 
126 | def scotts_factor(X: np.ndarray) -> float:
127 |     """Scotts Method to estimate the length scale of the
128 |     rbf kernel.
129 | 
130 |     .. math::
131 | 
132 |         \\sigma = n^{-\\frac{1}{d+4}}
133 | 
134 |     Parameters
135 |     ----------
136 |     X : jax.numpy.ndarray
137 |         Input array
138 | 
139 |     Returns
140 |     -------
141 |     sigma : float
142 |         the length scale estimated
143 | 
144 |     References
145 |     ----------
146 |     .. [1] Scott et al, *Multivariate Density Estimation:
147 |         Theory, Practice, and Visualization*, New York, John Wiley, 1992
148 |     """
149 |     n_samples, n_features = X.shape
150 |     return np.power(n_samples, -1 / (n_features + 4.0))
151 | 
152 | 
153 | def silvermans_factor(X: np.ndarray) -> float:
154 |     """Silvermans method used to estimate the length scale
155 |     of the rbf kernel.
156 | 
157 |     .. math::
158 | 
159 |         \\sigma = \\left( \\frac{n(d + 2)}{4} \\right)^{-\\frac{1}{d + 4}}
160 | 
161 |     Parameters
162 |     ----------
163 |     X : jax.numpy.ndarray,
164 |         Input array (n_samples, n_features)
165 | 
166 |     Returns
167 |     -------
168 |     sigma : float
169 |         the length scale estimated
170 | 
171 |     References
172 |     ----------
173 |     .. [1] Silverman, B. W., *Density Estimation for Statistics
174 |         and Data Analysis*, London: Chapman and Hall., (1986)
175 |     """
176 |     n_samples, n_features = X.shape
177 | 
178 |     base = (n_samples * (n_features + 2.0)) / 4.0
179 | 
180 |     return np.power(base, -1 / (n_features + 4.0))
181 | 
182 | 
183 | def kth_percent_distance(dists: np.ndarray, k: float = 0.3) -> np.ndarray:
184 |     """kth percent distance in a gram matrix
185 | 
186 |     This calculates the kth percent in an (NxN) matrix.
187 |     It sorts all distance values and then retrieves the
188 |     kth value as a percentage of the number of samples.
189 | 
190 |     Parameters
191 |     ----------
192 |     dists : jax.numpy.ndarray
193 |         the distance matrix already calculated (n_samples, n_samples)
194 | 
195 |     k : float
196 |         the fraction of samples defining the kth neighbour (default=0.3)
197 | 
198 |     Returns
199 |     -------
200 |     k_dist : jax.numpy.ndarray
201 |         the kth-neighbour distance for each sample
202 |     """
203 |     # index of the kth-percent neighbour
204 |     kth_sample = int(k * dists.shape[0])
205 | 
206 |     # take the kth neighbour of each sample at that distance
207 |     k_dist = np.sort(dists)[:, kth_sample]
208 | 
209 |     return k_dist
210 | 
211 | 
212 | def gamma_to_sigma(gamma: float = 1.0) -> float:
213 |     """Convert gamma to sigma
214 | 
215 |     .. math::
216 | 
217 |         \\sigma = \\frac{1}{\\sqrt{2 \\gamma}}
218 |     """
219 |     return ensure_min_eps(np.sqrt(1.0 / (2 * gamma)))
220 | 
221 | 
222 | def sigma_to_gamma(sigma: float = 0.1) -> float:
223 |     """Convert sigma to gamma
224 | 
225 |     .. math::
226 | 
227 |         \\gamma = \\frac{1}{2 \\sigma^2}
228 |     """
229 |     return ensure_min_eps(1.0 / (2 * sigma ** 2))
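230 | 
231 | 
232 | # Sketch: the two conversions above are inverses of one another
233 | # (up to the epsilon floor applied by ensure_min_eps).
234 | if __name__ == "__main__":
235 |     sigma = 0.5
236 |     gamma = sigma_to_gamma(sigma)        # 1 / (2 * 0.5**2) = 2.0
237 |     print(gamma, gamma_to_sigma(gamma))  # 2.0, ~0.5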
-------------------------------------------------------------------------------- /jaxkern/kernels.py: --------------------------------------------------------------------------------
1 | import functools
2 | from typing import Callable, Dict
3 | import jax
4 | import jax.numpy as np
5 | 
6 | from jaxkern.dist import sqeuclidean_distance
7 | 
8 | 
9 | # @functools.partial(jax.jit, static_argnums=(0))
10 | def gram(
11 |     func: Callable,
12 |     params: Dict,
13 |     x: np.ndarray,
14 |     y: np.ndarray,
15 | ) -> np.ndarray:
16 |     """Computes the gram matrix.
17 | 
18 |     Given a function `Callable` and some `params`, we can
19 |     use the `jax.vmap` function to calculate the gram matrix
20 |     as the function applied to each pair of points.
21 | 
22 |     Parameters
23 |     ----------
24 |     func : Callable
25 |         a callable function (kernel or distance)
26 |     params : Dict
27 |         the parameters needed for the kernel
28 |     x : jax.numpy.ndarray
29 |         input dataset (n_samples, n_features)
30 |     y : jax.numpy.ndarray
31 |         other input dataset (n_samples, n_features)
32 | 
33 |     Returns
34 |     -------
35 |     mat : jax.numpy.ndarray
36 |         the gram matrix.
37 | 
38 |     Examples
39 |     --------
40 | 
41 |     >>> gram(rbf_kernel, {"gamma": 1.0}, X, Y)
42 |     """
43 |     return jax.vmap(lambda x1: jax.vmap(lambda y1: func(params, x1, y1))(y))(x)
44 | 
45 | 
46 | def covariance_matrix(
47 |     func: Callable,
48 |     params: Dict[str, float],
49 |     x: np.ndarray,
50 |     y: np.ndarray,
51 | ) -> np.ndarray:
52 |     """Computes the covariance matrix.
53 | 
54 |     Given a function `Callable` and some `params`, we can
55 |     use the `jax.vmap` function to calculate the covariance matrix
56 |     as the function applied to each pair of points.
57 | 
58 |     Parameters
59 |     ----------
60 |     func : Callable
61 |         a callable function (kernel or distance)
62 |     params : Dict
63 |         the parameters needed for the kernel
64 |     x : jax.numpy.ndarray
65 |         input dataset (n_samples, n_features)
66 |     y : jax.numpy.ndarray
67 |         other input dataset (n_samples, n_features)
68 | 
69 |     Returns
70 |     -------
71 |     mat : jax.numpy.ndarray
72 |         the covariance matrix.
73 | 
74 |     Notes
75 |     -----
76 | 
77 |     There is little difference between this function
78 |     and `gram`
79 | 
80 |     See Also
81 |     --------
82 |     jaxkern.kernels.gram
83 | 
84 |     Examples
85 |     --------
86 | 
87 |     >>> covariance_matrix(rbf_kernel, {"gamma": 1.0}, X, Y)
88 |     """
89 |     mapx1 = jax.vmap(lambda x, y: func(params, x, y), in_axes=(0, None), out_axes=0)
90 |     mapx2 = jax.vmap(lambda x, y: mapx1(x, y), in_axes=(None, 0), out_axes=1)
91 |     return mapx2(x, y)
92 | 
93 | 
94 | def linear_kernel(params: Dict[str, float], x: np.ndarray, y: np.ndarray) -> np.ndarray:
95 |     """Linear kernel
96 | 
97 |     .. math:: k(\\mathbf{x,y}) = \\mathbf{x}^\\top \\mathbf{y} = \\sum_i x_i y_i
98 | 
99 |     Parameters
100 |     ----------
101 |     params : None
102 |         kept for compatibility with the other kernels
103 |     x : jax.numpy.ndarray
104 |         one input sample
105 |     y : jax.numpy.ndarray
106 |         another input sample
107 | 
108 |     Returns
109 |     -------
110 |     kernel : jax.numpy.ndarray
111 |         the kernel evaluated at the single pair (x, y); use `gram` or
112 |         `covariance_matrix` to build the full (n_samples, n_samples) matrix
113 | 
114 |     """
115 |     return np.sum(x * y)
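116 | 
117 | 
118 | # Sketch: building a full Gram matrix from the pairwise linear kernel.
119 | # The toy data is assumed for demonstration only.
120 | def _demo_linear_gram():
121 |     X = jax.random.normal(jax.random.PRNGKey(0), (50, 3))
122 |     K = gram(linear_kernel, {}, X, X)
123 |     return K  # shape (50, 50)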
124 | 
125 | 
126 | def rbf_kernel(params: Dict[str, float], x: np.ndarray, y: np.ndarray) -> np.ndarray:
127 |     """Radial Basis Function (RBF) Kernel.
128 | 
129 |     The most popular kernel in all of kernel methods.
130 | 
131 |     .. math::
132 | 
133 |         k(\\mathbf{x,y}) = \\exp \\left( - \\gamma \\,
134 |         ||\\mathbf{x} - \\mathbf{y}||^2_2 \\right)
135 | 
136 |     Parameters
137 |     ----------
138 |     params : Dict
139 |         the parameters needed for the kernel
140 |     x : jax.numpy.ndarray
141 |         input dataset (n_samples, n_features)
142 |     y : jax.numpy.ndarray
143 |         other input dataset (n_samples, n_features)
144 | 
145 |     Returns
146 |     -------
147 |     kernel_mat : jax.numpy.ndarray
148 |         the kernel matrix (n_samples, n_samples)
149 | 
150 |     References
151 |     ----------
152 |     .. [1] David Duvenaud, *Kernel Cookbook*
153 |     """
154 |     return np.exp(-params["gamma"] * sqeuclidean_distance(x, y))
155 | 
156 | 
157 | # ARD Kernel
158 | def ard_kernel(params: Dict[str, float], x: np.ndarray, y: np.ndarray) -> np.ndarray:
159 |     """Automatic Relevance Determination (ARD) Kernel.
160 | 
161 |     This is an RBF kernel with a variable length scale. It
162 |     *should* be the most popular kernel of all of the kernel
163 |     methods.
164 | 
165 |     .. math::
166 | 
167 |         k(\\mathbf{x,y}) = \\sigma_f^2
168 |         \\exp \\left( - \\left|\\left| \\frac{\\mathbf{x}}{\\sigma}
169 |         - \\frac{\\mathbf{y}}{\\sigma} \\right|\\right|^2_2 \\right)
170 | 
171 |     where :math:`\\sigma` is `params["length_scale"]` and
172 |     :math:`\\sigma_f^2` is `params["var_f"]`.
173 | 
174 |     Parameters
175 |     ----------
176 |     params : Dict
177 |         the parameters needed for the kernel
178 |     x : jax.numpy.ndarray
179 |         input dataset (n_samples, n_features)
180 |     y : jax.numpy.ndarray
181 |         other input dataset (n_samples, n_features)
182 | 
183 |     Returns
184 |     -------
185 |     kernel_mat : jax.numpy.ndarray
186 |         the kernel matrix (n_samples, n_samples)
187 | 
188 |     References
189 |     ----------
190 |     .. [1] David Duvenaud, *Kernel Cookbook*
191 |     """
192 |     # divide by the length scale
193 |     x = x / params["length_scale"]
194 |     y = y / params["length_scale"]
195 | 
196 |     # return the ard kernel
197 |     return params["var_f"] * np.exp(-sqeuclidean_distance(x, y))
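198 | 
199 | 
200 | # Sketch: a full RBF covariance matrix between two datasets; the gamma
201 | # value and the toy data are assumed for demonstration only.
202 | def _demo_rbf_covariance():
203 |     X = jax.random.normal(jax.random.PRNGKey(0), (20, 3))
204 |     Y = jax.random.normal(jax.random.PRNGKey(1), (30, 3))
205 |     K = covariance_matrix(rbf_kernel, {"gamma": 1.0}, X, Y)
206 |     return K  # shape (20, 30)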
207 | 
208 | 
209 | # Rational Quadratic Kernel
210 | def rq_kernel(params: Dict[str, float], x: np.ndarray, y: np.ndarray) -> np.ndarray:
211 |     """Rational Quadratic Function (RQF) Kernel.
212 | 
213 |     A generalization of the RBF kernel function. It is
214 |     equivalent to adding many RBF kernels together.
215 | 
216 |     .. math::
217 | 
218 |         k(\\mathbf{x,y}) = \\lambda^2 \\left( 1 +
219 |         \\frac{||\\mathbf{x} - \\mathbf{y}||^2_2}{2 \\sigma^2}
220 |         \\right)^{-\\alpha}
221 | 
222 |     where
223 |     :math:`\\lambda^2`
224 |     is the variance and
225 |     :math:`\\sigma^2`
226 |     is the length scale
227 | 
228 |     Parameters
229 |     ----------
230 |     params : Dict
231 |         the parameters needed for the kernel
232 |     x : jax.numpy.ndarray
233 |         input dataset (n_samples, n_features)
234 |     y : jax.numpy.ndarray
235 |         other input dataset (n_samples, n_features)
236 | 
237 |     Notes
238 |     -----
239 | 
240 |     This kernel converges to the RBF kernel as
241 |     :math:`\\alpha \\rightarrow \\infty`
242 | 
243 |     References
244 |     ----------
245 |     .. [1] David Duvenaud, *Kernel Cookbook*
246 |     """
247 |     # divide by the length scale
248 |     x = x / params["length_scale"]
249 |     y = y / params["length_scale"]
250 | 
251 |     # rational quadratic on the length-scaled inputs:
252 |     # (1 + d / 2) ** (-alpha), with d = ||x - y||^2 / sigma^2
253 |     return params["var_f"] * (1 + sqeuclidean_distance(x, y) / 2.0) ** (
254 |         -params["scale_mixture"]
255 |     )
256 | 
-------------------------------------------------------------------------------- /jaxkern/dependence.py: --------------------------------------------------------------------------------
1 | from jaxkern.dist import sqeuclidean_distance
2 | from typing import Callable, Dict
3 | import jax
4 | import jax.numpy as np
5 | 
6 | from jaxkern.kernels import gram, covariance_matrix
7 | from jaxkern.utils import centering
8 | 
9 | jax_np = jax.numpy.ndarray
10 | 
11 | 
12 | def hsic(
13 |     X: np.ndarray,
14 |     Y: np.ndarray,
15 |     kernel: Callable,
16 |     params_x: Dict[str, float],
17 |     params_y: Dict[str, float],
18 |     bias: bool = False,
19 | ) -> float:
20 |     """Hilbert-Schmidt Independence Criterion (HSIC)
21 | 
22 |     Computes the empirical HSIC between two datasets from their
23 |     centered kernel matrices, with a biased or unbiased scaling.
24 | 
25 |     Parameters
26 |     ----------
27 |     X : jax.numpy.ndarray
28 |         the input value for one dataset
29 | 
30 |     Y : jax.numpy.ndarray
31 |         the input value for the second dataset
32 | 
33 |     kernel : Callable
34 |         the kernel function to be used for each of the kernel
35 |         calculations
36 | 
37 |     params_x : Dict[str, float]
38 |         a dictionary of parameters to be used for calculating the
39 |         kernel function for X
40 | 
41 |     params_y : Dict[str, float]
42 |         a dictionary of parameters to be used for calculating the
43 |         kernel function for Y
44 | 
45 |     bias : bool
46 |         whether to use the biased (1/n^2) or unbiased (1/(n-1)^2)
47 |         scaling factor (default=False)
48 | 
49 |     Returns
50 |     -------
51 |     hsic_value : float
52 |         the hsic value.
53 | 
54 |     Notes
55 |     -----
56 | 
57 |     A non-negative dependence measure; it is zero (in expectation)
58 |     when the two datasets are independent.
59 |     """
60 |     # kernel matrix
61 |     Kx = covariance_matrix(kernel, params_x, X, X)
62 |     Ky = covariance_matrix(kernel, params_y, Y, Y)
63 | 
64 |     Kx = centering(Kx)
65 |     Ky = centering(Ky)
66 | 
67 |     hsic_value = np.sum(Kx * Ky)
68 |     if bias:
69 |         factor = 1 / (Kx.shape[0] ** 2)
70 |     else:
71 |         factor = 1 / (Kx.shape[0] - 1) ** 2
72 |     return factor * hsic_value
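73 | 
74 | 
75 | # Sketch: HSIC picks up the nonlinear dependence between a signal and
76 | # its square, which plain correlation misses. The gamma values are
77 | # assumed; in practice they can come from the jaxkern.sigma heuristics.
78 | def _demo_hsic():
79 |     from jaxkern.kernels import rbf_kernel
80 | 
81 |     X = jax.random.normal(jax.random.PRNGKey(0), (100, 1))
82 |     Y = X ** 2  # dependent, but uncorrelated
83 | 
84 |     return hsic(X, Y, rbf_kernel, {"gamma": 1.0}, {"gamma": 1.0})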
85 | 
86 | 
87 | def nhsic_cka(
88 |     X: np.ndarray,
89 |     Y: np.ndarray,
90 |     kernel: Callable,
91 |     params_x: Dict[str, float],
92 |     params_y: Dict[str, float],
93 | ) -> float:
94 |     """Normalized HSIC (Tangent Kernel Alignment)
95 | 
96 |     A normalized variant of the HSIC method which divides by
97 |     the HS-Norm of each dataset.
98 | 
99 |     Parameters
100 |     ----------
101 |     X : jax.numpy.ndarray
102 |         the input value for one dataset
103 | 
104 |     Y : jax.numpy.ndarray
105 |         the input value for the second dataset
106 | 
107 |     kernel : Callable
108 |         the kernel function to be used for each of the kernel
109 |         calculations
110 | 
111 |     params_x : Dict[str, float]
112 |         a dictionary of parameters to be used for calculating the
113 |         kernel function for X
114 | 
115 |     params_y : Dict[str, float]
116 |         a dictionary of parameters to be used for calculating the
117 |         kernel function for Y
118 | 
119 |     Returns
120 |     -------
121 |     cka_value : float
122 |         the normalized hsic value.
123 | 
124 |     Notes
125 |     -----
126 | 
127 |     This is a metric that is similar to the correlation, [0,1]
128 |     """
129 |     Kx = covariance_matrix(kernel, params_x, X, X)
130 |     Ky = covariance_matrix(kernel, params_y, Y, Y)
131 | 
132 |     Kx = centering(Kx)
133 |     Ky = centering(Ky)
134 | 
135 |     # normalized alignment of the two centered kernel matrices
136 |     cka_value = np.sum(Kx * Ky) / np.linalg.norm(Kx) / np.linalg.norm(Ky)
137 | 
138 |     return cka_value
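139 | 
140 | 
141 | # Sketch: by construction the alignment of a dataset with itself is 1,
142 | # since the two centered Gram matrices are identical. Toy data assumed.
143 | def _demo_cka():
144 |     from jaxkern.kernels import rbf_kernel
145 | 
146 |     X = jax.random.normal(jax.random.PRNGKey(0), (50, 2))
147 |     return nhsic_cka(X, X, rbf_kernel, {"gamma": 1.0}, {"gamma": 1.0})  # ~1.0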
148 | 
149 | 
150 | def nhsic_nbs(
151 |     X: np.ndarray,
152 |     Y: np.ndarray,
153 |     kernel: Callable,
154 |     params_x: Dict[str, float],
155 |     params_y: Dict[str, float],
156 | ) -> float:
157 |     """Normalized Bures Similarity (NBS)
158 | 
159 |     A normalized variant of HSIC which compares the centered kernel
160 |     matrices through their eigenvalues rather than the HS-Norm.
161 | 
162 |     .. math::
163 | 
164 |         \\rho(K_x, K_y) =
165 |         \\frac{\\text{Tr} \\left( \\left( K_x^{1/2} K_y K_x^{1/2}
166 |         \\right)^{1/2} \\right)}{\\sqrt{\\text{Tr}(K_x) \\, \\text{Tr}(K_y)}}
167 | 
168 |     Parameters
169 |     ----------
170 |     X : jax.numpy.ndarray
171 |         the input value for one dataset
172 | 
173 |     Y : jax.numpy.ndarray
174 |         the input value for the second dataset
175 | 
176 |     kernel : Callable
177 |         the kernel function to be used for each of the kernel
178 |         calculations
179 | 
180 |     params_x : Dict[str, float]
181 |         a dictionary of parameters to be used for calculating the
182 |         kernel function for X
183 | 
184 |     params_y : Dict[str, float]
185 |         a dictionary of parameters to be used for calculating the
186 |         kernel function for Y
187 | 
188 |     Returns
189 |     -------
190 |     nbs_value : float
191 |         the normalized bures similarity value.
192 | 
193 |     Notes
194 |     -----
195 | 
196 |     This is a metric that is similar to the correlation, [0,1]
197 | 
198 |     References
199 |     ----------
200 | 
201 |     @article{JMLR:v18:16-296,
202 |     author = {Austin J. Brockmeier and Tingting Mu and Sophia Ananiadou and John Y. Goulermas},
203 |     title = {Quantifying the Informativeness of Similarity Measurements},
204 |     journal = {Journal of Machine Learning Research},
205 |     year = {2017},
206 |     volume = {18},
207 |     number = {76},
208 |     pages = {1-61},
209 |     url = {http://jmlr.org/papers/v18/16-296.html}
210 |     }
211 |     """
212 |     Kx = covariance_matrix(kernel, params_x, X, X)
213 |     Ky = covariance_matrix(kernel, params_y, Y, Y)
214 | 
215 |     Kx = centering(Kx)
216 |     Ky = centering(Ky)
217 | 
218 |     # numerator: Tr((Kx^{1/2} Ky Kx^{1/2})^{1/2}) via the eigenvalues of Kx Ky
219 |     numerator = np.real(np.linalg.eigvals(np.dot(Kx, Ky)))
220 | 
221 |     # clip rogue (slightly negative) eigenvalues before the square root
222 |     numerator = np.sqrt(np.clip(numerator, 0.0))
223 | 
224 |     numerator = np.sum(numerator)
225 | 
226 |     # denominator
227 |     denominator = np.sqrt(np.trace(Kx) * np.trace(Ky))
228 | 
229 |     # return nbs value
230 |     return numerator / denominator
231 | 
232 | 
233 | def nhsic_ka(
234 |     X: np.ndarray,
235 |     Y: np.ndarray,
236 |     kernel: Callable,
237 |     params_x: Dict[str, float],
238 |     params_y: Dict[str, float],
239 | ) -> float:
240 |     """Kernel Alignment (KA): the same ratio as `nhsic_cka`, but
241 |     computed on the uncentered kernel matrices."""
242 |     Kx = covariance_matrix(kernel, params_x, X, X)
243 |     Ky = covariance_matrix(kernel, params_y, Y, Y)
244 | 
245 |     cka_value = np.sum(Kx * Ky) / np.linalg.norm(Kx) / np.linalg.norm(Ky)
246 | 
247 |     return cka_value
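248 | 
249 | 
250 | # Sketch: like CKA, the Bures similarity of a dataset with itself is 1
251 | # (the centered Gram matrix is compared against itself). Toy data assumed.
252 | def _demo_nbs():
253 |     from jaxkern.kernels import rbf_kernel
254 | 
255 |     X = jax.random.normal(jax.random.PRNGKey(0), (40, 2))
256 |     return nhsic_nbs(X, X, rbf_kernel, {"gamma": 1.0}, {"gamma": 1.0})  # ~1.0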
257 | 
258 | 
259 | def nhsic_cca(
260 |     X: np.ndarray,
261 |     Y: np.ndarray,
262 |     kernel: Callable,
263 |     params_x: Dict[str, float],
264 |     params_y: Dict[str, float],
265 |     epsilon: float = 1e-5,
266 |     bias: bool = False,
267 | ) -> float:
268 |     """Normalized HSIC (CCA variant)
269 | 
270 |     A normalized variant of HSIC which whitens each centered kernel
271 |     matrix with a regularized inverse before comparing them.
272 | 
273 |     Parameters
274 |     ----------
275 |     X : jax.numpy.ndarray
276 |         the input value for one dataset
277 | 
278 |     Y : jax.numpy.ndarray
279 |         the input value for the second dataset
280 | 
281 |     kernel : Callable
282 |         the kernel function to be used for each of the kernel
283 |         calculations
284 | 
285 |     params_x : Dict[str, float]
286 |         a dictionary of parameters to be used for calculating the
287 |         kernel function for X
288 | 
289 |     params_y : Dict[str, float]
290 |         a dictionary of parameters to be used for calculating the
291 |         kernel function for Y
292 | 
293 |     epsilon : float
294 |         the regularization for the inverses of the kernel matrices
295 |         (default=1e-5)
296 | 
297 |     bias : bool
298 |         whether to use the biased or unbiased scaling factor
299 |         (default=False)
300 | 
301 |     Returns
302 |     -------
303 |     cca_value : float
304 |         the normalized hsic (cca) value.
305 | 
306 |     Notes
307 |     -----
308 | 
309 |     This is a metric that is similar to the correlation, [0,1]
310 |     """
311 |     n_samples = X.shape[0]
312 | 
313 |     # kernel matrix
314 |     Kx = gram(kernel, params_x, X, X)
315 |     Ky = gram(kernel, params_y, Y, Y)
316 | 
317 |     # center kernel matrices
318 |     Kx = centering(Kx)
319 |     Ky = centering(Ky)
320 | 
321 |     K_id = np.eye(Kx.shape[0])
322 |     Kx_inv = np.linalg.inv(Kx + epsilon * n_samples * K_id)
323 |     Ky_inv = np.linalg.inv(Ky + epsilon * n_samples * K_id)
324 | 
325 |     Rx = np.dot(Kx, Kx_inv)
326 |     Ry = np.dot(Ky, Ky_inv)
327 | 
328 |     hsic_value = np.sum(Rx * Ry)
329 | 
330 |     if bias:
331 |         factor = 1 / (Kx.shape[0] ** 2)
332 |     else:
333 |         factor = 1 / (Kx.shape[0] - 1) ** 2
334 |     return factor * hsic_value
335 | 
336 | 
337 | def _hsic_uncentered(
338 |     X: np.ndarray,
339 |     Y: np.ndarray,
340 |     kernel: Callable,
341 |     params_x: Dict[str, float],
342 |     params_y: Dict[str, float],
343 | ) -> float:
344 |     """A method to calculate the uncentered HSIC version"""
345 |     # kernel matrix
346 |     Kx = gram(kernel, params_x, X, X)
347 |     Ky = gram(kernel, params_y, Y, Y)
348 | 
349 |     # uncentered cross-product of the two kernel matrices
350 |     K = np.dot(Kx, Ky.T)
351 | 
352 |     hsic_value = np.mean(K)
353 | 
354 |     return hsic_value
355 | 
356 | 
357 | def mmd_mi(
358 |     X: np.ndarray,
359 |     Y: np.ndarray,
360 |     kernel: Callable,
361 |     params_x: Dict[str, float],
362 |     params_y: Dict[str, float],
363 | ) -> float:
364 |     """Maximum Mean Discrepancy
365 | 
366 |     Parameters
367 |     ----------
368 |     X : jax.numpy.ndarray
369 |         array-like of shape (n_samples, n_features)
370 |     Y : jax.numpy.ndarray
371 |         array-like of shape (n_samples, n_features)
372 | 
373 |     Notes
374 |     -----
375 | 
376 |     This method is equivalent to the HSIC method.
377 |     """
378 |     # calculate kernel matrices
379 |     Kx = gram(kernel, params_x, X, X)
380 |     Ky = gram(kernel, params_y, Y, Y)
381 | 
382 |     # center kernel matrices
383 |     Kx = centering(Kx)
384 |     Ky = centering(Ky)
385 | 
386 |     # get the expectations
387 |     A = np.mean(Kx * Ky)
388 |     B = np.mean(np.mean(Kx, axis=0) * np.mean(Ky, axis=0))
389 |     C = np.mean(Kx) * np.mean(Ky)
390 | 
391 |     # calculate the mmd value
392 |     mmd_value = A - 2 * B + C
393 | 
394 |     return mmd_value
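395 | 
396 | 
397 | # Sketch: for two independent samples the centered MMD statistic is
398 | # close to zero; the toy data and the gamma values are assumed.
399 | def _demo_mmd_mi():
400 |     from jaxkern.kernels import rbf_kernel
401 | 
402 |     key1, key2 = jax.random.split(jax.random.PRNGKey(0))
403 |     X = jax.random.normal(key1, (100, 2))
404 |     Y = jax.random.normal(key2, (100, 2))  # independent of X
405 | 
406 |     return mmd_mi(X, Y, rbf_kernel, {"gamma": 1.0}, {"gamma": 1.0})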
399 | """ 400 | n_samples, m_samples = X.shape[0], Y.shape[0] 401 | 402 | # constants 403 | a00 = 1.0 / (n_samples * (n_samples - 1.0)) 404 | a11 = 1.0 / (m_samples * (m_samples - 1.0)) 405 | a01 = -1.0 / (n_samples * m_samples) 406 | 407 | # kernel matrices 408 | Kx = gram(kernel, params_x, X, X) 409 | Ky = gram(kernel, params_y, Y, Y) 410 | Kxy = gram(kernel, params_xy, X, Y) 411 | 412 | if bias: 413 | mmd = np.mean(Kx) + np.mean(Ky) - 2 * np.mean(Kxy) 414 | return np.where(mmd >= 0.0, np.sqrt(mmd), 0.0) 415 | else: 416 | return ( 417 | 2 * a01 * np.mean(Kxy) 418 | + a00 * (np.sum(Kx) - n_samples) 419 | + a11 * (np.sum(Ky) - m_samples) 420 | ) 421 | 422 | 423 | -------------------------------------------------------------------------------- /notebooks/uncategorized/examples/gp_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Gaussian Process Regression from Scratch" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In this notebook, I would like to go through a gaussian process (GP) regression implementation from scratch. The main components of the algorithm consist of:\n", 15 | "\n", 16 | "* Parameter initialization\n", 17 | "* Training via hyperparameter optimization\n", 18 | "* Predictions with Variance Estimates\n", 19 | "\n", 20 | "I will use inspiration from a few sources:\n", 21 | "\n", 22 | "* Algorithm 2.1 \n", 23 | " - [Gaussian Processes for Machine Learning](http://www.gaussianprocess.org/gpml/) - Rasmussen and Williams\n", 24 | "* Chapter 18 \n", 25 | " - [Machine Learning: An Algorithmic Perspective](https://seat.massey.ac.nz/personal/s.r.marsland/MLBook.html) - Stephen Marsland" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 30, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "%matplotlib inline\n", 35 | "import matplotlib.pyplot as plt\n", 36 | "import numpy as np\n", 37 | "from scipy.spatial.distance import pdist, cdist" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "### Sample Data" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 12, 50 | "metadata": {}, 51 | "outputs": [ 52 | { 53 | "name": "stdout", 54 | "output_type": "stream", 55 | "text": [ 56 | "X training data: (30, 1)\n", 57 | "X testing data: (400, 1)\n" 58 | ] 59 | } 60 | ], 61 | "source": [ 62 | "n_train = 30\n", 63 | "n_test = 400\n", 64 | "xtrain = np.linspace(-4, 5, n_train).reshape(n_train, 1)\n", 65 | "xtest = np.linspace(-4, 5, n_test).reshape(n_test, 1)\n", 66 | "print('X training data:', xtrain.shape)\n", 67 | "print('X testing data:', xtest.shape)\n", 68 | "\n", 69 | "# Labels\n", 70 | "ytrain = np.sin(xtrain) * np.exp(0.2 * xtrain) + np.random.randn(n_train, 1) * 0.3" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 16, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXYAAAEICAYAAABLdt/UAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAFIFJREFUeJzt3X2QXXV9x/H3N5u0YSE+EYKUkGzwqaglKd2JT6WUgi1WFKutFSNK7UyG0jhasS2aqTXTYcZWhjhTbG1qbR0I1SKKFvEhdupoRxE3NmmlKFCS4AqWJRbELkhIvv3j7JJN3Id79949555z368ZZveee+65v71z+eT3fCIzkSQ1x6KqCyBJ6i6DXZIaxmCXpIYx2CWpYQx2SWoYg12SGsZgV21ExLsi4kPdPreFa2VEPLMb15LKEM5jVxUi4mLgMuAZwA+BTwLvzMwHqyzXdCIigWdl5l3TPPcl4IXAASCBO4Hrga2Z+eNOry/NhzV2lS4iLgP+HPhD4MkUwbga2BERPzXDaxaXV8K2bcrMZcBJFP9YvQ64OSKi2mKpXxnsKlVEPAnYArwlMz+XmQcycy/wWopwf8PEee+JiI9HxLUR8UPg4olj10651hsjYl9E7I+IP4mIvRFx7pTXXzvx+9BEd8qbIuKeiHggIjZPuc76iPhaRDwYEfdFxNUz/QMzm8z8v8z8EvBK4EXAy+e6fkR8eeLluyPiRxHx2xHx1Ii4KSLGIuJ/J35f2W551L8MdpXtxcBS4BNTD2bmj4DPAi+dcvgC4OPAU4DtU8+PiOcCfwVsoKgpPxk4eY73/kXgOcA5wLsj4rSJ4weBPwCWUwTyOcClbf5dU/+We4AR4My5rp+ZvzRxztrMPC4zP0bx/+XfU/xDtwp4BLh6vuVR/zHYVbblwAOZ+fg0z9038fykr2XmjZl5KDMfOerc3wT+OTP/LTMfA95N0cc9my2Z+Uhm7gZ2A2sBMnNnZt6SmY9PtB7+Bjir/T/tCPcCT5vP9TNzf2bekJnjmfkwcEUXyqM+0sv9lmqmB4DlEbF4mnA/aeL5Sd+d5To/M/X5zByPiP1zvPf3p/w+DhwHEBHPBq4ChoFBiv8vds5xrbmcDHx1PtePiEFgK3Ae8NSJw8siYiAzD3ZYLvUBa+wq29eAHwOvnnowIo4FXgb8y5TDs9XA7wOe6HeOiGOA4+dZpr8Gvk0xM+VJwLuAeQ98RsQpwC8AX5nn9S+j6DJ6wcT5k901DsaqJQa7SpWZD1EMnv5lRJwXEUsiYohiiuAocE2Ll/o48IqIePHEQOQW5h98yyimXP4oIn4W+L35XCQiBiPiLOBTwK3AzS1e/3+AU48qzyPAgxHxNOBP51Me9S+DXaXLzL+gqLVeSRF4X6foVjmn1bnfmXkb8BbgoxS194eB+ylaA+16B/D6iWv8LfCxNl9/dUQ8TBHQ7wduAM7LzEMtXv89wEcmZs28duIax1B0S90CfK7dP0j9zQVKaoSIOA54kKK7Y0/V5ZGqZI1dtRURr5jo/jiWovb/n8DeakslVc9gV51dQDGt8F7gWcDr0iaoZFeMJDWNNXZJaphKFigtX748h4aGqnhrSaqtnTt3PpCZJ8x1XiXBPjQ0xMjISBVvLUm1FRH7WjnPrhhJahiDXZIaxmCXpIZxd0dJtXDgwAFGR0d59NFHqy7Kglu6dCkrV65kyZIl83q9wS6pFkZHR1m2bBlDQ0M0+a6Dmcn+/fsZHR1lzZo187qGXTGSauHRRx/l+OOPb3SoA0QExx9/fEctE4Ndqrs92+HGIbhuUfFzz/a5XlFbTQ/1SZ3+nXbFSHW2ZzvcuhEOjhePx/cVjwHWbKiuXKqUNXapV7VSE9+9+XCoTzo4XhxX1w0MDLBu3Tqe97znsXbtWq666ioOHTo062v27t3LddddV1IJCwa71Isma+Lj+4A8XBM/OtzH75n+9TMdV0eOOeYYdu3axW233caOHTu4+eab2bJly6yvMdglFVqtiQ+umv71Mx3vJws89rBixQq2bdvG1VdfTWayd+9ezjzzTM444wzOOOMMvvrVrwJw+eWX85WvfIV169axdevWGc/rJvvYpV7Uak187RVH9rEDDAwWx/tZSWMPp556KocOHeL+++9nxYoV7Nixg6VLl3LnnXdy4YUXMjIywnvf+16uvPJKbrrppqIo4+PTntdNBrvUiwZXTXTDTHN8qsmQ2r25CP3BVUWo9/vA6Wwtni5/NpP3tDhw4ACbNm1i165dDAwMcMcdd0x7fqvndcJgl3pROzXxNRsM8qOVNPZw9913MzAwwIoVK9iyZQsnnngiu3fv5tChQyxdunTa12zdurWl8zphH7vUi9ZsgPXbYHA1EMXP9dsM8FaVMPYwNjbGJZdcwqZNm4gIHnroIU466SQWLVrENddcw8GDBwFYtmwZDz/88BOvm+m8brLGLvUqa+Lzt0BjD4888gjr1q3jwIEDLF68mIsuuoi3v/3tAFx66aW85jWv4frrr+fss8/m2GOPBeD0009n8eLFrF27losvvnjG87qpknueDg8PpzfakNSO22+/ndNOO631F+zZXuuxh+n+3ojYmZnDc73WGrukZurjFo997JLUMAa7pNqoouu4Cp3+nQa7pFpYunQp+/fvb3y4T+7H3sk0SPvYpX5S4wHFlStXMjo6ytjYWNVFWXCTd1CaL4Nd6hc13+J3yZIl876jUL+xK0bqF27x2ze6EuwR8eGIuD8ivtWN60laAG7x2ze6VWP/B+C8Ll1L0kJwi9++0ZVgz8wvAz/oxrUkLZC1VxTL6qdyi99GKq2PPSI2RsRIRIz0w6i2+kwdbijtxmJ9o2t7xUTEEHBTZj5/rnPdK0aNcvRsEyhqwoamuqzVvWKcFSN1ytkm6jEGu9QpZ5uox3RruuM/Al8DnhMRoxHxu924rlQLzjZRj+nWrJgLM/OkzFySmSsz8++6cV2pFtqdbVKHgVbVmlsKSJ1q54bSNV/Wr3rwDkpSmW4cKsL8aIOr4VV7yy6NasZZMVIvcqBVJTDYpTI50KoSGOxSmVzWrxIY7FKZXNavEjgrRirbmg0GuRaUNXZJahiDXZqNi4lUQ3bFSDNxMZFqyhq7NBN3bVRNGezSTFxMpJoy2KWZuJhINWWwSzNxMZFqymCXZuJiItWUs2Kk2biYSDVkjV2SGsZgl6SGMdglqWEMdklqmPoEu3t2SFJL6jErxj07JKll9aixu2eHVC5byLVWjxq7e3ZI5bGFXHv1qLG7Z4dUHlvItVePYHfPDqk8tpBrrx7B7p4dUnlsIddePfrYwT07pLKsveLIPnawhVwz9aixt8sRfc3G78fsbCHXXn1q7K1yRF+z8fvRGlvItda8Grsj+pqN3w/1geYFuyP6mo3fj+6za6vnNC/YHdHXbPx+dNdk19b4PiAPd20Z7pVqXrA7512z8fvRXe10bVmzL03zgr1OI/p+0ctXp+9HHbTatWXNvlSRmaW/6fDwcI6MjJT+vj3l6NkZUNQcDRnVyY1DE2F9lMHV8Kq97Z
+nWUXEzswcnuu85tXY68LZGWqCVru2HLQuVVeCPSLOi4jvRMRdEXF5N67ZeH7R1QStdm05aF2qjhcoRcQA8AHgpcAo8I2I+HRm/len1260wVUzNE39oqtmWlnM5DYFpepGjX09cFdm3p2ZjwEfBS7ownWbzdkZ6icOWpeqG1sKnAx8d8rjUeAFXbhus01+oXdvLrpfBlcVoe4XXU3lNgWl6UawxzTHfmKqTURsBDYCrFpldwPgF13SguhGV8wocMqUxyuBe48+KTO3ZeZwZg6fcMIJXXhbSdJ0uhHs3wCeFRFrIuKngNcBn+7CdXuLi4kk1UTHXTGZ+XhEbAI+DwwAH87M2zouWS9xq1dJNdKVeeyZeXNmPjszn5GZzZvW4X4YkmqkeTfaWAjt7odhzV5ShdxSoBWtrppzmwBJPcBgb4X7YUiqEYO9Fe6HIalG7GNvlfthSKqJ/q6xd3sGi/thSOoB/VtjX6gZLG4TIKli/VtjdwZLtVptLbXTqnINgQT0c43dGSzVabW11E6ryjUE0hP6t8buDJbqtNpaaqdVZQtMekL/Brs3uqhOq62ldlpVtsCkJ/RvsDuDpTqttpbaaVXZAmsWx0s60r/BDkWIv2ovvP5Q8dNQL0erraV2WlW2wJpjcrxkfB+Qh8dLDPeW9XewqxqttpbaaVXZAmsOx0s6Fpk/cRe7BTc8PJwjIyOlv6+kGrhuEdPcXROIonXdxyJiZ2YOz3WeNXZJvcXxko4Z7OoeB7zUDY6XdKx/Fyipu1wgpG6Z/L7s3lxMVx1cVYS636OW2ceu7rhxaGIWw1EGVxczjiR1zD52lcsFQlLPMNj7Vbf7wx3wknqGwd40rQT2QiwAccBL6hkGex20s8VtK4G9EAtAXCAk9QxnxfS6dmabzBbYU89dqP5wbzIi9QRr7L2undp1q4Ftf7jUaAZ7r2undt1qYNsfLjWawd7r2qldtxrY9odLjWYfe69be8WRfewwc+26nRV79odLjWWw97p2l1cb2FLfM9jroOqw3rPdfTukGjHYNTs395Jqx8FTzc672aiXuVX0tKyxa3Zu7qVeZWtyRtbYNTsXM6lX2ZqckcGu2bmYSb3K1uSMDHbNzsVM6lW2JmdkH7vmVvV0S2k67Sze6zPW2CXVk63JGXVUY4+I3wLeA5wGrM9Mb2QqqTy2JqfVaY39W8CrgS93oSySpC7oqMaembcDRER3SiNJ6lhpfewRsTEiRiJiZGxsrKy3laRCH61SnbPGHhFfBJ4+zVObM/NTrb5RZm4DtgEMDw9nyyWUpE712SrVOYM9M88toyCStGBavR9wQzjdUVLz9dkq1Y6CPSJ+IyJGgRcBn4mIz3enWJLURX22SrWjYM/MT2bmysz86cw8MTN/rVsFk6Su6bM9j+yKkdR8fbZK1b1iJPWHPlqlao1dkhrGYJekhjHYJalhDHZJahiDXZIaxmCXpIYx2CWpYQx2SWoYg12SGsZgl6SGMdglqWEMdklqGINdkhrGYJekhjHYJalhDHZJahiDXZIaxmCXpIYx2CWpYQx2SWoYg12SGsZgl6SGMdglqWEMdklqGINdkhrGYJekhjHYJalhDHZJahiDXZKm2rMdbhyC6xYVP/dsr7pEbVtcdQEkqWfs2Q63boSD48Xj8X3FY4A1G6orV5ussUvSpN2bD4f6pIPjxfEaMdgladL4Pe0d71EGuyRNGlzV3vEeZbBL0qS1V8DA4JHHBgaL4zVisEvSpDUbYP02GFwNRPFz/bZaDZxCh7NiIuJ9wCuAx4D/Bn4nMx/sRsEkqRJrNtQuyI/WaY19B/D8zDwduAN4Z+dFkiR1oqNgz8wvZObjEw9vAVZ2XiRJUie62cf+ZuCzMz0ZERsjYiQiRsbGxrr4tpKkqebsY4+ILwJPn+apzZn5qYlzNgOPAzOuvc3MbcA2gOHh4ZxXaSVJc5oz2DPz3Nmej4g3AecD52SmgS1JFet0Vsx5wB8DZ2Xm+FznS5IWXqd97FcDy4AdEbErIj7YhTJJkjrQUY09M5/ZrYJIkrrDlaeS1DAGuyQ1jMEuSQut5LsyeQclSVpIFdyVyRq7JC2kCu7KZLBL0kKq4K5MBrskLaQK7spksEvSQqrgrkwGuyQtpAruyuSsGElaaCXflckauyQ1jMEuSQ1jsEtSwxjsktQwBrskNYzBLkkNY7BLUsMY7JLUMAa7JDWMwS5JDWOwS1LDGOyS1DAGuyQ1jMEuSQ1jsEtSwxjsktQwBrskNYzBLkkNY7BLUsMY7JLUMAa7JM3Xnu1w4xBct6j4uWd71SUCYHHVBZCkWtqzHW7dCAfHi8fj+4rHAGs2VFcurLFL0vzs3nw41CcdHC+OV8xgl6T5GL+nveMlMtglaT4GV7V3vEQGuyTNx9orYGDwyGMDg8XxihnskjQfazbA+m0wuBqI4uf6bZUPnEKHs2Ii4s+AC4BDwP3AxZl5bzcKJkk9b82Gngjyo3VaY39fZp6emeuAm4B3d6FMkqQOdBTsmfnDKQ+PBbKz4kiSOtXxAqWIuAJ4I/AQcHbHJZIkdWTOGntEfDEivjXNfxcAZObmzDwF2A5smuU6GyNiJCJGxsbGuvcXSJKOEJnd6T2JiNXAZzLz+XOdOzw8nCMjI115X0nqFxGxMzOH5zqvoz72iHjWlIevBL7dyfUkSZ3rqMYeETcAz6GY7rgPuCQzv9fC68Ymzp+P5cAD83xtE/l5HOZncSQ/j8Oa8lmszswT5jqpa10xZYmIkVaaIv3Cz+MwP4sj+Xkc1m+fhStPJalhDHZJapg6Bvu2qgvQY/w8DvOzOJKfx2F99VnUro9dkjS7OtbYJUmzMNglqWFqHewR8Y6IyIhYXnVZqhQR74uIb0fEf0TEJyPiKVWXqWwRcV5EfCci7oqIy6suT5Ui4pSI+NeIuD0ibouIt1ZdpqpFxEBE/HtE3FR1WcpQ22CPiFOAlwLV32CwejuA52fm6cAdwDsrLk+pImIA+ADwMuC5wIUR8dxqS1Wpx4HLMvM04IXA7/f55wHwVuD2qgtRltoGO7AV+CPcKpjM/EJmPj7x8BZgZZXlqcB64K7MvDszHwM+SnEDmL6Umfdl5jcnfn+YItBOrrZU1YmIlcDLgQ9VXZay1DLYI+KVwPcyc3fVZelBbwY+W3UhSnYy8N0pj0fp4yCbKiKGgJ8Hvl5tSSr1fopK4KGqC1KWjvdjXygR8UXg6dM8tRl4F/Cr5ZaoWrN9Hpn5qYlzNlM0w7eXWbYeENMc6/uWXEQcB9wAvO2om+L0jYg4H7g/M3dGxC9XXZ6y9GywZ+a50x2PiJ8D1gC7IwKKbodvRsT6zPx+iUUs1Uyfx6SIeBNwPnBO9t/ihFHglCmPVwJ9fe/diFhCEerbM/MTVZenQi8BXhkRvw4sBZ4UEddm5hsqLteCqv0CpYjYCwxnZhN2bpuXiDgPuAo4KzP77i4mEbGYYtD4HOB7wDeA12fmbZUWrCJR1Hg+AvwgM99WdXl6xUSN/R2ZeX7VZVlotexj10+4GlgG7IiIXRHxwaoLVKaJgeNNwOcpBgr/q
V9DfcJLgIuAX5n4PuyaqLGqT9S+xi5JOpI1dklqGINdkhrGYJekhjHYJalhDHZJahiDXZIaxmCXpIb5fxfMHkY/Y+vWAAAAAElFTkSuQmCC\n", 81 | "text/plain": [ 82 | "" 83 | ] 84 | }, 85 | "metadata": {}, 86 | "output_type": "display_data" 87 | } 88 | ], 89 | "source": [ 90 | "fig, ax = plt.subplots()\n", 91 | "\n", 92 | "ax.scatter(xtrain, ytrain, color='orange', label='Data')\n", 93 | "ax.legend(loc='best')\n", 94 | "ax.set_title('Original Data')\n", 95 | "plt.show()" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "metadata": {}, 101 | "source": [ 102 | "## Gaussian Process Class" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 158, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "class GaussianProcessRegressor(object):\n", 112 | " def __init__(self, kernel='ard'):\n", 113 | " self.kernel = 'ard'\n", 114 | " \n", 115 | " def fit(self, x, y):\n", 116 | " self.y_fit_ = y\n", 117 | " self.x_fit_ = x\n", 118 | " self.n_samples = x.shape[0]\n", 119 | " \n", 120 | " # initialize with heuristics\n", 121 | " self.length_scale = np.mean(pdist(x, metric='euclidean'))\n", 122 | " self.likelihood_variance = 1.0\n", 123 | " \n", 124 | " return self\n", 125 | " \n", 126 | " # Kernel Matrix\n", 127 | " def ard_kernel(self, x, y=None, length_scale):\n", 128 | " \n", 129 | " if y is None:\n", 130 | " dists = pdist( x / self.length_scale, metric='sqeuclidean')\n", 131 | " K = np.exp(-.5)\n", 132 | " K" 133 | ] 134 | }, 135 | { 136 | "cell_type": "markdown", 137 | "metadata": {}, 138 | "source": [ 139 | "### Kernel Function" 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": 38, 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "from sklearn.metrics.pairwise import rbf_kernel" 149 | ] 150 | }, 151 | { 152 | "cell_type": "markdown", 153 | "metadata": {}, 154 | "source": [ 155 | "Taken from [stackoverflow](https://stackoverflow.com/questions/47271662/what-is-the-fastest-way-to-compute-an-rbf-kernel-in-python/47271663#47271663) post." 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": 131, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "import numexpr as ne\n", 165 | "from scipy.spatial.distance import pdist, squareform\n", 166 | "\n", 167 | "# def rbf_kernelnumexp(x, gamma, var=1.0):\n", 168 | "# x_norm = - gamma * np.sum(x**2, axis=-1)\n", 169 | "# return ne.evaluate('v * exp(A + B + C)', {\n", 170 | "# 'A': x_norm[:, None],\n", 171 | "# 'B': x_norm[None, :],\n", 172 | "# 'C': 2.0 * gamma * np.dot(x, x.T),\n", 173 | "# 'g': gamma, \n", 174 | "# 'v': var\n", 175 | "# })\n", 176 | "# return None\n", 177 | "\n", 178 | "def ard_kernel(x, length_scale, var=1.0):\n", 179 | " dists = pdist(x/length_scale,'sqeuclidean')\n", 180 | " K = np.exp(-.5 * dists)\n", 181 | " return squareform(K)" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 123, 187 | "metadata": {}, 188 | "outputs": [ 189 | { 190 | "name": "stdout", 191 | "output_type": "stream", 192 | "text": [ 193 | "29.5 µs ± 600 ns per loop (mean ± std. dev. 
of 7 runs, 10000 loops each)\n"
194 | ]
195 | }
196 | ],
197 | "source": [
198 | "%timeit ard_kernel(x, length_scale=length_scale)"
199 | ]
200 | },
201 | {
202 | "cell_type": "markdown",
203 | "metadata": {},
204 | "source": [
205 | "### Parameters (Heuristic Initializations)"
206 | ]
207 | },
208 | {
209 | "cell_type": "code",
210 | "execution_count": 26,
211 | "metadata": {},
212 | "outputs": [
213 | {
214 | "name": "stdout",
215 | "output_type": "stream",
216 | "text": [
217 | "Number of training points: 30\n",
218 | "Lengthscale: 3.207\n",
219 | "Likelihood Variance: 1\n"
220 | ]
221 | }
222 | ],
223 | "source": [
224 | "n_points = xtrain.shape[0]\n",
225 | "x_fit = xtrain\n",
226 | "length_scale = np.mean(pdist(x_fit, metric='euclidean'))\n",
227 | "likelihood_variance = 1\n",
228 | "\n",
229 | "print('Number of training points: {}'.format(n_points))\n",
230 | "print('Lengthscale: {:.3f}'.format(length_scale))\n",
231 | "print('Likelihood Variance: {}'.format(likelihood_variance))"
232 | ]
233 | },
234 | {
235 | "cell_type": "markdown",
236 | "metadata": {},
237 | "source": [
238 | "### Gradient Descent (Marginal Likelihood)"
239 | ]
240 | },
241 | {
242 | "cell_type": "markdown",
243 | "metadata": {},
244 | "source": [
245 | "#### Parameters"
246 | ]
247 | },
248 | {
249 | "cell_type": "code",
250 | "execution_count": 29,
251 | "metadata": {},
252 | "outputs": [],
253 | "source": [
254 | "param_lengthscale = length_scale\n",
255 | "length_scale_bounds = (1e-20, None)\n",
256 | "param_likelihood_variance = likelihood_variance\n",
257 | "likelihood_variance_bounds = (1e-10, None)\n",
258 | "\n",
259 | "theta0 = np.array([param_lengthscale,\n",
260 | "                   param_likelihood_variance])\n",
261 | "\n",
262 | "bounds = ((1e-20, None),  # length scale\n",
263 | "          (1e-10, None))  # likelihood variance\n",
264 | "\n",
265 | "# gradient descent on marginal likelihood"
266 | ]
267 | },
268 | {
269 | "cell_type": "markdown",
270 | "metadata": {},
271 | "source": [
272 | "#### Function to minimize"
273 | ]
274 | },
275 | {
276 | "cell_type": "markdown",
277 | "metadata": {},
278 | "source": [
279 | "##### Marginal Log Likelihood"
280 | ]
281 | },
282 | {
283 | "cell_type": "markdown",
284 | "metadata": {},
285 | "source": [
286 | "$$-\\frac{1}{2}y^{\\top}\\left[{\\bf K}+\\sigma_y^2{\\bf I}\\right]^{-1}y - \\frac{1}{2}\\log \\left| {\\bf K}+\\sigma_y^2{\\bf I} \\right| - \\frac{n}{2}\\log 2\\pi$$"
287 | ]
288 | },
289 | {
290 | "cell_type": "code",
291 | "execution_count": 157,
292 | "metadata": {},
293 | "outputs": [],
294 | "source": [
295 | "from scipy.linalg import cholesky, solve\n",
296 | "\n",
297 | "def log_posterior(theta, args):\n",
298 | "    x, y = args\n",
299 | "    print(x.shape, y.shape)\n",
300 | "    n_samples = x.shape[0]\n",
301 | "    # extract parameters\n",
302 | "    length_scale_param = theta[0]\n",
303 | "    likelihood_variance_param = theta[1]\n",
304 | "    \n",
305 | "    K = ard_kernel(x, length_scale=length_scale_param)\n",
306 | "\n",
307 | "    # calculate the cholesky\n",
308 | "    L = cholesky(K + likelihood_variance_param * np.eye(n_samples), lower=True)\n",
309 | "    \n",
310 | "    # solve for alpha\n",
311 | "    weights = solve(L.T, solve(L, y))\n",
312 | "    \n",
313 | "    # solve for the log posterior\n",
314 | "    log_p = -0.5 * np.dot(y.T, weights) \\\n",
315 | "        - np.sum( np.log( np.diag(L) )) \\\n",
316 | "        - 0.5 * n_samples * np.log(2 * np.pi)\n",
317 | "\n",
318 | "    return -log_p"
319 | ]
320 | },
321 | {
322 | "cell_type": "code",
323 | "execution_count": 156,
324 | "metadata": {},
325 | "outputs": [ 327 
| {
328 | "name": "stdout",
329 | "output_type": "stream",
330 | "text": [
331 | "(30, 1) (30, 1)\n",
332 | "(1, 1)\n",
333 | "(1, 1)\n"
334 | ]
335 | }
336 | ],
337 | "source": [
338 | "length_scale = np.ones(shape=xtrain.shape[0])\n",
339 | "variance = 1.0\n",
340 | "theta = np.array([length_scale, variance])\n",
341 | "args = (xtrain, ytrain)\n",
342 | "temp = log_posterior(theta, args)\n",
343 | "print(temp.shape)"
344 | ]
345 | },
346 | {
347 | "cell_type": "code",
348 | "execution_count": null,
349 | "metadata": {},
350 | "outputs": [],
351 | "source": [
352 | "def grad_log_posterior(theta, args):\n",
353 | "    x, y = args\n",
354 | "    "
355 | ]
356 | },
357 | {
358 | "cell_type": "markdown",
359 | "metadata": {},
360 | "source": [
361 | "#### Scipy Solution"
362 | ]
363 | },
364 | {
365 | "cell_type": "code",
366 | "execution_count": null,
367 | "metadata": {},
368 | "outputs": [],
369 | "source": []
370 | }
371 | ],
372 | "metadata": {
373 | "kernelspec": {
374 | "display_name": "Python 3",
375 | "language": "python",
376 | "name": "python3"
377 | },
378 | "language_info": {
379 | "codemirror_mode": {
380 | "name": "ipython",
381 | "version": 3
382 | },
383 | "file_extension": ".py",
384 | "mimetype": "text/x-python",
385 | "name": "python",
386 | "nbconvert_exporter": "python",
387 | "pygments_lexer": "ipython3",
388 | "version": "3.6.4"
389 | }
390 | },
391 | "nbformat": 4,
392 | "nbformat_minor": 2
393 | }
394 | 
-------------------------------------------------------------------------------- /notebooks/uncategorized/scale/nystrom/randomized_nystrom_approximation.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Nystrom + Randomized SVD"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "In this notebook, I will develop the Nystrom method with the inclusion of the randomized SVD (rSVD) algorithm to speed up the Nystrom calculations.\n"
15 | ]
16 | },
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {},
20 | "source": [
21 | "### Speed Stats\n",
22 | "\n",
23 | "**Let**:\n",
24 | "* k = rank\n",
25 | "* N = Size of Kernel matrix\n",
26 | "* m = subsample\n",
27 | "\n",
28 | "**Order**:\n",
29 | "\n",
30 | "* Nystrom: $O(Nmk + m^3)$\n",
31 | "* Ensemble Nystrom: $O(Nmk + N_ek^3 + C_{\\mu})$\n",
32 | "* Randomized SVD: $O(N^2k + k^3)$\n",
33 | "* Nystrom + rSVD: $O(Nmk + k^3)$"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "### Algorithm"
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "metadata": {},
46 | "source": [
47 | "**Input**:\n",
48 | "\n",
49 | "* K = Positive Semidefinite (PSD) Kernel matrix $\\in \\mathbb{R}^{N \\times N}$\n",
50 | "* M = Number of subsamples (columns)\n",
51 | "* r = rank\n",
52 | "* P = oversampling parameter\n",
53 | "* q = power parameter\n",
54 | "\n",
55 | "**Output**:\n",
56 | "* $L$"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 1,
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "import numpy as np\n",
66 | "import warnings \n",
67 | "from sklearn.model_selection import train_test_split\n",
68 | "from sklearn.datasets import make_regression\n",
69 | "import scipy as scio\n",
70 | "from scipy.spatial.distance import pdist\n",
71 | "from scipy.linalg import cho_factor, cho_solve, cholesky\n",
72 | "from sklearn.metrics.pairwise import rbf_kernel, pairwise_kernels\n",
73 | "from 
sklearn.metrics import mean_squared_error\n", 74 | "from sklearn.utils import check_array, check_random_state\n", 75 | "from sklearn.linear_model.ridge import _solve_cholesky_kernel as kernel_solve\n", 76 | "from time import time\n", 77 | "from sklearn.decomposition import TruncatedSVD\n", 78 | "from sklearn.utils.extmath import randomized_svd\n", 79 | "# import fbpca\n", 80 | "\n", 81 | "%matplotlib inline\n", 82 | "import matplotlib.pyplot as plt\n", 83 | "\n", 84 | "warnings.filterwarnings('ignore')" 85 | ] 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "metadata": {}, 90 | "source": [ 91 | "## Resources" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {}, 97 | "source": [ 98 | "#### Original Formulation\n", 99 | "\n" 100 | ] 101 | }, 102 | { 103 | "cell_type": "markdown", 104 | "metadata": {}, 105 | "source": [ 106 | "## Generate Data" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": 2, 112 | "metadata": {}, 113 | "outputs": [ 114 | { 115 | "name": "stdout", 116 | "output_type": "stream", 117 | "text": [ 118 | "Size of data: (4435, 36)\n" 119 | ] 120 | } 121 | ], 122 | "source": [ 123 | "# Import dataset\n", 124 | "from scipy.io import loadmat\n", 125 | "\n", 126 | "\n", 127 | "data_path = '/home/emmanuel/code/kernellib/dev/scale/nystrom/satire.mat'\n", 128 | "data = scio.io.loadmat(data_path)['D'].toarray()\n", 129 | "print('Size of data: {}'.format(data.shape))\n", 130 | "\n", 131 | "n_samples = data.shape[0]\n", 132 | "random_state = 123" 133 | ] 134 | }, 135 | { 136 | "cell_type": "markdown", 137 | "metadata": {}, 138 | "source": [ 139 | "## Nystrom Approximation of a Kernel Matrix" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "#### Kernel Matrix of Data" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 3, 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "name": "stdout", 156 | "output_type": "stream", 157 | "text": [ 158 | "(4435, 4435)\n" 159 | ] 160 | } 161 | ], 162 | "source": [ 163 | "# Linear Kernel\n", 164 | "K = data @ data.T\n", 165 | "\n", 166 | "print(K.shape)" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 4, 172 | "metadata": {}, 173 | "outputs": [], 174 | "source": [ 175 | "n_samples, d_dimensions = data.shape # Data stats\n", 176 | "m_subsamples = 200 # M subsamples\n", 177 | "n_components = 100 # rank of matrix\n", 178 | "random_state = 1234 # random state for subsampling; rsvd" 179 | ] 180 | }, 181 | { 182 | "cell_type": "markdown", 183 | "metadata": {}, 184 | "source": [ 185 | "#### Sampling" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": 15, 191 | "metadata": {}, 192 | "outputs": [ 193 | { 194 | "name": "stdout", 195 | "output_type": "stream", 196 | "text": [ 197 | "Size of the sampled K matrix, C: (4435, 200)\n", 198 | "Size of m-by-m intersection matrix, W: (200, 200)\n" 199 | ] 200 | } 201 | ], 202 | "source": [ 203 | "# uniform sampling without replacement\n", 204 | "rng = check_random_state(random_state)\n", 205 | "random_indices = rng.permutation(n_samples)\n", 206 | "\n", 207 | "# column subsample matrix\n", 208 | "column_indices = random_indices[:m_subsamples]\n", 209 | "\n", 210 | "# choose the columns randomly from the matrix\n", 211 | "C = K[:, column_indices]\n", 212 | "\n", 213 | "print('Size of the sampled K matrix, C: ', C.shape)\n", 214 | "\n", 215 | "# get the other sampled columns\n", 216 | "W = C[column_indices, :]\n", 217 | "\n", 218 | 
"print('Size of m-by-m intersection matrix, W: ', W.shape)" 219 | ] 220 | }, 221 | { 222 | "cell_type": "markdown", 223 | "metadata": {}, 224 | "source": [ 225 | "### SVD" 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": 41, 231 | "metadata": {}, 232 | "outputs": [ 233 | { 234 | "name": "stdout", 235 | "output_type": "stream", 236 | "text": [ 237 | "Error: 5.47e-11\n" 238 | ] 239 | } 240 | ], 241 | "source": [ 242 | "# %%timeit\n", 243 | "\n", 244 | "# Perform SVD\n", 245 | "U, D, V = np.linalg.svd(W, full_matrices=False)\n", 246 | "\n", 247 | "# Form approximation\n", 248 | "U_approx = np.sqrt(m_subsamples / n_samples) * C @ U @ np.diag(np.power(D, -1))\n", 249 | "D_approx = (n_samples / m_subsamples) * np.diag(D)\n", 250 | "# print(U_approx.shape, D_approx.shape)\n", 251 | "\n", 252 | "# Compute approximate error\n", 253 | "err_svd = np.linalg.norm(K - U_approx @ D_approx @ U_approx.T, 'fro')\n", 254 | "\n", 255 | "print(f'Error: {err_svd:.2e}')" 256 | ] 257 | }, 258 | { 259 | "cell_type": "markdown", 260 | "metadata": {}, 261 | "source": [ 262 | "### SVD + k components" 263 | ] 264 | }, 265 | { 266 | "cell_type": "code", 267 | "execution_count": 40, 268 | "metadata": {}, 269 | "outputs": [ 270 | { 271 | "name": "stdout", 272 | "output_type": "stream", 273 | "text": [ 274 | "Error: 4.45e-11\n" 275 | ] 276 | } 277 | ], 278 | "source": [ 279 | "# %%timeit\n", 280 | "\n", 281 | "# Perform SVD\n", 282 | "U, D, V = np.linalg.svd(W, full_matrices=False)\n", 283 | "\n", 284 | "# Take components of matrices\n", 285 | "U = U[:, :n_components]\n", 286 | "D = D[:n_components]\n", 287 | "V = V[:, :n_components]\n", 288 | "\n", 289 | "# Form approximation\n", 290 | "U_approx = np.sqrt(m_subsamples / n_samples) * C @ U @ np.diag(np.power(D, -1))\n", 291 | "D_approx = (n_samples / m_subsamples) * np.diag(D)\n", 292 | "# print(U_approx.shape, D_approx.shape)\n", 293 | "\n", 294 | "# Compute approximate error\n", 295 | "err_ksvd = np.linalg.norm(K - U_approx @ D_approx @ U_approx.T, 'fro')\n", 296 | "\n", 297 | "print(f'Error: {err_ksvd:.2e}')" 298 | ] 299 | }, 300 | { 301 | "cell_type": "markdown", 302 | "metadata": {}, 303 | "source": [ 304 | "### rSVD" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": 42, 310 | "metadata": {}, 311 | "outputs": [ 312 | { 313 | "name": "stdout", 314 | "output_type": "stream", 315 | "text": [ 316 | "Error: 1.62e-11\n" 317 | ] 318 | } 319 | ], 320 | "source": [ 321 | "# %%timeit \n", 322 | "\n", 323 | "# Perform SVD\n", 324 | "U, D, V = randomized_svd(W, n_components=n_components)\n", 325 | "\n", 326 | "# # Take components of matrices\n", 327 | "# U = U[:, :n_components]\n", 328 | "# D = D[:n_components]\n", 329 | "# V = V[:, :n_components]\n", 330 | "\n", 331 | "# Form approximation\n", 332 | "U_approx = np.sqrt(m_subsamples / n_samples) * C @ U @ np.diag(np.power(D, -1))\n", 333 | "D_approx = (n_samples / m_subsamples) * np.diag(D)\n", 334 | "# print(U_approx.shape, D_approx.shape)\n", 335 | "\n", 336 | "# Compute approximate error\n", 337 | "err_rsvd = np.linalg.norm(K - U_approx @ D_approx @ U_approx.T, 'fro')\n", 338 | "\n", 339 | "print(f'Error: {err_rsvd:.2e}')" 340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": 13, 345 | "metadata": {}, 346 | "outputs": [ 347 | { 348 | "name": "stdout", 349 | "output_type": "stream", 350 | "text": [ 351 | "Time Experiment for normal SVD.\n", 352 | "4.11 ms ± 34.3 µs per loop (mean ± std. dev. 
of 7 runs, 100 loops each)\n", 353 | "\n", 354 | "Time Experiment for randomized SVD (scikit).\n", 355 | "9.18 ms ± 12.7 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n" 356 | ] 357 | } 358 | ], 359 | "source": [ 360 | "print('Time Experiment for normal SVD.')\n", 361 | "%timeit np.linalg.svd(W, full_matrices=False);\n", 362 | "\n", 363 | "# print('\\nTime Experiment for randomized SVD (fb).')\n", 364 | "# %timeit fbpca.pca(W, k=k_components, raw=True, n_iter=3);\n", 365 | "\n", 366 | "print('\\nTime Experiment for randomized SVD (scikit).')\n", 367 | "%timeit randomized_svd(W, n_components=n_components);" 368 | ] 369 | }, 370 | { 371 | "cell_type": "markdown", 372 | "metadata": {}, 373 | "source": [ 374 | "### Maybe Faster" 375 | ] 376 | }, 377 | { 378 | "cell_type": "code", 379 | "execution_count": 70, 380 | "metadata": {}, 381 | "outputs": [ 382 | { 383 | "name": "stdout", 384 | "output_type": "stream", 385 | "text": [ 386 | "(4435, 200)\n" 387 | ] 388 | } 389 | ], 390 | "source": [ 391 | "# Nystrom approximation of K\n", 392 | "eps = 1e-12\n", 393 | "\n", 394 | "# size of the data\n", 395 | "n_samples = data.shape[0]\n", 396 | "\n", 397 | "# choose the number of components\n", 398 | "m_subsamples = min(n_samples, m_subsamples)\n", 399 | "\n", 400 | "# perform rnadom uniform sampling without replacement\n", 401 | "indices = np.random.permutation(n_samples)\n", 402 | "basis_indices = indices[:m_subsamples]\n", 403 | "basis = data[basis_indices]\n", 404 | "\n", 405 | "# Construct Kernel Matrix\n", 406 | "basis_kernel = data @ basis.T\n", 407 | "\n", 408 | "print(basis_kernel.shape)" 409 | ] 410 | }, 411 | { 412 | "cell_type": "markdown", 413 | "metadata": {}, 414 | "source": [ 415 | "## Data Approximation" 416 | ] 417 | }, 418 | { 419 | "cell_type": "markdown", 420 | "metadata": {}, 421 | "source": [ 422 | "Input:\n", 423 | "* $X\\in \\mathbb{R}^{NxN}$\n", 424 | "* \n", 425 | "\n" 426 | ] 427 | }, 428 | { 429 | "cell_type": "markdown", 430 | "metadata": {}, 431 | "source": [ 432 | "#### Scikit-Learn Implementation" 433 | ] 434 | }, 435 | { 436 | "cell_type": "markdown", 437 | "metadata": {}, 438 | "source": [ 439 | "##### Sampling" 440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": 49, 445 | "metadata": {}, 446 | "outputs": [], 447 | "source": [ 448 | "# Nystrom approximation of K\n", 449 | "eps = 1e-12\n", 450 | "\n", 451 | "# size of the data\n", 452 | "n_samples = data.shape[0]\n", 453 | "\n", 454 | "# choose the number of components\n", 455 | "m_subsamples = min(n_samples, m_subsamples)\n", 456 | "\n", 457 | "# perform rnadom uniform sampling without replacement\n", 458 | "indices = np.random.permutation(n_samples)\n", 459 | "basis_indices = indices[:m_subsamples]\n", 460 | "basis = data[basis_indices]" 461 | ] 462 | }, 463 | { 464 | "cell_type": "code", 465 | "execution_count": 50, 466 | "metadata": {}, 467 | "outputs": [ 468 | { 469 | "name": "stdout", 470 | "output_type": "stream", 471 | "text": [ 472 | "(4435, 36) (200, 36)\n" 473 | ] 474 | } 475 | ], 476 | "source": [ 477 | "print(data.shape, basis.shape)" 478 | ] 479 | }, 480 | { 481 | "cell_type": "markdown", 482 | "metadata": {}, 483 | "source": [ 484 | "##### Construct Kernel Matrices" 485 | ] 486 | }, 487 | { 488 | "cell_type": "code", 489 | "execution_count": 55, 490 | "metadata": {}, 491 | "outputs": [ 492 | { 493 | "name": "stdout", 494 | "output_type": "stream", 495 | "text": [ 496 | "(200, 200)\n" 497 | ] 498 | } 499 | ], 500 | "source": [ 501 | "# Construct Kernel Matrix\n", 502 | 
"basis_kernel = basis @ basis.T\n", 503 | "\n", 504 | "print(basis_kernel.shape)\n", 505 | "# # construct the basis kernel\n", 506 | "# basis_gamma = 1 / (2 * np.mean(pdist(basis, metric='euclidean')))\n", 507 | "# basis_kernel = pairwise_kernels(basis, metric=kernel, n_jobs=n_jobs, gamma=gamma)" 508 | ] 509 | }, 510 | { 511 | "cell_type": "markdown", 512 | "metadata": {}, 513 | "source": [ 514 | "##### SVD - Get the basis vectors" 515 | ] 516 | }, 517 | { 518 | "cell_type": "code", 519 | "execution_count": 56, 520 | "metadata": {}, 521 | "outputs": [ 522 | { 523 | "name": "stdout", 524 | "output_type": "stream", 525 | "text": [ 526 | "Perform Normal SVD...\n" 527 | ] 528 | } 529 | ], 530 | "source": [ 531 | "# get the basis vectors\n", 532 | "\n", 533 | "# Perform the normal SVD\n", 534 | "print('Perform Normal SVD...')\n", 535 | "U, D, V = np.linalg.svd(basis_kernel, full_matrices=False)\n", 536 | "\n", 537 | "# use toleraance for eigenvalues\n", 538 | "S = np.maximum(D, eps)" 539 | ] 540 | }, 541 | { 542 | "cell_type": "code", 543 | "execution_count": 57, 544 | "metadata": {}, 545 | "outputs": [ 546 | { 547 | "name": "stdout", 548 | "output_type": "stream", 549 | "text": [ 550 | "Size of normalization: (200, 200)\n", 551 | "Size of components: (200, 36)\n", 552 | "Size of component indices: (200,)\n" 553 | ] 554 | } 555 | ], 556 | "source": [ 557 | "# Get normalization\n", 558 | "normalization = np.dot(U / np.sqrt(S), V)\n", 559 | "\n", 560 | "# get components and indices\n", 561 | "components = basis\n", 562 | "component_indices = basis_indices\n", 563 | "\n", 564 | "print('Size of normalization: {}'.format(normalization.shape))\n", 565 | "print('Size of components: {}'.format(components.shape))\n", 566 | "print('Size of component indices: {}'.format(component_indices.shape))" 567 | ] 568 | }, 569 | { 570 | "cell_type": "code", 571 | "execution_count": 63, 572 | "metadata": {}, 573 | "outputs": [ 574 | { 575 | "name": "stdout", 576 | "output_type": "stream", 577 | "text": [ 578 | "(4435, 200)\n", 579 | "Error: 3.67e-05\n" 580 | ] 581 | } 582 | ], 583 | "source": [ 584 | "L = data @ basis.T @ normalization\n", 585 | "\n", 586 | "print(L.shape)\n", 587 | "\n", 588 | "K_approx = L @ L.T\n", 589 | "\n", 590 | "# Compute approximate error\n", 591 | "err_rsvd = np.linalg.norm(K - K_approx, 'fro')\n", 592 | "\n", 593 | "print(f'Error: {err_rsvd:.2e}')" 594 | ] 595 | }, 596 | { 597 | "cell_type": "markdown", 598 | "metadata": {}, 599 | "source": [ 600 | "##### Randomized SVD" 601 | ] 602 | }, 603 | { 604 | "cell_type": "code", 605 | "execution_count": 67, 606 | "metadata": {}, 607 | "outputs": [ 608 | { 609 | "name": "stdout", 610 | "output_type": "stream", 611 | "text": [ 612 | "Perform scikit RSVD...\n", 613 | "Size of normalization: (200, 200)\n", 614 | "Size of components: (200, 36)\n", 615 | "Size of component indices: (200,)\n" 616 | ] 617 | } 618 | ], 619 | "source": [ 620 | "print('Perform scikit RSVD...')\n", 621 | "U_rand, D_rand, V_rand = randomized_svd(basis_kernel, n_components=n_components)\n", 622 | "\n", 623 | "# use toleraance for eigenvalues\n", 624 | "S_rand = np.maximum(D_rand, eps)\n", 625 | "\n", 626 | "normalization_r = np.dot(U_rand / np.sqrt(S_rand), V_rand)\n", 627 | "\n", 628 | "print('Size of normalization: {}'.format(normalization_r.shape))\n", 629 | "print('Size of components: {}'.format(components.shape))\n", 630 | "print('Size of component indices: {}'.format(component_indices.shape))" 631 | ] 632 | }, 633 | { 634 | "cell_type": "markdown", 635 | "metadata": {}, 
636 | "source": [ 637 | "#### Transform New Data" 638 | ] 639 | }, 640 | { 641 | "cell_type": "code", 642 | "execution_count": 68, 643 | "metadata": {}, 644 | "outputs": [ 645 | { 646 | "name": "stdout", 647 | "output_type": "stream", 648 | "text": [ 649 | "(4435, 200)\n", 650 | "Error: 1.33e-05\n" 651 | ] 652 | } 653 | ], 654 | "source": [ 655 | "L = data @ basis.T @ normalization_r\n", 656 | "\n", 657 | "print(L.shape)\n", 658 | "\n", 659 | "K_approx = L @ L.T\n", 660 | "\n", 661 | "# Compute approximate error\n", 662 | "err_rsvd = np.linalg.norm(K - K_approx, 'fro')\n", 663 | "\n", 664 | "print(f'Error: {err_rsvd:.2e}')" 665 | ] 666 | }, 667 | { 668 | "cell_type": "code", 669 | "execution_count": null, 670 | "metadata": {}, 671 | "outputs": [], 672 | "source": [] 673 | } 674 | ], 675 | "metadata": { 676 | "kernelspec": { 677 | "display_name": "Python 3", 678 | "language": "python", 679 | "name": "python3" 680 | }, 681 | "language_info": { 682 | "codemirror_mode": { 683 | "name": "ipython", 684 | "version": 3 685 | }, 686 | "file_extension": ".py", 687 | "mimetype": "text/x-python", 688 | "name": "python", 689 | "nbconvert_exporter": "python", 690 | "pygments_lexer": "ipython3", 691 | "version": "3.7.2" 692 | }, 693 | "toc": { 694 | "nav_menu": {}, 695 | "number_sections": true, 696 | "sideBar": true, 697 | "skip_h1_title": false, 698 | "toc_cell": false, 699 | "toc_position": {}, 700 | "toc_section_display": "block", 701 | "toc_window_display": false 702 | }, 703 | "varInspector": { 704 | "cols": { 705 | "lenName": 16, 706 | "lenType": 16, 707 | "lenVar": 40 708 | }, 709 | "kernels_config": { 710 | "python": { 711 | "delete_cmd_postfix": "", 712 | "delete_cmd_prefix": "del ", 713 | "library": "var_list.py", 714 | "varRefreshCmd": "print(var_dic_list())" 715 | }, 716 | "r": { 717 | "delete_cmd_postfix": ") ", 718 | "delete_cmd_prefix": "rm(", 719 | "library": "var_list.r", 720 | "varRefreshCmd": "cat(var_dic_list()) " 721 | } 722 | }, 723 | "types_to_exclude": [ 724 | "module", 725 | "function", 726 | "builtin_function_or_method", 727 | "instance", 728 | "_Feature" 729 | ], 730 | "window_display": false 731 | } 732 | }, 733 | "nbformat": 4, 734 | "nbformat_minor": 2 735 | } 736 | -------------------------------------------------------------------------------- /notebooks/uncategorized/derivative/derivatives_numba.py: -------------------------------------------------------------------------------- 1 | @numba.njit(fastmath=True, nogil=True) 2 | def ard_derivative_numba(x_train, x_function, K, weights, length_scale): 3 | # # check the sizes of x_train and x_test 4 | # err_msg = "xtrain and xtest d dimensions are not equivalent." 5 | # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 6 | 7 | # # check the n_samples for x_train and weights are equal 8 | # err_msg = "Number of training samples for xtrain and weights are not equal." 
9 | # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 10 | 11 | n_test, n_dims = x_function.shape 12 | 13 | derivative = np.zeros(shape=x_function.shape) 14 | 15 | length_scale = np.diag(- np.power(length_scale, -2)) 16 | 17 | for itest in prange(n_test): 18 | derivative[itest, :] = np.dot(np.dot(length_scale, (x_function[itest, :] - x_train).T), 19 | (K[itest, :].reshape(-1, 1) * weights)) 20 | 21 | return derivative 22 | 23 | @numba.njit(fastmath=True, nogil=True) 24 | def rbf_derivative_numba(x_train, x_function, K, weights, gamma): 25 | # # check the sizes of x_train and x_test 26 | # err_msg = "xtrain and xtest d dimensions are not equivalent." 27 | # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 28 | 29 | # # check the n_samples for x_train and weights are equal 30 | # err_msg = "Number of training samples for xtrain and weights are not equal." 31 | # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 32 | 33 | n_test, n_dims = x_function.shape 34 | 35 | derivative = np.zeros(shape=x_function.shape) 36 | 37 | constant = - 2 * gamma 38 | 39 | for itest in range(n_test): 40 | derivative[itest, :] = np.dot((x_function[itest, :] - x_train).T, 41 | (K[itest, :].reshape(-1, 1) * weights)) 42 | 43 | derivative *= - 1 / length_scale**2 44 | 45 | return derivative 46 | 47 | 48 | @numba.njit(fastmath=True) 49 | def ard_derivative_full_numba(x_train, x_function, K, weights, length_scale): 50 | 51 | n_test, d_dims = x_function.shape 52 | n_train, d_dims = x_train.shape 53 | 54 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 55 | 56 | constant = -np.power(length_scale, -2) 57 | 58 | for idim in range(d_dims): 59 | for itrain in range(n_train): 60 | for itest in range(n_test): 61 | derivative[itest, itrain, idim] = \ 62 | constant[idim] * weights[itrain] \ 63 | * (x_function[itest, idim] - x_train[itrain, idim]) \ 64 | * K[itest, itrain] 65 | 66 | return derivative 67 | 68 | 69 | @numba.njit(fastmath=True) 70 | def rbf_derivative_numba(x_train, x_function, K, weights, gamma): 71 | # # check the sizes of x_train and x_test 72 | # err_msg = "xtrain and xtest d dimensions are not equivalent." 73 | # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 74 | 75 | # # check the n_samples for x_train and weights are equal 76 | # err_msg = "Number of training samples for xtrain and weights are not equal." 
77 | # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 78 | 79 | n_test, n_dims = x_function.shape 80 | 81 | derivative = np.zeros(shape=x_function.shape) 82 | 83 | constant = - 2 * gamma 84 | 85 | for itest in range(n_test): 86 | derivative[itest, :] = np.dot((x_function[itest, :] - x_train).T, 87 | (K[itest, :].reshape(-1, 1) * weights)) 88 | 89 | derivative *= constant 90 | 91 | return derivative 92 | 93 | @numba.njit(fastmath=True) 94 | def rbf_derivative_full_numba(x_train, x_function, K, weights, gamma, nder=1): 95 | n_test, d_dims = x_function.shape 96 | n_train, d_dims = x_train.shape 97 | 98 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 99 | 100 | if nder == 1: 101 | for idim in range(d_dims): 102 | for itrain in range(n_train): 103 | w = weights[itrain] 104 | for itest in range(n_test): 105 | # print(weights.shape) 106 | derivative[itest, itrain, idim] = \ 107 | w * (x_function[itest, idim] - x_train[itrain, idim]) * K[itest, itrain] 108 | 109 | derivative *= - 2 * gamma 110 | 111 | else: 112 | constant = 2 * gamma 113 | for idim in range(d_dims): 114 | for itrain in range(n_train): 115 | for itest in range(n_test): 116 | derivative[itest, itrain, idim] = \ 117 | weights[itrain] \ 118 | * (constant * (x_function[itest, idim] - x_train[itrain, idim]) ** 2 - 1) \ 119 | * K[itest, itrain] 120 | derivative *= constant 121 | 122 | return derivative 123 | 124 | @numba.njit(fastmath=True) 125 | def ard_derivative_full_numba(x_train, x_function, K, weights, length_scale): 126 | 127 | n_test, d_dims = x_function.shape 128 | n_train, d_dims = x_train.shape 129 | 130 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 131 | 132 | constant = -np.power(length_scale, -2) 133 | 134 | for idim in range(d_dims): 135 | for itrain in range(n_train): 136 | for itest in range(n_test): 137 | derivative[itest, itrain, idim] = \ 138 | constant[idim] * weights[itrain] \ 139 | * (x_function[itest, idim] - x_train[itrain, idim]) \ 140 | * K[itest, itrain] 141 | 142 | return derivative 143 | 144 | 145 | 146 | # @staticmethod 147 | # @numba.njit('float64[:,:](float64[:,:],float64[:,:],float64[:,:],float64[:],float64)',fastmath=True, nogil=True) 148 | # def rbf_derivative(x_train, x_function, K, weights, length_scale): 149 | # # # check the sizes of x_train and x_test 150 | # # err_msg = "xtrain and xtest d dimensions are not equivalent." 151 | # # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 152 | 153 | # # # check the n_samples for x_train and weights are equal 154 | # # err_msg = "Number of training samples for xtrain and weights are not equal." 155 | # # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 156 | 157 | # n_test, n_dims = x_function.shape 158 | 159 | # derivative = np.zeros(shape=x_function.shape) 160 | 161 | # for itest in range(n_test): 162 | # derivative[itest, :] = np.dot((np.expand_dims(x_function[itest, :], axis=0) - x_train).T, 163 | # (K[itest, :] * weights)).flatten() 164 | 165 | # derivative *= - 1 / length_scale**2 166 | 167 | # return derivative 168 | @numba.njit(parallel=True, fastmath=True) 169 | def ard_derivative_numba(x_train, x_function, K, weights, length_scale): 170 | 171 | # # check the sizes of x_train and x_test 172 | # err_msg = "xtrain and xtest d dimensions are not equivalent." 
173 | # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 174 | 175 | # # check the n_samples for x_train and weights are equal 176 | # err_msg = "Number of training samples for xtrain and weights are not equal." 177 | # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 178 | 179 | n_test, n_dims = x_function.shape 180 | 181 | derivative = np.zeros(shape=x_function.shape) 182 | 183 | length_scale = np.diag(- np.power(length_scale, -2)) 184 | 185 | for itest in range(n_test): 186 | derivative[itest, :] = np.dot(length_scale.dot((x_function[itest, :] - x_train).T), 187 | (K[itest, :].reshape(-1, 1) * weights)) 188 | 189 | return derivative 190 | 191 | @staticmethod 192 | @numba.njit('float64[:,:](float64[:,:],float64[:,:],float64[:,:],float64[:,:],float64)',fastmath=True, nogil=True) 193 | def numba_rbf_derivative(x_train, x_function, K, weights, length_scale): 194 | # # check the sizes of x_train and x_test 195 | # err_msg = "xtrain and xtest d dimensions are not equivalent." 196 | # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 197 | 198 | # # check the n_samples for x_train and weights are equal 199 | # err_msg = "Number of training samples for xtrain and weights are not equal." 200 | # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 201 | 202 | n_test, n_dims = x_function.shape 203 | 204 | derivative = np.zeros(shape=x_function.shape) 205 | 206 | for itest in range(n_test): 207 | derivative[itest, :] = np.dot((np.expand_dims(x_function[itest, :], axis=0) - x_train).T, 208 | (np.expand_dims(K[itest, :], axis=1) * weights)).flatten() 209 | 210 | derivative *= - 1 / length_scale**2 211 | 212 | return derivative 213 | 214 | 215 | @staticmethod 216 | @numba.njit('float64[:,:](float64[:,:],float64[:,:],float64[:,:],float64[:,:],float64[:])', 217 | nogil=True, fastmath=True) 218 | def ard_derivative_numba(x_train, x_function, K, weights, length_scale): 219 | # # check the sizes of x_train and x_test 220 | # err_msg = "xtrain and xtest d dimensions are not equivalent." 221 | # np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 222 | 223 | # # check the n_samples for x_train and weights are equal 224 | # err_msg = "Number of training samples for xtrain and weights are not equal." 
225 | # np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 226 | 227 | n_test, n_dims = x_function.shape 228 | 229 | derivative = np.zeros(shape=x_function.shape) 230 | 231 | length_scale = np.diag(- np.power(length_scale, -2)) 232 | 233 | for itest in range(n_test): 234 | # print( np.expand_dims(x_function[itest, :], axis=0).shape, x_train.shape) 235 | # print(length_scale.shape, (np.expand_dims(x_function[itest, :], axis=0) - x_train).T.shape) 236 | # print(np.expand_dims(K[itest, :], axis=1).shape, weights.shape) 237 | # print(derivative[itest, :].shape) 238 | derivative[itest, :] = np.dot(np.dot(length_scale, (np.expand_dims(x_function[itest, :], axis=0) - x_train).T), 239 | (np.expand_dims(K[itest, :], axis=1) * weights)).flatten() 240 | break 241 | 242 | return derivative 243 | 244 | 245 | def ard_derivative(x_train, x_function, K, weights, length_scale): 246 | 247 | n_test, n_dims = x_function.shape 248 | 249 | derivative = np.zeros(shape=x_function.shape) 250 | length_scale = np.diag(- np.power(length_scale, -2)) 251 | for itest in range(n_test): 252 | derivative[itest, :] = np.dot(length_scale.dot((x_function[itest, :] - x_train).T), 253 | (K[itest, :].reshape(-1, 1) * weights)) 254 | 255 | return derivative 256 | 257 | 258 | @staticmethod 259 | def ard_derivative_full(x_train, x_function, K, weights, length_scale): 260 | 261 | n_test, d_dims = x_function.shape 262 | n_train, d_dims = x_train.shape 263 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 264 | 265 | constant = np.diag(-np.power(length_scale, -2)) 266 | 267 | weights = np.tile(weights, (1, d_dims)) 268 | 269 | for itest in range(n_test): 270 | X = (np.tile(x_function[itest, :], (n_train, 1)) - x_train).dot(constant) 271 | 272 | term3 = np.tile(K[itest, :].T, (1, d_dims)).T 273 | derivative[itest, :, :] = X * weights * term3 274 | 275 | return derivative 276 | 277 | def rbf_full_derivative(x_train, x_test, weights, gamma): 278 | 279 | if np.ndim(x_test) == 1: 280 | x_test = x_test[np.newaxis, :] 281 | 282 | if np.ndim(weights) == 1: 283 | weights = weights[:, np.newaxis] 284 | 285 | n_test, d_dims = x_test.shape 286 | n_train, d_dimst = x_train.shape 287 | 288 | assert(d_dims == d_dimst) 289 | 290 | full_derivative = np.zeros(shape=(n_test, n_train, d_dims)) 291 | 292 | K = pairwise_kernels(x_test, x_train, gamma=gamma, metric='rbf') 293 | constant = -2 * gamma 294 | 295 | for itest in range(n_test): 296 | 297 | term1 = (np.tile(x_test[itest, :], (n_train, 1)) - x_train) 298 | term2 = np.tile(weights, (1, d_dims)) 299 | term3 = np.tile(K[itest, :].T, (1, d_dims)).T 300 | 301 | full_derivative[itest, :, :] = term1 * term2 * term3 302 | 303 | full_derivative *= constant 304 | 305 | return full_derivative 306 | 307 | 308 | def rbf_derivative_full(x_train, x_function, K, weights, length_scale, nder=1): 309 | 310 | n_test, d_dims = x_function.shape 311 | n_train, d_dims = x_train.shape 312 | 313 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 314 | 315 | weights = np.tile(weights, (1, d_dims)) 316 | 317 | if nder == 1: 318 | 319 | constant = - 1 / length_scale**2 320 | 321 | for itest in range(n_test): 322 | 323 | term1 = (np.tile(x_function[itest, :], (n_train, 1)) - x_train) 324 | term3 = np.tile(K[itest, :].T, (1, d_dims)).T 325 | derivative[itest, :, :] = term1 * weights * term3 326 | 327 | else: 328 | 329 | constant = 1 / length_scale**2 330 | for itest in range(n_test): 331 | 332 | 333 | term1 = constant * (np.tile(x_function[itest, :], (n_train, 1)) - x_train) ** 2 - 1 334 | 
term3 = np.tile(K[itest, :].T, (1, d_dims)).T 335 | derivative[itest, :, :] = term1 * weights * term3 336 | 337 | derivative *= constant 338 | return derivative 339 | 340 | def rbf_full_derivative_loops(x_train, x_function, weights, gamma): 341 | 342 | n_test, d_dims = x_function.shape 343 | n_train, d_dims = x_train.shape 344 | 345 | K = pairwise_kernels(x_function, x_train, gamma=gamma) 346 | 347 | full_derivative = np.zeros(shape=(n_test, n_train, d_dims)) 348 | 349 | constant = - 2 * gamma 350 | 351 | for itest in range(n_test): 352 | for itrain in range(n_train): 353 | for idim in range(d_dims): 354 | 355 | full_derivative[itest, itrain, idim] = \ 356 | weights[itrain] \ 357 | * (x_function[itest, idim] - x_train[itrain, idim]) \ 358 | * K[itest, itrain] 359 | 360 | full_derivative *= constant 361 | 362 | return full_derivative 363 | 364 | 365 | def rbf_derivative(x_train, x_function, K, weights, length_scale): 366 | """The Derivative of the RBF kernel. It returns the 367 | derivative as a 2D matrix. 368 | 369 | Parameters 370 | ---------- 371 | xtrain : array, (n_train_samples x d_dimensions) 372 | 373 | xtest : array, (ntest_samples, d_dimensions) 374 | 375 | K : array, (ntest_samples, ntrain_samples) 376 | 377 | weights : array, (ntrain_samples) 378 | 379 | length_scale : float, 380 | 381 | Return 382 | ------ 383 | 384 | Derivative : array, (n_test,d_dimensions) 385 | 386 | """ 387 | n_test, n_dims = x_function.shape 388 | 389 | derivative = np.zeros(shape=x_function.shape) 390 | 391 | for itest in range(n_test): 392 | derivative[itest, :] = np.dot((x_function[itest, :] - x_train).T, 393 | (K[itest, :].reshape(-1, 1) * weights)) 394 | 395 | derivative *= - 1 / length_scale**2 396 | 397 | return derivative 398 | 399 | 400 | def rbf_derivative(x_train, x_function, weights, gamma): 401 | 402 | # check the sizes of x_train and x_test 403 | err_msg = "xtrain and xtest d dimensions are not equivalent." 404 | np.testing.assert_equal(x_function.shape[1], x_train.shape[1], err_msg=err_msg) 405 | 406 | # check the n_samples for x_train and weights are equal 407 | err_msg = "Number of training samples for xtrain and weights are not equal." 408 | np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 409 | 410 | K = pairwise_kernels(x_function, x_train, gamma=gamma, metric='rbf') 411 | 412 | n_test, n_dims = x_function.shape 413 | 414 | derivative = np.zeros(shape=x_function.shape) 415 | 416 | constant = - 2 * gamma 417 | 418 | for itest in range(n_test): 419 | 420 | if n_dims < 2: 421 | derivative[itest, :] = np.dot((x_function[itest, :] - x_train).T, 422 | (K[itest, :][:, np.newaxis] * weights)) 423 | 424 | else: 425 | derivative[itest, :] = np.dot((x_function[itest, :] - x_train).T, 426 | (K[itest, :] * weights).T) 427 | 428 | derivative *= constant 429 | 430 | return derivative 431 | 432 | 433 | def rbf_derivative_slow(x_train, x_function, weights, 434 | n_derivative=1, gamma=1.0): 435 | """This function calculates the rbf derivative 436 | Parameters 437 | ---------- 438 | x_train : array, [N x D] 439 | The training data used to find the kernel model. 440 | 441 | x_function : array, [M x D] 442 | The test points (or vector) to use. 443 | 444 | weights : array, [N x D] 445 | The weights found from the kernel model 446 | y = K * weights 447 | 448 | kernel_mat: array, [N x M], default: None 449 | The rbf kernel matrix with the similarities between the test 450 | points and the training points. 
451 | 452 | n_derivative : int, (default = 1) {1, 2} 453 | chooses which nth derivative to calculate 454 | 455 | gamma : float, default: None 456 | the parameter for the rbf_kernel matrix function 457 | 458 | Returns 459 | ------- 460 | 461 | derivative : array, [M x D] 462 | returns the derivative with respect to training points used in 463 | the kernel model and the test points. 464 | 465 | Information 466 | ----------- 467 | Author: Juan Emmanuel Johnson 468 | Email : jej2744@rit.edu 469 | juan.johnson@uv.es 470 | """ 471 | 472 | # initialize rbf kernel 473 | derivative = np.zeros(np.shape(x_function)) 474 | 475 | # check for kernel mat 476 | K = pairwise_kernels(x_function, x_train, gamma=gamma) 477 | 478 | # consolidate the parameters 479 | theta = 2 * gamma 480 | 481 | # 1st derivative 482 | if n_derivative == 1: 483 | 484 | # loop through dimensions 485 | for dim in np.arange(0, np.shape(x_function)[1]): 486 | 487 | # loop through the number of test points 488 | for iTest in np.arange(0, np.shape(x_function)[0]): 489 | 490 | # loop through the number of test points 491 | for iTrain in np.arange(0, np.shape(x_train)[0]): 492 | 493 | # calculate the derivative for the test points 494 | derivative[iTest, dim] += theta * weights[iTrain] * \ 495 | (x_train[iTrain, dim] - 496 | x_function[iTest, dim]) * \ 497 | K[iTrain, iTest] 498 | 499 | # 2nd derivative 500 | elif n_derivative == 2: 501 | 502 | # loop through dimensions 503 | for dim in np.arange(0, np.shape(x_function)[1]): 504 | 505 | # loop through the number of test points 506 | for iTest in np.arange(0, np.shape(x_function)[0]): 507 | 508 | # loop through the number of test points 509 | for iTrain in np.arange(0, np.shape(x_train)[0]): 510 | derivative[iTest, dim] += weights[iTrain] * \ 511 | (theta ** 2 * 512 | (x_train[iTrain, dim] - x_function[iTest, dim]) ** 2 513 | - theta) * \ 514 | K[iTrain, iTest] 515 | 516 | return derivative 517 | 518 | 519 | def rbf_full_derivative_memory(x_train, x_function, weights, gamma): 520 | """This function calculates the rbf derivative using no 521 | loops but it requires a large memory load. 522 | 523 | Parameters 524 | ---------- 525 | x_train : array, [N x D] 526 | The training data used to find the kernel model. 527 | 528 | x_function : array, [M x D] 529 | The test points (or vector) to use. 530 | 531 | weights : array, [N x D] 532 | The weights found from the kernel model 533 | y = K * weights 534 | 535 | kernel_mat: array, [N x M], default: None 536 | The rbf kernel matrix with the similarities between the test 537 | points and the training points. 538 | 539 | n_derivative : int, (default = 1) {1, 2} 540 | chooses which nth derivative to calculate 541 | 542 | gamma : float, default: None 543 | the parameter for the rbf_kernel matrix function 544 | 545 | Returns 546 | ------- 547 | 548 | derivative : array, [M x D] 549 | returns the derivative with respect to training points used in 550 | the kernel model and the test points. 
551 | 552 | Information 553 | ----------- 554 | Author: Juan Emmanuel Johnson 555 | Email : jej2744@rit.edu 556 | juan.johnson@uv.es 557 | """ 558 | n_train_samples = x_train.shape[0] 559 | n_test_samples = x_function.shape[0] 560 | n_dimensions = x_train.shape[1] 561 | 562 | K = pairwise_kernels(x_function, x_train, gamma=gamma) 563 | 564 | # create empty block matrices and sum 565 | derivative = np.tile(weights[:, np.newaxis, np.newaxis], 566 | (1, n_test_samples, n_dimensions)) * \ 567 | (np.tile(x_function[np.newaxis, :, :], 568 | (n_train_samples, 1, 1)) - \ 569 | np.tile(x_train[:, np.newaxis, :], 570 | (1, n_test_samples, 1))) * \ 571 | np.tile(K[:, :, np.newaxis], 572 | (1, 1, n_dimensions)) 573 | 574 | # TODO: Write code for 2nd Derivative 575 | # multiply by the constant 576 | derivative *= -2 * gamma 577 | 578 | return derivative 579 | 580 | 581 | 582 | 583 | 584 | 585 | 586 | 587 | 588 | 589 | 590 | def rbf_derivative_full(xtrain, xtest, K, weights, length_scale): 591 | """The Derivative of the RBF kernel. It returns the full 592 | derivative as a 3D matrix. 593 | 594 | Parameters 595 | ---------- 596 | xtrain : array, (n_train_samples x d_dimensions) 597 | 598 | xtest : array, (ntest_samples, d_dimensions) 599 | 600 | K : array, (ntest_samples, ntrain_samples) 601 | 602 | weights : array, (ntrain_samples) 603 | 604 | length_scale : float, 605 | 606 | Return 607 | ------ 608 | 609 | Derivative : array, (n_test, n_train, d_dimensions) 610 | 611 | """ 612 | n_test, d_dims = xtest.shape 613 | n_train, d_dims = xtrain.shape 614 | 615 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 616 | 617 | weights = np.tile(weights, (1, d_dims)) 618 | 619 | for itest in range(n_test): 620 | term1 = (np.tile(xtest[itest, :], (n_train, 1)) - xtrain) 621 | term3 = np.tile(K[itest, ].T, (1, d_dims)).T 622 | derivative[itest, :, :] = term1 * weights * term3 623 | 624 | 625 | derivative *= - 1 / (length_scale**2) 626 | return derivative 627 | 628 | 629 | def ard_derivative(x_train, x_test, weights, length_scale, scale, n_der=1): 630 | """Derivative of the GP mean function of the ARD Kernel. This function 631 | computes the derivative of the mean function that has been trained with an 632 | ARD kernel with respect to the testing points. 633 | 634 | Parameters 635 | ---------- 636 | x_train : array-like, (n_train_samples x d_dimensions) 637 | The training samples used to train the weights and the length scale 638 | parameters. 639 | 640 | x_test : array-like, (n_test_samples x d_dimensions) 641 | The test samples that will be used to compute the derivative. 642 | 643 | weights : array-like, (n_train_samples, 1) 644 | The weights used from the training samples 645 | 646 | length_scale : array, (d_dimensions) 647 | The length scale for the ARD kernel. This includes a sigma value 648 | for each dimension. 649 | 650 | n_der : int, default: 1, ('1', '2') 651 | The nth derivative for the mean GP/KRR function with the ARD kernel 652 | 653 | Returns 654 | ------- 655 | derivative : array-like, (n_test_samples x d_dimensions) 656 | The computed derivative. 657 | 658 | Information 659 | ----------- 660 | Author : Juan Emmanuel Johnson 661 | Email : jemanjohnson34@gmail.com 662 | 663 | References 664 | ---------- 665 | Differenting GPs: 666 | http://mlg.eng.cam.ac.uk/mchutchon/DifferentiatingGPs.pdf 667 | """ 668 | # check the sizes of x_train and x_test 669 | err_msg = "xtrain and xtest d dimensions are not equivalent." 
670 | np.testing.assert_equal(x_test.shape[1], x_train.shape[1], err_msg=err_msg) 671 | 672 | n_train_samples, d_dimensions = x_train.shape 673 | n_test_samples = x_test.shape[0] 674 | length_scale = _check_length_scale(x_train, length_scale) 675 | 676 | # Make the length_scale 1 dimensional 677 | if np.ndim(length_scale) == 0: 678 | length_scale = np.array([length_scale]) 679 | if np.ndim(weights) == 1: 680 | weights = weights[:, np.newaxis] 681 | 682 | if len(length_scale) == 1 and d_dimensions > 1: 683 | length_scale = length_scale * np.ones(shape=d_dimensions) 684 | elif len(length_scale) != d_dimensions: 685 | raise ValueError('Incorrect Input for length_scale.') 686 | 687 | # check the n_samples for x_train and weights are equal 688 | err_msg = "Number of training samples for xtrain and weights are not equal." 689 | np.testing.assert_equal(x_train.shape[0], weights.shape[0], err_msg=err_msg) 690 | 691 | if int(n_der) == 1: 692 | constant_term = np.diag(- np.power(length_scale**2, -1)) 693 | 694 | else: 695 | constant_term2 = (1 / length_scale)**2 696 | constant_term4 = (1 / length_scale)**4 697 | 698 | # calculate the ARD Kernel 699 | kernel_mat = ard_kernel(x_test, x_train, length_scale=length_scale, scale=scale) 700 | 701 | # initialize derivative matrix 702 | derivative = np.zeros(shape=(n_test_samples, d_dimensions)) 703 | if int(n_der) == 1: 704 | for itest in range(n_test_samples): 705 | 706 | x_tilde = (x_test[itest, :] - x_train).T 707 | 708 | kernel_term = (kernel_mat[itest, :][:, np.newaxis] * weights) 709 | 710 | derivative[itest, :] = constant_term.dot(x_tilde).dot(kernel_term).squeeze() 711 | 712 | else: 713 | for itest in range(n_test_samples): 714 | 715 | x_term = np.dot(constant_term2, np.ones(shape=(d_dimensions, 716 | n_train_samples))) 717 | 718 | x_term += np.dot(constant_term4, (x_test[itest, :] - x_train).T**2) 719 | 720 | derivative[itest, :] = np.dot(x_term, kernel_mat[itest, :] * weights).T 721 | 722 | return derivative 723 | 724 | def ard_derivative_full(x_train, x_function, K, weights, length_scale): 725 | 726 | n_test, d_dims = x_function.shape 727 | n_train, d_dims = x_train.shape 728 | derivative = np.zeros(shape=(n_test, n_train, d_dims)) 729 | 730 | constant = np.diag(-np.power(length_scale, -2)) 731 | 732 | weights = np.tile(weights, (1, d_dims)) 733 | 734 | for itest in range(n_test): 735 | X = (np.tile(x_function[itest, :], (n_train, 1)) - x_train).dot(constant) 736 | 737 | term3 = np.tile(K[itest, :].T, (1, d_dims)).T 738 | derivative[itest, :, :] = X * weights * term3 739 | 740 | return derivative -------------------------------------------------------------------------------- /notebooks/uncategorized/eigenmap/variational_nystrom.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Variational Nystrom" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 28, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import numpy as np\n", 17 | "from sklearn.utils import check_random_state\n", 18 | "from scipy.sparse import csr_matrix, spdiags\n", 19 | "\n", 20 | "import matplotlib.pyplot as plt\n", 21 | "from mpl_toolkits.mplot3d import Axes3D\n", 22 | "from sklearn.neighbors import NearestNeighbors\n", 23 | "\n", 24 | "plt.style.use('ggplot')\n", 25 | "\n", 26 | "%matplotlib inline" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "## Variational 
Nystrom Method" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "$X \\in \\mathbb{R}^{NxN}$" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "## Generate Data" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 20, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "data": { 57 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADnCAYAAAC9roUQAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzsnXmYW+V1/z930a4ZzWbG491gMBgv2MEBHMKSEJayO4EAgUKWh5QlJYE0pYU2IaVtQpu0zS9lacIalgTCEmgCAVxIiAPEgMEGT2wD9oDxPouk0S7d+/tDfq+vNNp1NTOauZ/nMcaS5t53ZqTvPfe853yPpOs6NjY2NjajgzzWC7CxsbGZTNiia2NjYzOK2KJrY2NjM4rYomtjY2Mzitiia2NjYzOKqKWe3L59u13aYGNjY1Ml06ZNk4o9Z0e6NjY2NqOILbo2NjY2o4gtujY2NjajiC26NjY2NqOILbo2NjY2o4gtujY2NjajiC26NjY2NqOILbo2NjY2o4gtujY2NjajiC26NjY2NqOILbo2NjY2o4gtujY2NjajiC26NjY2NqOILbo2dSPL9tvIxqZS7E+LTc2oqorT6URRlBHPSZKEx+MZg1XZ2IxvbNG1qQpJkgyxLRXhyrKM2+0exZXZ2DQHJU3MbWwEkiTh8/lIp9NomjbWy7GxaVrsSNemJJIk4XA4cDqdOByOivO3uq4jSUXN8yumtbW17mPY2Iwn7EjXpiCyLKMoiiGcuq5bJqTV4HQ6R/V8NjaNxhZdmxzyxdbGxsZabNG1AUBRlIJVCGZKRbqSJKHrekWvHW1EaiQSiYz1UmxsbNGd7FQituUYL+JaDEmS7Fpim3GDLbqTFFVVLREip9Np1ONqmkY6nSaVSpHJZOo+to3NRMQW3UmEJEkoilKz2JpTBi6XC4/HQzKZJBQKkUwmAXA4HKiqisvlwuFwMGXKlBwxTqfTVZWdiXOaUxdjRWtrK7FYjFQqNdZLsWlibNGdBEiSZIhgIpGo61gOh8MQ22AwaFQ1QFYgk8mkIcBOp5O9e/caDRUOhwO3242qqiiKUpcYjwXjPY1i0xzYojuBEWInIlxVVWsSXUmScLvduN1u0um0IbaVous6qVRqRIRYTozT6bSRjx0P6YrxEnHbNDe26E5AhJjJspwThdZyHI/Hg9PpJB6PE41GCwpPrRFgJWIsyzKBQGCEGIvoeDxHxsXo7OxkYGDAFvBJii26E4hCDQ2Cakq4zGIbi8UYGhoCsnnc0cAsxh6Ph8HBQWP9qqqiqmrRyLiZxdhmcmCL7gTAqoYGWZbxeDyoqkosFiMajY54zVjmNc1iHIvFctZUSozFa2RZrluMrYhOrUpT2OmO5sQW3SammhrbUpGuoih4PB4URSEWixVtIhivH/ByYiwuJG1tbSMiYxEdVyLG420jrb29naGhITuqbzJs0W1CrGhoEMfxer1IkjQhS6GEGAuhHR4eBnIjY5fLhc/nq0uMxwo70m1ObNFtIuppaDBHuiL6kySJaDRq3IJXc4zRohF1upWkKZxOpyHG4vXi8UQiMa7FuFps8R5dbNEd54hyLUmSKhbHUsdqbW1F13Wi0ei4KMMaT1RSTSHqlM1iPFaRsVViKXyS4/G4BauyKYctuuMUc42tw+FA1/WaRdfhcOD1elEUhWAwaIttHpqmoW96B/p3QSQCegacLhgaAFmB7h7Sm94mPRwi7fTA/MORFixDbu9E/mgrbHwb2efHt3AZcjqF7nCg+wOkkgk0t4eMND7qjIthR7qjiy264wyz2Apqva0XvgjpdJpwOExLS0tdH/5C6xAVD4qiGFGf+Hs8oGcyaEP9pPbuJfPhe+APgMcDj98PkWE4fClseAv27IB0EnQJfH5IxEFRQJIhnQJFIa1lIJWEP/0e3esnM7WHzJ5dEI2AppGQJWjrhOhw9uslGaZOp/XLX8ezYAlaKknG6UaWZVwul+FRMdYbdFaKri3g5bFFd5wgyr7MDQ0CXderyuUKX4RUKkU4HDZuea38cOeXl4VCIWRZzinbUlWVzs7OUW311dOpbHSaSaM9dh+sfy2btw0NAQXE4KOtIx8LD5U/USQE74VyH9OAvbtMD2Tgow8I/eAfCc2YC60B5J6ZTLn4cjK/fw795eeR0ymk9k4chy9DOnwpetdU0un0qEbGVgmlLMu0tbUxMDBgwaomLrbojjGlGhrMVCKYolXX7ItgNZIk4ff7R5SXFdqc6urqYmBgIKfVV3SZaZo2Ih9a63ozA3vgt4/Dlk2wdzck49mIdDxsdukaDIdg8zsgSWjqGnb97mlIJECSIJkALUNq3Vo4oAdlycdwzJiDumcnyAryzLnIRxxFOpNpmBjbdcOjiy26Y0Q1DQ3l3sgejweXy0UikWiY2IrI1uFwEA6HKy4vK7U5JRzJPB4PLS0thsdCuRSFloyj/2EVvLwKtn0I6fpMfEaFzL7vJZ2C+L6KCVUF8T1Gw7A1TGbrJjJI2RQIUvaPw4F62BLUrin4jj0J56GLSKfTRpqiXjG2UnQnUlVHo7BFd5Sppca2UC5VVDW4XC7i8bjRqms15saJeDxuiGi95DuSCUSKQmz+ORwOOjs7yWQyJMJBIo/+DP3px7L512anaN5bh5ipGzAG6bWvkD54AfHgEJLDjTywB/f7G9FcTrxHHIVrzrx9h8xth65EjK1ML1h1wW9paSGZTNbtijcesUV3lKinocEsuvkmNI0U2/zGCVmWGz4oUtO0HDGWJIn+u38Ev/stRMINPfe4JhmHP6+D7h707/41mXCIiA4oMvEnHoSuHvjYMThPOgun14vT6TQqVqC0GI/H9EKtUbMsy0iSNK6rRWzRbTAul8uyN6LP50NV1VET22oaJ6wmExqCJ39O/5qXsrfeNtkUxfYP8x7LQHAw+2fLRpJ975L8i/OQhoMwbTayPzvC3tz0kS/GiqLgdDrHTZoCao+axb5BMBi0ZB2NwBbdBmCe0NDa2lrXG0DkUp1OJ8PDww0brlhrl5rVZDb3wr/9XXYDyqY6tAz86fewZRN6KgmpNJnDFsPcQ9BPOI20wzmiAUJVVTo6OopGxtWMXxoPpWfNsJlni66FFKqxrZV8ExpZlkfkP6ul0JtRV
VW8Xm9FXWqNbAPODPbDt75EwbIum8rRdRjcm80X6zq8/kdY/xraqqfg8m+iHHhozstFCV84nHs3ISJjcweeeH0xMbZyI61WRzgrnOQajS26FmCl2BaKOMWmmZWYxTYSiTQkB1aJb0Kmfzdc/xXLzz2pMW90aplseVoyAXf9F5mvXIekqkhTpyOpjqKHEMJaKDIuJsayLBsGQ/W+n2qNWGVZHjeNOcWwRbcOSjU0VItZBGOxWM4bx6oIU9d148OiaVrDxLYSMrEoXHcppCbe7vS4Q9eyNxC7PoL/+g66twV97jykMy9COmBqVYcqJcYtLS0oikJra2tdaYp6aIayNVt0a6DShoZKEKVR5USwXtEVNbFut7tmsbVC/DPDYfj7yyHWmNy0TRmGw9m25aG96H3voh+yCO3q6+s+rNiEi8fjOWkwERmLOzghxplMJqc5xqro1E4vTDBEDakVCF+ETCaT06prNebzpFIpIpGI5ecSO8b5ZUnm9EImPATf+RqExu+u8uRAz6YckhkYGoS3Xyf0X/+EfsnV2cfjMfD6kWoobyyUEigmqOXEWKTsqhVjeyNtgmCVaThkS8jEVN3REFvzeVpaWuo+rjnSFW3HiUSCUCjrQyCiaWGD6Pd6GfzCyaCN7zxbWRQlW541kYhHwe0h+eFW9O9eA/27QXXC1Onw5W8gu9zga6lYgKu5tS8nxqLdXAQ5lUbGdqTb5FghtiLaEyKYTCYJhUINuxoXEtv8tdSDrus5YivqhTVNMz4YsVgsm7O99i8nRufY1Ong8WW9HSYaQ/3ooaFslAtZd7T3QnDjX6H5WqC1Df7qepSp08seyoq9DZGmyGQyObXoiqLkpMjyxTgSibB79246Oztt0W1GnE6n4WVQL2KEeCN9ESA3gm6UqIs3ezKZLNqckUkm4btfh13bLD//mBEKws7tY72KxqEViOA1DcLB7J9/vpbMzXegBNpKHsbKduJ84RRCnI8Q41gsxqpVq3jooYdIJpN0dXWxYMECPv7xj1d0zl27dnHvvfca/+7v7+e0007jhBNOMB7bvHkzd955Jx0dHQAsXryYU089tervzxbdfZgbGkS7a62ia/ZFAAiHww3btRU2jpVE0LVGuubINpPJ5Iy4EaSiEfR//VZhq8RmJx5lUtcPx2Pwn98hc9Ah8JlzULqnFXzZWLQTCzF2u91ccMEF9PT0sG3bNvr7+6vKB3d3d/Otb30LyN61ffvb32bx4sUjXnfggQdy+eWXV3zcQkx60bXSNLyQL4Lf77dknflvQrPYNiqCLpRGEBcSgaZpJK743D5hmqCM89vVUWHXRzC4B15bTeayv0Y54qiGnarWvKwQa0VROOCAA2o+/6ZNm+jq6jIiWquZtKJbqqGhWtE12x7GYrGcW28ra2wlSTLSCLWIbaVrKSS25mMAaJk0iW9cku35t2kuJLn6Nmtdy0a8uga3f5/MgiOQZh0Inz4TuSVg7fLqaIywIp/7xhtvsGzZsoLPbd26lVtuuYXW1lbOPvtsenp6qj7+pBPdShsaKhGn/OkJhXwRrBLd0cgNlxJbQTqZJHb157O5PpvmpGpfC2lftK+DpgNpePt19C2boO89tKtvQFask5KxbAFOp9O88847nHnmmSOemzlzJt/+9rdxuVxs2LCBO++8kxtvvLH6dda1wiZClmVj97Pe3JOiKPj9flpaWkilUgSDwaK+CPWKrsfjoa0tu4ERCoWIxWI1r73YWtxuN21tbUiSxNDQUMGcra7rxK77EnvPP94W3MmGRHazTQgvZH0dhkNZ8f27r5L51QPoCWumCY+l2U1vby8zZswoWF5p3qdZsGABmUyG4eHhqs8x4SNdK2tsC3nMlqMW0S1kUO73+y03m6kkss2kUiRv/3d47XeWnnvSIEn7dKqJN+LKCdngHvj1I+x5fTXc+J/IdXou1+OlW2+kWyq1EAqFaGlpQZIk+vr60HUdn89X9TkmrOjWK7bmK6bwRQCqtj2sRnQbPQ1CrMUstsVSFbquE//Rd+GNly1dw6TD6c5GiToTo2a5GLqGvmMb8vf+Bv8//Teaw5nTmVgNtZrW1BvpJpNJNm7cyPnnn288tnr1agA+8YlP8NZbb7F69WrjrvnSSy+tbcO91CK3b9/edJdnqyLbQCBANBrF4/FUZHtYDKfTadgzFiO/6iHfTASyBuaJRKKuHvWWlhbDBF2M3sknk8mQ/NuvZEeS21iDJJWPFicS02fhueyv8SxZjqqqBWfflRLj1tbWER4OleDz+VAUxeiOHEumTZtWVI0nTKSrqiqtra015VjyEULpcrka6sRlFtv8qod86skNi8hW1NgWEvV0Ok3qxitgx4cFjmBTF5NJcAE++oDYXT8i/vkvIR+evVUvNPtOjNURQiz+nsheutDkomtuaAAM569abzHMLbSpVIpoNFr3L7HYUMlKxTb/66ohP43gcDhG3AVkMhmSN18H7/+5qmPbTHBUZ33pkB0foP/iTjLXzkVpax8x+05g3uAWXgsiOhZTpCt1IWsGW0doUtEtVmOraVpNoisaDVKplOFXYNXGlVl080vMotHKGwqq+Z4qztl+7UIIN2bWmk2Tk06CrBRuEa6UHR/CjX9F5sTTkFcWzn9qmkYikcjp/uzs7CQUChktvoW8FgoZ3wihHu80neiWsles9hZciFOhRgOrmxrEUMli9byVHqcU5cRWHCP2/26G1/5Q9RpsJhn1CK4gEYPnnkKbMhXluMp8CiRJMgQ1vxXfbHwjLCF1XeeOO+6gp6eHzs5O44+4A66Em266Cbfbbdw9X3fddTnP67rOY489Rm9vLw6Hg4suuoiZM2dWfHwzTSe69XoLmCsEykWC9YqulUMlS62nksgWIPTD78BrL9W8Bhubmsik4Bd3kgkNIZ92Xk1evcahChjf6LrO8ccfTygU4r333uPll18mlUpV7ZFw1VVXFW3b7+3tZc+ePdxwww309fXxyCOPcO2119b0PTSd6JailDBVW45Vj+jKsmxMVrVqqGQhKi/9uhneWG35+W1sKkbT4M/r0Lu6kY4+0dJDS5LElClTWLBgAfPnz2/IjLT169ezfPlyJElizpw5xGIxgsEggUD1LdATTnTzbylq3bSqRXTNE3yj0aiRX/J4PFUdp9h6xPdWaWQbu+s/4He/rfvcNpMYq5o70ikY7Efv323FqgpSzya6JEncfvvtAKxYsYIVK1bkPB8MBmlvbzf+3dbWNnlEt9QPVWykQe6mVS2NBtWIbi2datUihkqWS4sAxO74d/jj85avwWYSYmW52+4d8GGfdcfLo56SsWuuuYZAIEA4HOa2226ju7ubgw46qOTX1Hon3HSiWwph6+b3+41b+1rzqIWi5nzMYluqU63e3LDb7TbmnJUU2/Wvw7/fUNe5bGwayvo1ZN56FXnxxwt+LuqNVmv9WhGxtrS0sGjRIvr6+nJENxAIMDi431FvaGiI1tbWms41YQxvFEUZsUFWTx61lOiKcdM+n49YLEYoFGpIHslsRCOM0Au9qWKr/pfYpafagmsz/kkm4K7/Qnvw
DrTh8Iin6/XSrYVEImE0DCUSCTZu3DjCsnHhwoWsWbMGXdfZunUrHo+nptQCNGmka771F6UjkiSRSCRwOByW3N6bUxUC4cFQT1twJRTK2ZpHvgtiLz0PP/33hqzBxqZhRIdh9Sr0vvfQLv8mcle38dRYeOmGw2HuuusuIPu5X7ZsGYcddliO78KCBQvo7e3l5ptvxul0cuGFF9Z0LmhS7wUxaVYIYCwWM9oHfT4f4fDIK2i1mI9Vr9gGAgGCwfJ2iGaxzfdGEJt0w8PDxJ5/Cn7231V/TzY24wpFhcOXIV1yJXJbdkqDw+HA4/FU7Z+gqirt7e3s2bOnESutmgnnvSC8LvN9EaxqaBDHkmWZ1tZWNE1rqAdDpaVf/ff/D9rj9zVkDTY2o04mDb1vor/xMnzqdKD2icLN4rsATSq6oVCoYL7VKtEVUbSiKASDwbrFVqwr/81UcenXqifhvlvrWoONzbgklYQ/PE/X+ZeiaZrxGVBVtap9Elt0x5B6RFcY3mQyGcLhMC0tLZZFt2bRrVhs//QH+O+bLTm/jc24Zdd29mx9H6W1zag8EuY3wAgnskKfSasmEY8GTSm6Vv9wze5iwvAG6i/1ElQttn3vwz9eacm5bWzGPckEejKJpmmGwJr9pwv5LQCGP8OWLVsIBAJ0dnaO1XdQFU0pulZRTGytRuSGy4rtH1bBT/6tIWuwsRm/aOgP34n+pa8jt7SMSCsU8lsADH/erVu3smHDBvbu3YvD4WDq1KmcccYZxrSXUgwODvLAAw8YKctjjjmG448/Puc1mzdv5s477zRGsi9evJhTT63MvKcQTSm65UxvyiHGmKfTaUKhUMNuS0Rkq+s6w8PDRXNUsc3vwM3XFXzOxgaXJzscMl58+kjTs3Uz+h+eRTrnCxV/HkWku3z5cj796U+TyWTo7+9n586dxgDJcsiyzNlnn83MmTOJx+P84Ac/YP78+UydOjXndQceeGDVBjrFaErRLUWxTSvY75ubTCYrEttaxTg/jSDqiPNJ9e8lfe3FNZ3DZhKRiJFV3QlM/x707R/UNZQylUrhdruZM2dOxV8XCASMJge32013dzfBYHCE6FrJhBRdMZNJYBbbUrf3hY5VTYK+WM42v6oivvnP6Dd/vYrvysamOTaJakeHN/6ElEyM2fj1/v5+tm3bxuzZs0c8t3XrVm655RZaW1s5++yzR3SsVUNTim6lpjeVblyVOk8lv8yKzcPf/TP8ky22uUjgdEFy5Nw2m0lGOEjijVfRD19a9ZfWWzKWSCS4++67Offcc3G73TnPzZw5k29/+9u4XC42bNjAnXfeyY033ljzuSaM94JA13VcLpfhWRAMBonFYjVdBcvV/Zq9EUqdJ7b1XYYuONEW3ILotuDa7EMj/darNacXao10M5kMd911Fx/72MdYsmTJiOeFpwvAggULyGQydQ3AnVCRrsfjMcbvVGvlWOw8hUS30gg6FRwg/dcX1b0OG5vJQnrX9pq+rtZcsK7rPPTQQ3R3d3PiiYXN1UOhEC0tLUiSRF9fH7qu4/P5alonNKnomsmfCBGNRi2rRsgX3UrFNr5lM/p3vmbJGmxsJhO6VNvNd63phS1btvDaa6/R09PDLbfcAsAZZ5xh2Dh+4hOf4K233mL16tXG5OJLLy08ZLNSmtLwBvaXfQmxFdZsTqfT8NKtF9GdJstyUSMaM7Etm8EWW5tmR3VkJz2MAdLUGcj/VH3Le09PDzt27GjAimpjwhneAEazQX4awUrTG+HRG4/Hy27ExW64Era9b8l5bWzGlDESXAC5raPqz3AztQBDE4vu0NBQwV+MFaIr0giZTIZYLGZE0YWIP/EA+uM/q+t8NjY2WeTWNjqmTAEq81yA+jbRxoKmFd1i1CO6+Tlbh8Nh9HnnE/vXv4U/v1XPUicmY3hratPkOFzgdLJ3715gv+eC8NhVVRVd13OEOJFI1LyJNlY0regWE9dKZpvlU2lTA0Dq/U2kv/t10Jvnlzyq2IJbO4qa9ZidrHh9SO79k7OF54L5TlOSJMNzwe12s3btWn77298SCATo6uqip6eHQw89tOL5Zb29vTz22GPous7RRx/NSSedlPN8Op3m/vvvZ9u2bXi9Xi699NK6jXWaVnSLUWjMTjEqbWoQxFf9L/p9P7Z0veMDicZ2PFVwfFkBrTEm8U2BLGebRGIZJn73WRF0De/5X6LU3BcR6YqRXIcffjhLly4lnU7T29vL9u3biUQiFYmupmn88pe/5IorrqCtrY0f/vCHLFy4MKcF+JVXXsHr9XLjjTfyxhtv8NRTT3HZZZfV9W02reiWyuGUE91KS7+E6GqpFIm//TL0765rzeOXsRBcCRwOOOgwmDYT/vR7iEWy4ptOT847iVhtk6snCtKSo6C1DaoctyWmcs+fP5/58+dX/HV9fX10dXXR1dUFwNKlS1m/fn2O6K5fv95wFFuyZAmPPvpo3ftGTSu6tVBtW7B4PvH3l09gwbUKGRwqaDpkzCmGYj9jPZvDm3MQTJ8DC5YiDexCT6XhmUchXH6m3ISiiXKSjcJx3GdGdVRPMBikvb3d+HdbWxt9fX1FXyOqmSKRCH6/v+rzGeut+SubiErbdfPRNI3wI/fA7vFT/zeuyNlk1LKjVzJV5HSjYdiwDj54Dzxe5BPPgMOXTT7BtYGubhzzF476+PVCx7LiNaVo2ki3Ek/degxvkn96icyt/wJNVIpSmgbkbWUZMlp9xx3aC1s3gctD5ulH4O03LFueTRPxyc+gqI6afRdqGasVCASMzjPIlqHm54LFa9ra2oxNvUrM0Uuut66vHqdIklSX4U3qz+vI/Pc/TxzBlRUakrdNpeo/bmQYNm+E//25LbiTla5upFM+W3PEWuvXzZo1i71799Lf3086nWbt2rUsXLgw5zULFy5kzZo1ALz11lscfPDBdUe6TdsGLEkSDocj5zER2QKEw+Garn5aJEzimi9kb5VtbGwai9cP37wZZeaBtLa2Eo/HSSar++y1t7eXbWIqxoYNG3j88cfRNI2jjjqKk08+md/85jfMmjWLhQsXkkqluP/++/noo4/wer385V/+pbHxVopSbcATQnSFSbnwRvD5fMRisapFV08liX/zMhgaaMCKm5x6S7pk2d4ssslFVuD676HMzVYcBAIBotGoUQ5WKZ2dnYTD4arFupFMSO8FyBXb/KaGWvI82oa3IDSJN3EkGdALp1UquaWSlGwFQzLJiLSDNm6v3zZjROvf34Jr6VFG3a2iKGO+kTYaNK3oSpKELMsFN8iqaZAwk3rz1cldoI+ejT4KdUVV0imlyJAuVtzfPB8Km1HglJVEZh9MbGjIaPV1OBx0dGQNb0Sbr2j5LSXG9U6NGG2aVnQ1TStq31hL8XLqtT+gv/KiBStrYmr0MjWwW4BtKuGsC1DOzJr7a5pGIpEgkUjgcrno7+8HyGn1VVXViIKFEAsxBjvSHRdUK7q6rpP5zS8hHm3gqpqASR3l24wK13wHZeGygk+ZP7NivLo5sJJl2RBjv9/P9u3bueOOO5g+fTrt7e1Mmza
NmTNnVuy7IPjVr37FO++8g6IodHV1ceGFFxYsC7vppptwu91IkoSiKFx33XVVnUfQ1KJrhemNLMt4nE6SOuhNdItiY9NUTJmK9N1bkdXaJUfTNJLJpLFh5vV6ueaaa9A0jXXr1rFlyxZisRhHHnlkVcedP38+Z5xxBoqi8OSTT/L8889z1llnFXztVVddVVc3GjS56BZD07QR5WT5yLKM1+tFURSG+95H27p5lFY3jrArCmxGgzM+j3L2FxpyaFVVmTJlCqqqsmxZ4Qi6HIceeqjx/3PmzOGttxpr2TohRbdUekGWZcObMxqNkgwOEf+3G5vv1lpRsjnYivKoRbrRKhVc0VxhC7RNVcjww5+htLQ07gwWb6K9+uqrLF1aeAS8JEncfvvtAKxYsYIVK1bUdI6mFt1S6YX8x81iG4vFiESyjk7JPzwH27aMynotRder6Cesc5NBy5AVbhubCvneXSid5ZsIBLVuhlU6NeLWW28lFAqNePz0009n0aJFADz77LPIsszHPvaxgse45pprCAQChMNhbrvtNrq7uznooIOqXnNTi24xzKIrSRJer3eE2AJosRjaI3eP1TKrR1H2ex1o2v7IU5JHwQqxeXaHbcaQ085DPvfiqquH6mkBriTSvfLKK0s+/6c//Yl33nmHq666qujaA4EAAC0tLSxatIi+vr7JJ7rlfHC9Xi9Op5NoNJojtoLMi7/Z5x/QJOjGf/IeL/CmGxUhtrHZh78Nvvc/KC53TV9e68gdK9ILvb29rFq1iq997Ws4nc6Cr0kkEui6bhhobdy4kVNOOaWm8zW16BZCkiQjjRCPx0dMCzajF+u+Gq9Uk3e2BddmlHD/3S14lx5lbF6US/HxAAAgAElEQVSLgZLiTyWiWOtwSStqdB999FHS6TS33pod/T5nzhzOP/98gsEgP//5z/nqV79KOBzmrrvuArIb9cuWLeOwww6r6XxN670A++v2IPvDd7vduFwuYrEYHo+npOACZCJhkleeNxpLtRlrJGlft90kHodjNYcuwfv3t6Cqas6dpKqqRj2tGO5qbmwQ3WZmnE4nbre7YN61FH6/H0mSCFc5baLRTFjvBcgVW3Nk6/F4ynwlyM0U5drUh65P7qGPVvOD+1Ba2wr6JYjGhvyBkkKEfT6fESyJ19azkdZMLcDQ5KLrcDgIBAIF0wiV/AJrsX6ckEgSINkpCZvyLDsW5YpvGf+UZdloxy2Frus5jQ0CERF7PB5jHE6xdt9CNNv4dWhy0U0mk0VTCJqmlb0Kpv77Xxu1tLFFksrnqiV538jvFCCDbl+AbEoQmALf/x+UnBFN9edURaQrPquxWGxEu685KjanJyr5jI9Hmlp0S1HOf0GPRWD7B6O4ojFCVrKlZvkTdnUN0iLqsAXXpgT/dg9KW0fBp6wymzEfJ7/dVyCE2OVy4fV6ufnmm3G73XR3dzN16lRmzJiRM8m3Ep5++mleeeUVfD4fAGeccQYLFiwY8bre3l4ee+wxdF3n6KOP5qSTTqrxO21y0S03J61kraAkQ5HykKahWERrfsztBqcLZBWS8WyJXKKwO9ukRVaye2t2tJ/LUZ9C+crXS77EKtGtJGLNN8G56qqrcDgc9Pb28sEHH7Bu3bqqRRfg+OOP51Of+lTR5zVN45e//CVXXHEFbW1t/PCHP2ThwoU1nQuaXHRLUU50JbcH5RMnkXnywVFclcVUkkZIJLOC4lVh5lx4f9OoLK2p0DLZn6WqZu8IbODWX6I4ygclVuVUaxXv9vZ25s+fz7x58+peQzH6+vro6uoyxvQsXbqU9evX1yy6E3IwJZR3GvN4PEz50teyM5qalUqmMWgZ8LbAUSfAzm12lFsM3faWAODr30b5yZMVCS7UXl+bT63ibUWk/dJLL/H973+fBx98kGh0pL1rMBikvb3d+HdbWxvBYO0TZiZspFtsekT+WHYqtIAcn5R5swlTHIcDnv6lXZ1QjsksuvMW0PW9O1BVterqAavSC40qGSvlu3DssccanWVPP/00TzzxBBdddFHZ89YzEbjpRbdST91i89RwuoHqCrLHBUJMJQkSBaagijylpMPOD0d9eTZNgsMF//kAitPJ4OAgkFtTW6h6wDxCpxEbaVZTzndBcPTRR/OTn/xkxOOBQMD42QAMDQ1VbZRupulFtxhCjJ1OJx6Ph1QqVXCeGlO6YWD32CyyEJXkad0ecDghkQCXZ7/oinE7+j5THEmyc5Q2xbni71CWHTPi4XI1tW63G4fDgSzLKIpCS0uLIca11r7XIrpWlIsFg0HDyGb9+vX09PSMeM2sWbPYu3cv/f39BAIB1q5dyyWXXFLzOZtedItFuqLQOpFIEAqFiv5CpXQGvSUA4bGeAiyBz58V3Ohw8Zcpaq7DGHpWbCWyQpzat3EmUgt2A4hNPlNn0vXjBwmFQlWJZKEROl1dXSSTyZwGB2BEeqIRUawVovvUU0/x0UcfAdDR0cH5558PkOO7oCgKn/3sZ7n99tvRNI2jjjqqoDhXSlN7L0D26mtOIzgcDrxer/HLKNeTnVr1FOnH74dIeGxzeh5fNjJt69xXP1zmR686shGt05mtUJDlbJurJGeF2+HMlojZ2Ji55V6U9nY6OjoYGhqqW7S6urrYu3dvzmOSJOV4L1RihFPoOOUQHanVft1oMKG9F8RFQ1VVvF4vuq4TDofRdZ2WChzrHZ8+Exwu0k//cgybJaSsQOo6RN1UZMii69moV3VCLLbfgUzUmto+AzZmTj4b5bwvG/9sZA5V13VDWM0oioLD4cDpdOLz+XKMcEQXWiUtxYJm7EaDCSC6IqcEEIlEcm6XKt1hVI86jnTvm2Mouno2DSArEByo7Esy6ezXFUtF2KJrA+D1I/3H/SPKJ8dibHkmkyGTyeQY4ciybOSJi23apVKpgmttttHrggkhurFYrOAVslLRTT31cxjYY/XSqqfaOW12vtamFLfcjdLe2bDDWyF6ItJNp9M5PioOhwNVVXG73bS0tBjGOkKMQ6EQHo/HjnTHgkQiYVwda2awH8nXYrus2kwMvnwdytHHN/w0jexGExGuedNOpCccDgcvv/wyb7zxBi6Xi+7ubqZPn86RRx5ZtRbcc8897N6drV4SPtzf+ta3Rrzupptuwu12I0kSiqJw3XXX1fCdZml60bUErx/CpQ3PbWzGPXMOJvCvd+ByuZBlecTGldVWplZ1o1WamzWnJ44//njOOOMMIpEImzZt4qOPPqqpYeGyyy4z/v+JJ57A7S4+buiqq67C76+/g7XpRbec6U05JEmi5fzLGP7e9dhZUJum5eY7ULp7GB4eZng4m+cXkaGqqsYIq3o2rvIZ68YIWZZxu93Mmzevbu8FXdd58803ueqqq+o6TiU0veiWQtTwFvuFejyefeN9nOiHL4VtfRAf2XttYzNuOflclPO+WPApERmaEd1mtWxc5TOaDmONPD/A+++/T0tLC1OmTCl6rttvvx2AFStWsGLFiprPNeFFV9xmmTG3BIvkvXL4MjLP/qr0AWWl+s0uG5tGEOhA+v6dyHmm4uUQ3W
aapo3YuBINDuaNq1IDJsdDpFuJWJfyXli0aBEAr7/+OsuWLSt6jGuuuYZAIEA4HOa2226ju7u7pvHrMAFEt9QvK9/0xul04vV6SSaTI1qClfkLYcnH4ZUXSp3NiiXb2NSBhPO2R8nUsXlcauPKTKm62lQqZelGWiPHr5fzXshkMqxbt45vfvObRV8jWoVbWlpYtGgRfX19NYtuM1tslUWkF0TnisPhIBgMEo1GC4q1Y9lRZPtpi9CE5Sk2E4gTTqP9Fy+geL11HabSyFJsWoXDYQYGBtizZw+Dg4MkEgkURcHr9eLz+ejq6iIQCOD1enE4HFVvaI3l+HWATZs20d3dTVtbW8HnE4mEUVucSCTYuHFjXW3ATR/plkKSJHw+H+l0mnA4XPaqqCz/JKnW2yFkVzLYjCNUB/zH/Sju8hOuK6EesdI0jUQiQSKRQNd1NE0jHo8bbb9CeIGcutpC6Yl612NVR9obb7wxIrVg9l4Ih8PcddddQPb7X7ZsGYcddljN52t67wVgxNVVXIUVRSGRSOTU+pUjPTRA6l/+BvbssCNbm7Hn3EtQ/uI845+BQIBoNDoiFVANoouz2FDXSvH7/SNGrZvJ918oVsZW6/fU09PDzp07x2VX2oT2XjAjyzJerxdZlolGo4b1XDWobR3IN99K6sE70Fb/3yiaxohxMbV/mGwmEA438oVfgcOX5kSCVtxSj9YGWCFXMtH2KzbthGGVJEkkEomypun5jEfBLceEEF2RRlBVNeeKWUt+CUB2unBcfCWJ3nWwd9foCKHbDcnUPuHVAI1sfrn53lQ2dfIPP0SVVeicguzfb5YtNrbKlUKWw0rRrfb23pyeEHR0dBCPx1EUpeIytmY1u4EJIrrCpDwSieQ8XnYicAkkRUGdM49MIoYuvHZzrsAWCaIsQ2sHtLTC3p1Z0Y1FIa1bc3yb5mHxkajnfxl52qwRT/l8PpxOp2FVajawEWKk6/qo19daJd4iR2ymmP/CBx98wI4dO4zvo5rP+JtvvskzzzzDrl27+MY3vsGsWft/1s899xyvvvoqkiSxcuXKgnnb/v5+7r33XqLRKDNmzODiiy+uuvV4QohuJBIpOISyVtEV0yYix59KOBbNDnQcGswTXYsEUQdScYg7ssbj8X0Wj7bgTh4UBfWYE5E/dSbS9Nk54uN0OvH7/cTj8ZyRMWbEe9z8Xi8lxGNdX1vpcYr5L6RSKT744AP++Mc/snv3bvx+P2eeeSYzZ84se66pU6fyxS9+kYcffjjn8Z07d7J27Vquv/56gsEgt956KzfccMMIXXnqqac44YQTWLZsGQ8//DCvvPIKxx57bFXf74QQ3WK/+HITgfNRVTW32mHBEbinziCz6yNS/3aDVcvNRQIiw5DRwOWG9D6rRlm2N/ImAx8/DmX6LJxLPo7n8CNQVRVJkshkMiiKgq7rBIPBqn0TCgkxZD8TdRtEmc4xFvaQHR0dnH766fh8PgYGBgiHw0bFRDmKjU1fv349S5cuRVVVOjs76erqoq+vj7lz5xqv0XWdzZs3G6N6li9fzjPPPDM5RbcYxSYC5yOqHQCGh4dzPXk7ulBcLlKNmqQrhDWVzM0d1yq4imKyfLRzwuOWAw9F/YvPIe+bUZZm/5QTj8eDx+MxZpS1trYaQizynGIwZDVIkkRLSwuKohAKhYxNZnM0XI2IjqWfrfnclQwrKEcwGGTOnDnGvwuNWY9EIjkjiWodxT4hRLfUL76U6OZXOxTdNXU4obML9jRwgKX4ANX6Jpbk7Pgec0Qky3bb8njkpLNRpk5HWnBEzsOqqtLS0kIymWRgYKSZvaIoOfWwYjPJLMTFImKXy4XP5yMajY4YYSVJUsGIuJwQj6XoltpIq6TttxIqCdhqSV9OCNGtFkmS8Hg8OByOiuoDJacL38VXErnnxzDYoHlMIpLOiVTzcLkhlSospIUicaczGz1rui2+44Hrvos68yBIp5Ba25DU7C2xJEn4/X4jAi0mnMLAxrzzL9zChImNSEmYRdjn86FpGoODgxWLZDEhFn+PdalWKcGvdOS6mUrGrPt8PmKxmJH6qXUU+4RuAy6Ex+MhEAiQyWQIBoMVF2S7V3wK/7/cjnzOxVBh/mgEagVfp5V4M6fT4PVlh1dWkqtOpUBx2II71rR1Ip18No75S5Bb25A7phiC63a7aW9vJ5VKMTQ0VHXuVtM0kskkkUiEYDDIwMAAQ0NDJBIJXC4XbW1tKIpilGOJ2thaEEKsKAqBQABN05Bl2aizrYVaS7+sLhlbuHAha9euJZ1O09/fz969e5k9e3bOayRJYt68ebz11lsArFmzpqrI2TjOROhIE/4KhQgEAgSDQcNZLB6PF+2gKYUYDRKPRYl/84vQv4eq86ViRHo5Sm2iySryvPlo7/baG23NwPLjkDxelPO+iOzbb4AtusIymQzDw8OWRo7mNIW5jFKUYIm/IbdVt5JR6aKaIhaLlez0FNFwueMpikJra2vRyoxiBAIBUqkU0Wh1Vqzr1q3j0UcfZXh4GI/Hw/Tp07niiisAePbZZ3n11VeRZZlzzz2XBQsWAHDHHXdwwQUXGJOH77vvPqLRKNOnT+eSSy4peBEr1ZE2KURXkiSSySSxWKzmN7fL5UKSJOLxOIkH/wftj8/DcLj2HGw9lEoz1IIkjc33MZFxe+HYk1E//gmk2fOQHE7jKXPNbT0m4vmY0xThcLiiqFlV1RwhFht2QoRFU4LYhJMkqSIfk0IUyhOLiqFqN6Ta2tqqbvEfTSZ8G3AhIRW/TFmWS+bJqjmHuI1q+/I1JKZMIfLUw/tLvfp31XX8ghQTw4TFrcniHJKU3ZDr6MoO6rQj6drw+uCoE3Bc8CUkeX8bujlKrDayK0epjbJSiEjXfPdntnT0er2GGCeTyZruEgWF8sQej6emDju7I20ckV/+5fFY48yk6zoOhwOXy0U8Hkf7zLmoqgtt/WtIDieZYH9e84QFKOq+Y45SFCrJ4PFmW59tquewI2DqNOQlH0ddvNx4WJZlo6xpaGjIUrEQx9Z1vaqNslKIDTtZlmltbSWRSBCNRo2o2OPxVFU5UQiRXkmn0wwPD1fdYdes49dhAolusfIvkeyvJ9IVV3xd13OcmdQTTiPjb0XftjWbr61bdPdFAfK+CHe0zW90LduCbFMdkorjU39By/mX4uyellM9oKoqTqeT4eFho+7WKrxeL263m3A4XJfrWKXHzq+cEGk90aorKifMQlwofVJu3ZV02AlD9WZkwoiuuG3L/yXW479g7lAbHh4eMSlUkiTU5cfC8mPR9uxEe/mFkXnWqvKl+15XqoKhkeg66HalQ1XMnAtLjkJfcSLDDjcMDCBJEm63G5/PZ4iFz+fD5XKVFKNKKVfPWw/VHFuM/jFfTCRJyqklFptMwk/X5XKRSCSqXrf4DIvIvppO0/HGhBHdYDBYUFxrEV1ZlvH5fMD+DrVyZTHqKeeSjkXQdmyDHR/uK+mSrC3XkqTsnDZds/OtY4nHh/qjhwrOJzNvZplLwAqJkYgKzZFhKYSbnsPhsGSfIh8rNvhEl
[... remainder of a base64-encoded PNG output omitted: 3D scatter plot of the generated swiss-roll point cloud ...]\n", 58 | "text/plain": [ 59 | "
" 60 | ] 61 | }, 62 | "metadata": { 63 | "needs_background": "light" 64 | }, 65 | "output_type": "display_data" 66 | } 67 | ], 68 | "source": [ 69 | "rng = check_random_state(1234)\n", 70 | "\n", 71 | "N = 20000\n", 72 | "\n", 73 | "a = (( 7 / 2 * np.pi - np.pi/2) * (rng.rand(N)**0.65) + np.pi/2)\n", 74 | "t = 100 * rng.rand(N)\n", 75 | "\n", 76 | "data = np.vstack((a * np.cos(a), t, a * np.sin(a))).T\n", 77 | "\n", 78 | "fig = plt.figure()\n", 79 | "ax = fig.add_subplot(111, projection='3d')\n", 80 | "\n", 81 | "ax.scatter(data[:, 0], data[:, 1], data[:, 2])\n", 82 | "\n", 83 | "plt.show()" 84 | ] 85 | }, 86 | { 87 | "cell_type": "markdown", 88 | "metadata": {}, 89 | "source": [ 90 | "## Gaussian Affinities" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 30, 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "CPU times: user 18.5 s, sys: 3.42 s, total: 21.9 s\n", 103 | "Wall time: 12.7 s\n" 104 | ] 105 | } 106 | ], 107 | "source": [ 108 | "%%time\n", 109 | "# some baseline parameters\n", 110 | "n_neighbors = 200\n", 111 | "algorithm = 'brute'\n", 112 | "metric = 'euclidean'\n", 113 | "p=2\n", 114 | "n_jobs = -1\n", 115 | "\n", 116 | "# initialize nn model\n", 117 | "nn_model = NearestNeighbors(\n", 118 | " n_neighbors=n_neighbors,\n", 119 | " metric=metric,\n", 120 | " algorithm=algorithm,\n", 121 | " p=p,\n", 122 | " n_jobs=n_jobs\n", 123 | ")\n", 124 | "\n", 125 | "# fit nn model to data\n", 126 | "nn_model.fit(data);\n", 127 | "\n", 128 | "# grab distances and indices\n", 129 | "dists, indices = nn_model.kneighbors(\n", 130 | " data, \n", 131 | " n_neighbors=n_neighbors, \n", 132 | " return_distance=True\n", 133 | ")\n", 134 | "\n" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": 31, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "def heat_kernel(distances, length_scale=None):\n", 144 | " if length_scale is None:\n", 145 | " length_scale = 1.0\n", 146 | " return np.exp(- distances**2 / length_scale)" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 32, 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "dists = heat_kernel(dists, 2)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": 33, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "# Construct sparse KNN Graph\n", 165 | "n_samples = data.shape[0]\n", 166 | "\n", 167 | "indptr = np.arange(0, n_samples * n_neighbors + 1, n_neighbors)\n", 168 | "\n", 169 | "adjacency_matrix = csr_matrix((dists.ravel(), indices.ravel(), indptr), shape=(n_samples, n_samples))\n", 170 | "\n", 171 | "# ensure that its symmetrix\n", 172 | "adjacency_matrix = 0.5 * (adjacency_matrix + adjacency_matrix.T)" 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": null, 178 | "metadata": {}, 179 | "outputs": [], 180 | "source": [] 181 | } 182 | ], 183 | "metadata": { 184 | "kernelspec": { 185 | "display_name": "Python 3", 186 | "language": "python", 187 | "name": "python3" 188 | }, 189 | "language_info": { 190 | "codemirror_mode": { 191 | "name": "ipython", 192 | "version": 3 193 | }, 194 | "file_extension": ".py", 195 | "mimetype": "text/x-python", 196 | "name": "python", 197 | "nbconvert_exporter": "python", 198 | "pygments_lexer": "ipython3", 199 | "version": "3.7.2" 200 | } 201 | }, 202 | "nbformat": 4, 203 | "nbformat_minor": 2 204 | } 205 | 
-------------------------------------------------------------------------------- /notebooks/uncategorized/examples/scaling_demos.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import time\n", 11 | "from scipy.spatial.distance import pdist\n", 12 | "from scale.nystrom import KRRNystrom, generate_data\n", 13 | "from scale.rff import KRRRFF, KRRRBFSampler\n", 14 | "from sklearn.kernel_ridge import KernelRidge\n", 15 | "from sklearn.metrics import mean_squared_error, pairwise_kernels" 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "metadata": {}, 21 | "source": [ 22 | "### Get Data" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 2, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "random_state = 123  # reproducibility\n", 32 | "\n", 33 | "x_train, x_test, y_train, y_test = generate_data(random_state=random_state)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "### Experimental Parameters" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 3, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "n_components = 100  # number of sample components to keep\n", 50 | "k_rank = 20  # rank of the matrix for rsvd\n", 51 | "lam = 1e-3  # regularization parameter\n", 52 | "kernel = 'rbf'  # rbf kernel matrix\n", 53 | "sigma = np.mean(pdist(x_train, metric='euclidean'))\n", 54 | "gamma = 1 / (2 * sigma**2)  # length scale for rbf kernel" 55 | ] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "metadata": {}, 60 | "source": [ 61 | "### Naive KRR" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 4, 67 | "metadata": {}, 68 | "outputs": [ 69 | { 70 | "name": "stdout", 71 | "output_type": "stream", 72 | "text": [ 73 | "Sklearn KRR (Time): 12.098781 secs\n", 74 | "Sklearn KRR (MSE): 0.004343\n" 75 | ] 76 | } 77 | ], 78 | "source": [ 79 | "t0 = time.time()\n", 80 | "\n", 81 | "krr_model = KernelRidge(alpha=lam, kernel=kernel, gamma=gamma)\n", 82 | "krr_model.fit(x_train, y_train)\n", 83 | "\n", 84 | "y_pred = krr_model.predict(x_test)\n", 85 | "\n", 86 | "t1 = time.time() - t0\n", 87 | "print('Sklearn KRR (Time): {:2f} secs'.format(t1))\n", 88 | "\n", 89 | "error_normal = mean_squared_error(y_pred.squeeze(),\n", 90 | "                                  y_test.squeeze())\n", 91 | "print('Sklearn KRR (MSE): {:5f}'.format(error_normal))" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {}, 97 | "source": [ 98 | "### Random Fourier Features" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 5, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "RFF (time): 0.0794 secs\n", 111 | "RFF (MSE): 0.004343\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "t0 = time.time()\n", 117 | "\n", 118 | "krr_rff = KRRRFF(lam=lam, sigma=sigma,\n", 119 | "                 n_components=n_components,\n", 120 | "                 random_state=random_state)\n", 121 | "\n", 122 | "krr_rff.fit(x_train, y_train)\n", 123 | "\n", 124 | "y_pred_rff = krr_rff.predict(x_test)\n", 125 | "\n", 126 | "t1 = time.time() - t0\n", 127 | "print('RFF (time): {:.4f} secs'.format(t1))\n", 128 | "\n", 129 | "error_rff = mean_squared_error(y_pred_rff.squeeze(), y_test.squeeze())\n", 130 | "print('RFF (MSE): {:5f}'.format(error_rff))" 131 | ] 132
| }, 133 | { 134 | "cell_type": "markdown", 135 | "metadata": {}, 136 | "source": [ 137 | "### Nystrom Approximation (Normal)" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": 6, 143 | "metadata": {}, 144 | "outputs": [ 145 | { 146 | "name": "stdout", 147 | "output_type": "stream", 148 | "text": [ 149 | "Nystrom (time): 1.7862 secs\n", 150 | "Nystrom (MSE): 0.004380\n" 151 | ] 152 | } 153 | ], 154 | "source": [ 155 | "t0 = time.time()\n", 156 | "\n", 157 | "krr_nystrom = KRRNystrom(lam=lam, kernel=kernel, sigma=sigma,\n", 158 | "                         n_components=n_components,\n", 159 | "                         svd='arpack',\n", 160 | "                         random_state=random_state)\n", 161 | "\n", 162 | "krr_nystrom.fit(x_train, y_train)\n", 163 | "\n", 164 | "y_pred = krr_nystrom.predict(x_test)\n", 165 | "\n", 166 | "t1 = time.time() - t0\n", 167 | "print('Nystrom (time): {:.4f} secs'.format(t1))\n", 168 | "\n", 169 | "error_nystrom = mean_squared_error(y_pred.squeeze(), y_test.squeeze())\n", 170 | "print('Nystrom (MSE): {:5f}'.format(error_nystrom))" 171 | ] 172 | }, 173 | { 174 | "cell_type": "markdown", 175 | "metadata": {}, 176 | "source": [ 177 | "### Nystrom Approximation (Randomized)" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": 7, 183 | "metadata": {}, 184 | "outputs": [ 185 | { 186 | "name": "stdout", 187 | "output_type": "stream", 188 | "text": [ 189 | "Nystrom (time): 2.0591 secs\n", 190 | "Nystrom (MSE): 0.004380\n" 191 | ] 192 | } 193 | ], 194 | "source": [ 195 | "t0 = time.time()\n", 196 | "\n", 197 | "krr_nystrom = KRRNystrom(lam=lam, kernel=kernel, sigma=sigma,\n", 198 | "                         n_components=n_components,\n", 199 | "                         svd='randomized', k_rank=k_rank,\n", 200 | "                         random_state=random_state)\n", 201 | "\n", 202 | "krr_nystrom.fit(x_train, y_train)\n", 203 | "\n", 204 | "y_pred = krr_nystrom.predict(x_test)\n", 205 | "\n", 206 | "t1 = time.time() - t0\n", 207 | "print('Nystrom (time): {:.4f} secs'.format(t1))\n", 208 | "\n", 209 | "error_nystrom = mean_squared_error(y_pred.squeeze(), y_test.squeeze())\n", 210 | "print('Nystrom (MSE): {:5f}'.format(error_nystrom))" 211 | ] 212 | }, 213 | { 214 | "cell_type": "markdown", 215 | "metadata": {}, 216 | "source": [ 217 | "### RBF Sampler" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 8, 223 | "metadata": {}, 224 | "outputs": [ 225 | { 226 | "name": "stdout", 227 | "output_type": "stream", 228 | "text": [ 229 | "RBF Sampler (time): 3.3447 secs\n", 230 | "RBF Sampler (MSE): 0.004967\n" 231 | ] 232 | } 233 | ], 234 | "source": [ 235 | "t0 = time.time()\n", 236 | "\n", 237 | "krr_rbf = KRRRBFSampler(lam=lam, kernel=kernel, sigma=sigma,\n", 238 | "                        n_components=2000,\n", 239 | "                        random_state=random_state)\n", 240 | "\n", 241 | "krr_rbf.fit(x_train, y_train)\n", 242 | "\n", 243 | "y_pred_rbf = krr_rbf.predict(x_test)\n", 244 | "\n", 245 | "t1 = time.time() - t0\n", 246 | "print('RBF Sampler (time): {:.4f} secs'.format(t1))\n", 247 | "\n", 248 | "error_rbf = mean_squared_error(y_pred_rbf.squeeze(), y_test.squeeze())\n", 249 | "print('RBF Sampler (MSE): {:5f}'.format(error_rbf))" 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": 9, 255 | "metadata": {}, 256 | "outputs": [ 257 | { 258 | "name": "stderr", 259 | "output_type": "stream", 260 | "text": [ 261 | "/Users/eman/anaconda3/envs/py36/lib/python3.6/site-packages/sklearn/kernel_approximation.py:470: UserWarning: n_components > n_samples.
This is not possible.\n", 262 | "n_components was set to n_samples, which results in inefficient evaluation of the full kernel.\n", 263 | "  warnings.warn(\"n_components > n_samples. This is not possible.\\n\"\n" 264 | ] 265 | } 266 | ], 267 | "source": [ 268 | "# size of data\n", 269 | "import numpy as np\n", 270 | "from sklearn.metrics.pairwise import rbf_kernel\n", 271 | "from rff import RFF\n", 272 | "from nystrom import RandomizedNystrom\n", 273 | "from sklearn.kernel_approximation import Nystroem\n", 274 | "\n", 275 | "N_SAMPLES, DIM = 1000, 200\n", 276 | "X = np.random.randn(N_SAMPLES, DIM)\n", 277 | "\n", 278 | "n_components = 50\n", 279 | "k_rank = 50\n", 280 | "\n", 281 | "# number of Monte Carlo samples D\n", 282 | "Ds = np.arange(1, 5000, 200)\n", 283 | "K_rbf = rbf_kernel(X, gamma=gamma)\n", 284 | "errors_rff, errors_rnystrom, errors_nystrom = [], [], []\n", 285 | "\n", 286 | "for D in Ds:\n", 287 | "    krr_nystrom = RandomizedNystrom(kernel=kernel, sigma=sigma,\n", 288 | "                                    n_components=D, k_rank=k_rank,\n", 289 | "                                    random_state=random_state)\n", 290 | "\n", 291 | "    krr_nystrom.fit(X)\n", 292 | "    K_rnystrom = krr_nystrom.compute_kernel(X)\n", 293 | "    errors_rnystrom.append(((K_rnystrom - K_rbf)**2).mean())\n", 294 | "\n", 295 | "    krr_nystrom = Nystroem(kernel=kernel, gamma=gamma, n_components=D)\n", 296 | "    krr_nystrom.fit(X)\n", 297 | "    L = krr_nystrom.transform(X)\n", 298 | "    K_nystrom = np.dot(L, L.T)\n", 299 | "\n", 300 | "    errors_nystrom.append(((K_nystrom - K_rbf)**2).mean())\n", 301 | "\n", 302 | "\n", 303 | "    krr_rff = RFF(sigma=sigma,\n", 304 | "                  n_components=D,\n", 305 | "                  random_state=random_state,\n", 306 | "                  projection='cos')\n", 307 | "    krr_rff.fit(X)\n", 308 | "    K_rff = krr_rff.compute_kernel(X)\n", 309 | "\n", 310 | "    errors_rff.append(((K_rff - K_rbf)**2).mean())\n", 311 | "\n" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": 10, 317 | "metadata": {}, 318 | "outputs": [ 319 | { 320 | "data": { 321 | "image/png":
"iVBORw0KGgoAAAANSUhEUgAAAuUAAAHwCAYAAAAByRFLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzs3Xl4VOX9///XO5OEEEhCIIAYJKGggLZKFUsrKruiRQVEgVoloqJFqPxq9SsuRVsUba21SlFxgaqXWtxYFJf2U0BFW4GqFaVqRJDIJoSwQ5Lh/v0RJk6SyTozOZPM89HrXDNzn/ucec+Yhtfcuc895pwTAAAAAO8keF0AAAAAEO8I5QAAAIDHCOUAAACAxwjlAAAAgMcI5QAAAIDHCOUAAACAxwjlAAAAgMcI5QDQDJnZejM7YGZ7g7ZZXtcFAAgt0esCAABRc55z7h81dTCzROdcaW1t9T0HAKB+GCkHgDhiZnlmtsLM/mRmhZJur6YtwcxuNbMNZrbNzJ40s4wj58g1M2dmV5jZ15L+aWYpZva0me0wsyIzW2lmHT19sQDQhDBSDgDxp6+k5yR1kJQkaUyItrwj20BJ2yQ9KWmWpEuDztNfUi9JhyWNl5Qh6RhJhyT1lnQg2i8EAJoLQjkANF8LzCx4WskNkkokbXLOPXikrdTMFKLtEkn3OefWSZKZTZO0xswuDzrf7c65fUf2l0hqJ6m7c+6/klZH7VUBQDPE9BUAaL5GOOfaBG2PHmnfGKJv5bajJW0IerxBZQM5wVNSgo95StIbkp4zs01m9nszSwqzfgCIG4RyAIg/rg5tmyTlBD3uIqlU0tZQxzjnSpxzdzjnjpd0mqThki6LTLkA0PwRygEAoTwr6f8zs65m1lrSXZL+Vt0qK2Y20Mx+YGY+SbtVNk3G33jlAkDTRigHgOZrcaV1yl+ux7FPqGxKyluSvpJ0UNKUGvofJekFlQXytZKWS3q6YWUDQPwx50L9FRMAAABAY2GkHAAAAPAYoRwAAADwGKEcAAAA8BihHAAAAPAYoRwAAADwWKLXBTSGrKwsl5ub63UZAAAAaOZWr1693TnXvr7HxUUoz83N1apVq7wuAwAAAM2cmW1oyHFNcvqKmX3PzB43sxe8rgUAAAAIV8yEcjN7wsy2mdmaSu3DzOwzM8s3s5skyTm3zjl3hTeVAgAAAJEVM6Fc0jxJw4IbzMwn6S+SzpF0vKRxZnZ845cGAAAARE/MhHLn3FuSCis1/0hS/pGR8WJJz0m6oNGLAwAAAKIoZkJ5NbIlbQx6XCAp28zamdnDkn5oZtNCHWhmE81slZmt+vbbbxujVgAAAKBBYn31FQvR5pxzOyRdU9OBzrk5kuZIUp8+fVwUagMAAAAiItZHygskHRP0uLOkTR7VAgAAAERFrIfylZKONbOuZpYsaaykRR7XBAAAAERUzIRyM3tW0nuSephZgZld4ZwrlTRZ0huS1kqa75z7xMs6AQAAgEiLmTnlzrlx1bQvkbSkkcsBAAAAGk3MjJQDAAAA8YpQDgAAAHiMUA4AAAB4jFAOAAAAeIxQDgAAAHiMUA4AAAB4jFAeRU8++aQ++YRl1QEAAFAzQnmUPP300xo/frxGjBihnTt3el0OAAAAYhihPEpGjRql3r17Kz8/Xz/72c/k9/u9LgkAAAAxilAeJampqVqwYIGysrL0+uuv65ZbbvG6JAAAAMQoQnkU5eTkaP78+fL5fLrnnnv0t7/9zeuSAAAAEIMI5VE2cOBA3XfffZKkyy+/XB999JHHFQEAACDWEMobwZQpU5SXl6cDBw5oxIgR2r59u9clAQAAIIYQyhuBmemhhx7SqaeeqvXr12vMmDEqLS31uiwAAADECEJ5I0lJSdFLL72kjh076p///KduuOEGr0sCAABAjCCUN6LOnTvrxRdfVFJSku6//349+eSTXpcEAACAGEAob2T9+vXTrFmzJEkTJ07UypUrPa4IAAAAXiOUe2DixIm6+uqrdejQIY0cOVJbt271uiQAAAB4iFDukQceeED9+vXTN998o9GjR6u4uNjrkgAAAOARQrlHkpOT9cILLyg7O1vvvPOOpk6d6nVJAAAA8Aih3ENHHXWUXn75ZbVo0UIPPfSQHn30Ua9LAgAAgAcI5R479dRT9cgjj0iSrr32Wr377rseVwQAAIDGRiiPAePHj9d1112nkpISXXjhhfrmm2+8LgkAAACNiFAeI/7whz9o4MCB2rJli0aNGqWDBw96XRIAAAAaCaE8RiQlJWn+/PnKycnR+++/r0mTJsk553VZAAAAaASE8hiSlZWlBQsWqGXLlpo7d67+8pe/eF0SAAAAGgGhPMb07t1bTzzxhCRp6tSpWrZsmbcFAQAAIOoI5TFo7NixuvHGG+X3+3XRRRdpw4YNXpcEAACAKCKUx6i77rpLZ599trZv366RI0dq//79XpcEAACAKCGUxyifz6dnn31W3bp10wcffKCrrrqKCz8BAACaKUJ5DMvMzNTChQvVqlUrPfPMM7rvvvu8LgkAAABRQCiPcSeccIKeeuopSdKNN96oN9980+OKAAAAEGmE8iZg5MiR+s1vfqPDhw9r7Nix+vLLL70uCQAAABFEKG8ipk+frvPOO087d+7UiBEjtHfvXq9LAgAAQIQQypuIhIQEPf300+rZs6fWrFmjvLw8LvwEAABoJgjlTUh6eroWLlyo9PR0vfjii7rrrru8LgkAAAARQChvYo477jg988wzMjPddttteuWVV7wuCQAAAGEilDdBP/3pTzVjxgw553TJJZfos88+87okAAAAhCHR6wLQMNOmTdMHH3ygF154QQMHDtTgwYN1/PHHq1evXurVq5e6deumxET+8wIAADQFFg8XC/bp08etWrXK6zIibu/everfv7/+85//VNmXlJSkY489tjykB7YePXooNTXVg2oBAACaPzNb7ZzrU+/jCOVNW3Fxsd5//32tXbu2wrZhw4aQ/c1MOTk55SE9eHQ9MzOzkasHAABoXgjlNWjOobw6+/bt02effaZPP/20QljPz89XaWlpyGM6duxYZWS9V69eOvroo2VmjfwKAAAAmh5CeQ3iMZRXp6SkRPn5+VVG1v/3v/9p//79IY9JT09Xjx491LlzZ3Xq1KnCdtRRR6lTp07q0KGDfD5fI78aAACA2EIorwGhvHaHDx/Wxo0btXbt2iqj64WFhbUen5CQoA4dOlQI6tUF+JSUlEZ4RQAAAI2PUF4DQnnDOef07bff6vPPP9emTZu0ZcsWbd68ucq2ffv2Op+zTZs2VYJ6YMvKylJmZmb51qZNG0bgAQBAk9HQUN4k18wzs16SrpOUJen/nHMPeVxSs2Vm6tChgzp06FBjv+LiYm3bti1kYA8O8lu2bFFRUZGKioq0du3aOtWQkZFRIajXdSPQAwCApqLRQ7mZPSFpuKRtzrnvB7UPk/RnST5Jjznn7q7uHM65tZKuMbMESY9GuWTUQXJysjp37qzOnTvX2O/w4cPasWNHtSPuO3bs0M6dO8
u3Xbt2lW/r16+vd13p6ekhA3taWlqFLT09vdq21NRULnQFAABR1ejTV8zsTEl7JT0ZCOVm5pP0uaShkgokrZQ0TmUBfWalU0xwzm0zs/Ml3SRplnPumZqe04vpKzu3rNeyJX+Rr2Ur+Vq1li+1tXyt0srut0qTLylZPvPJl+BTYkJi+f2G3CZYQrMNjX6/X7t27aoQ1IO3oqKiavft2rUrIjWYmVq3bl1jgA+1tW7dWq1atVKrVq0q3G/VqhVf7AQAQCNIsMb/8vomM33FOfeWmeVWav6RpHzn3DpJMrPnJF3gnJupslH1UOdZJGmRmb0qqcZQ7oXPP16qURvv9bqM5i/1yJYdvadwctpz5H81KpW088gGAAA8desZt+p3g37ndRl1FivDddmSNgY9LpDUt7rOZjZA0ihJLSQtqabPREkTJalLly6RqrPO2qR30AW7OsnvL5G/tET+w6Xy+0vLbg/75TfJnyD5TSpN+O5+Q24P1zRI3kxH0GNRc7hourn+xQUAEH+a2r9psRLKQ71r1SYc59wySctqOqFzbo6kOVLZ9JUwamuQHn1/qgV9N4Xe6Zy0f7+0Z4+0e3fZbfD9+rb5/TVUUstLb9VKysws29q2DX0/8LhjR6lTJykrS+ICygZzzungwYPau3ev9u3bp3379lW4X9vj4Pv79+/XgQMHKmwHDx5seG21/bzUIiUlpXxr0aJFhcc1tQfaWrRoEXKr776EhMb/cyUAAOGIlVBeIOmYoMedJVWTaJsBs7Iw3KqVdNRR4Z0rEPB37pQKC8tuA1vw41D7ioqkffvKtoKCuj9nQoLUoUNZQD/qqO9uQ91v1Sq819cMmZlatmypli1bqn379hE//+HDh3Xo0KEqYT2c7dChQzp48GD5VvnxwYMHVVJSUn7fa4mJidWG9uTk5Hrf1veYpKQkJScnKykpqcL9wC2rAgEAKouVUL5S0rFm1lXSN5LGSvqZtyU1EcEBv5aVT6pwrmykvS5hvrBQ2rKlbNux47v7tWndumJYry7At2/P6HuEJCQklIf+xhT4MFA5rNcU5APtgeBfeQvsr+++0tJSlZaWat++fY36HtRVQkJCyLBe39tw+9Tl+MTExPK+gftN7U/CANAUeLEk4rOSBkjKMrMCSdOdc4+b2WRJb6hsxZUnnHOfNHZtccdMSk8v23Jz635ccbG0det3wXzz5urv790r5eeXbTUJjL537Sodf7zUq1fZ7fHHS8ccU7YfMc2rDwOVOefKR+1DBfbi4uLy2+D7kdp36NAhlZSUqKSkRMXFxVVui4uLyz/AHDp0yNP3qqECHyqCg3qo8F7b/sptgS34cV331eccdW1LSGi+K1sBiD18oyeixzlp167ag/uWLVJN3wiamvpdSA8O6127SiwtiCbI7/eHDOx1vQ3cD7W/Lueoz77S0lKVlJRUuI0n9Qn1NW0+n69B+6rbH9xWeX9D9lXXL3DLhxOg7hq6JCKhHLGhpKQsnOfnS59+Kq1dW3b76adlo/KhJCdLPXpUDevHHlu2D0DEOefk9/srBPXKob1yW237A9ONgu/XtK++/UpKSuT3+0MeW11baWmpDh8+7PXbHTMSEhKqBPVQob4+bZXvR2ury3NEqk91Gx9q4guhvAaE8iausPC7kB4c1jduDN3f55O6d68a1nv0KBt1B4A6OHz4cMiwXl2wD4T/wAeAUFu4+4I/YFTuH/y4uvt12Vf5dfhrXOELdWFm8vl85R9u6rrV1r+6/TUdV9djAo+D2yPdFq3X4fWHIEJ5DQjlzdSePdL//lc1rK9bVzZ1pjKzsrnz/fpJjz0mtWjR6CUDQFPjnCv/gBLqA0Hltpr2hbrfGFttz1WXWhpaL39xaXyBD0E333yz7rjjDi+ev2l8oycQMWlp0qmnlm3BDhyQPv+8alj/4gvpq6/KtgkTpIEDvakbAJqQQMDx+VghqyECH2oCAb2+gb4h+8NtD1VvqPvhtNXlNdb3dfj9fjnn5JxTaWlpk/tSP0I5mp+WLaWTTirbgpWUSCNGSEuW1HxhKQAAEcKHmsYV/CGoqX2RHKEc8SMpScrOLru/Y4e3tQAAgIhryh+CmtZHCCBc7dqV3RLKAQBADCGUI74QygEAQAwilCO+tG1bdksoBwAAMYRQjvjCSDkAAIhBhHLEl0AoLyz0tg4AAIAghHLEF0bKAQBADCKUI74QygEAQAwilCO+BC703LlT4quPAQBAjCCUI74kJkrp6WWBvKjI62oAAAAkEcoRj5jCAgAAYgyhHPGHFVgAAECMIZQj/jBSDgAAYgyhHPGHUA4AAGIMoRzxh1AOAABiDKEc8SewLCKhHAAAxAhCOeIPI+UAACDGEMoRf1h9BQAAxBhCOeIPI+UAACDGEMoRfwjlAAAgxhDKEX8I5QAAIMYQyhF/WH0FAADEGEI54k96upSYKO3bJx065HU1AAAAhHLEIbPvRstZgQUAAMQAQjniE/PKAQBADCGUIz4RygEAQAwhlCM+EcoBAEAMIZQjPrECCwAAiCGEcsQnRsoBAEAMIZQjPgVCOauvAACAGEAoR3xipBwAAMQQQjniE6EcAADEEEI54hOhHAAAxBBCOeITq68AAIAYQihHfGKkHAAAxBBCOeJT8OorznlbCwAAiHuEcsSnFi2kVq2k0lJpzx6vqwEAAHGOUI74xRQWAAAQIwjliF+EcgAAECMI5YhfrMACAABiRKLXBTSEmZ0h6RKV1X+8c+40j0tCU8RIOQAAiBGNPlJuZk+Y2TYzW1OpfZiZfWZm+WZ2U03ncM697Zy7RtIrkv4azXrRjAWvwAIAAOAhL0bK50maJenJQIOZ+ST9RdJQSQWSVprZIkk+STMrHT/BObftyP2fSboy2gWjmWKkHAAAxIhGD+XOubfMLLdS848k5Tvn1kmSmT0n6QLn3ExJw0Odx8y6SNrlnNtdzf6JkiZKUpcuXSJTPJoXQjkAAIgRsXKhZ7akjUGPC4601eQKSXOr2+mcm+Oc6+Oc69O+ffsIlIhmh1AOAABiRKxc6Gkh2mr8mkXn3PQo1YJ4weorAAAgRsTKSHmBpGOCHneWtMmjWhAvGCkHAAAxIlZC+UpJx5pZVzNLljRW0iKPa0Jzx+orAAAgRnixJOKzkt6T1MPMCszsCudcqaTJkt6QtFbSfOfcJ41dG+IMI+UAACBGeLH6yrhq2pdIWtLI5SCetWkjJSRIu3ZJpaVSYqxcYgEAAOJNrExfARpfQoKUmVl2nyksAADAQ4RyxDdWYAEAADGAUI74xrxyAAAQAwjliG+swAIAAGIAoRzxjZFyAAAQAwjliG+EcgAAEAMI5YhvhHIAABADCOWIb6y+AgAAYgChHPGNkXIAABADCOWIb6y+AgAAYgChHPGNkXIAABADCOWIb4RyAAAQAwjliG/Bodw5b2sBAABxi1CO+NaypdSihXTokLR/v9fVAACAOEUoR3wzYwoLAADwHKEcYAUWAADgMUI5wEg5AADwGKEcIJQDAACPEcoBQjkAAPAYoRxo27bsllAOAAA8QigHGCkHAAAeI5QDr
L4CAAA8RigHGCkHAAAeI5QDhHIAAOAxQjlAKAcAAB4jlAOsvgIAADxGKAcCoXznTsnv97YWAAAQlwjlQGKilJEhOSft2uV1NQAAIA4RygGJeeUAAMBThHJAIpQDAABPEcoBiVAOAAA8RSgHJFZgAQAAniKUAxIj5QAAwFOEckD6LpQXFnpbBwAAiEuEckBipBwAAHiKUA5IhHIAAOApQjkgEcoBAICnCOWAxOorAADAU4RyQGKkHAAAeIpQDkisvgIAADxFKAckKS1NSkyU9u2TDh3yuhoAABBnCOWAJJkxhQUAAHiGUA4EEMoBAIBHCOVAACuwAAAAj8R8KDez75nZ42b2Qk1tQNgYKQcAAB6Jaig3syfMbJuZranUPszMPjOzfDO7qaZzOOfWOeeuqK0NCBsrsAAAAI8kRvn88yTNkvRkoMHMfJL+ImmopAJJK81skSSfpJmVjp/gnNsW5RqBMoyUAwAAj0Q1lDvn3jKz3ErNP5KU75xbJ0lm9pykC5xzMyUNj2Y9QI0I5QAAwCNezCnPlrQx6HHBkbaQzKydmT0s6YdmNq26thDHTTSzVWa26ttvv41g+Wi2COUAAMAj0Z6+EoqFaHPVdXbO7ZB0TW1tIY6bI2mOJPXp06fa8wPlWH0FiHmHDh1SYWGh9uzZI7/f73U5AJo5n8+ntLQ0tW3bVi1atIjqc3kRygskHRP0uLOkTR7UAVTESDkQ0w4dOqSvv/5amZmZys3NVVJSksxCjfMAQPiccyopKdHu3bv19ddfq0uXLlEN5l5MX1kp6Vgz62pmyZLGSlrkQR1ARay+AsS0wsJCZWZmKisrS8nJyQRyAFFlZkpOTlZWVpYyMzNVGOV8EO0lEZ+V9J6kHmZWYGZXOOdKJU2W9IaktZLmO+c+iWYdQJ0wUg7EtD179ig9Pd3rMgDEofT0dO3ZsyeqzxHt1VfGVdO+RNKSaD43UG/BI+XOSYzCATHF7/crKSnJ6zIAxKGkpKSoX8cS89/oCTSa5GSpdWuptFTavdvragCEwJQVAF5ojN89hHIgGCuwAAAADxDKgWDMKwcAAB4glAPBWIEFAAB4gFAOBGOkHECMM7MKm8/nU9u2bTVgwADNmzdPzlX9vrz169dXOa7ytmzZsvL+eXl5NfYdMGBA471gIE548eVBQOwilANoIqZPny5JKikpUX5+vl5++WUtX75cq1at0qxZs0Iek5GRoalTp4bcl5ubW6XtggsuUO/evevUF0B4COVAMEI5gCbi9ttvr/B4xYoVOvPMMzV79mxdf/316tq1a5Vj2rRpU+W4mowYMUJ5eXnhFQqgTpi+AgRj9RUATVS/fv3Us2dPOee0evVqr8sBUE+EciAYI+UAmrDAfHK+ZAloepi+AgRj9RUATdRbb72lzz77TMnJyfrRj34Usk9RUVHI6Su9e/fWiBEjqrQvWLBA69evr9I+depUtWnTJtySAQQhlAPBGCkHmqSm8k2foVZGaahAuA6+0NM5p3vvvVedOnUKecyuXbt0xx13VGkfP358yFC+cOFCLVy4sEp7Xl4eoRyIMEI5EIxQDqCJqByuzUyPP/64Lr/88mqPycnJCTnyXZ25c+dyoSfQSJhTDgQjlANNknOuSWzReM179+7V3//+dx1zzDG65ppr9M9//jOizwOgcRDKgWAZGVJCgrR7t1RS4nU1AFCrVq1aaciQIVq8eLH8fr/Gjx+v/fv3e10WgHoilAPBEhKkzMyy+1zsCaAJOfHEE3XVVVepoKBAf/rTn7wuB0A9EcqByliBBUATdeuttyolJUX33nuvdu7c6XU5AOqBUA5UxrxyAE1Udna2rr76ahUVFen3v/+91+UAqAdCOVAZoRxAEzZt2jSlpqbqgQce0NatW70uB0AdsSQiUBmhHEAMq20Vl44dO2rfvn0V2nJzc+u1+su8efM0b968hpQHoIEYKQcqa9u27JZQDgAAGgmhHKiMCz0BAEAjI5QDlTF9BQAANDJCOVAZoRwAADQyQjlQGaEcAAA0MkI5UBmhHAAANDJCOVAZq68AAIBGRigHKgtefaUe6/oCAAA0FKEcqCw1VUpJkQ4dkvbv97oaAAAQBwjlQCjMKwcAAI2IUA6EQigHAACNiFAOhEIoBwAAjYhQDoTCCiwAAKAREcqBUIJXYAEAAIiyGkO5mf086H6/SvsmR6sowHNMXwEQo8xMZqacnBwdPHgwZJ/c3FyZmUpLSxulptzcXOXm5jbKcwHNVW0j5b8Kuv9gpX0TIlwLEDsI5QBi3Ndff63777/f6zIAREhtodyquR/qMdB8EMoBxLDMzEy1bdtWM2fO1Pbt270uB0AE1BbKXTX3Qz0Gmg9COYAYlpqaqttuu027d+/WHXfcUWv///3vfzIzDRo0qNo+P/jBD5SUlKQtW7ZIkpxz+utf/6rTTjtN7du3V0pKio455hidffbZ+tvf/iZJWrZsmcxMGzZs0IYNG8qn1piZ8vLyys9tZhowYIC2bNmiK6+8UtnZ2fL5fJo3b155n82bN+vaa69Vbm6ukpOT1b59e40aNUqrV6+uUuu8efNkZpo3b57+/ve/64wzzlDr1q3Vvn17XX755SoqKpIkffDBBxo+fLgyMzPVunVrnX/++Vq/fn0d3mGg8SXWsr+nmf1XZaPi3Y7c15HH34tqZYCXWH0FQIy79tprNWvWLD3yyCOaMmWKjjvuuGr79uzZUwMHDtTSpUv1+eefV+n77rvvas2aNbrwwgt11FFHSZJuueUWzZw5U127dtXFF1+sjIwMbd68WStXrtTzzz+vMWPGKDc3V9OnTy+fRjN16tTyc/bu3bvCcxQWFurHP/6xWrdurVGjRikhIUEdO3aUJH311Vc6/fTTtWnTJg0aNEjjxo3Txo0b9fzzz+vVV1/Viy++qOHDh1d5XYsWLdIrr7yi4cOH65prrtG7776refPm6auvvtLdd9+twYMH64wzztAVV1yhjz/+WIsXL9aXX36pjz/+WAkJrHWBGOOcq3aTlFPTVtOxsbSdcsopDqiXtWudk5w79livKwFwxKeffup1CTFBksvOznbOOff88887SW7kyJEV+uTk5DhJrqSkpLwt0Pf666+vcs7x48c7Se7NN98sb2vbtq3Lzs52+/btq9L/22+/rfJ8OTk5NdYsyV166aUVago466yznCQ3Y8aMCu0rVqxwPp/PtW3b1u3Zs6e8fe7cuU6S8/l8btmyZeXtfr/fDRkyxElymZmZ7umnn65wvgkTJjhJbsGCBdXWClSnrr+DJK1yDcirNX5MdM5tCN4k7ZV0sqSsI4+B5onpK0DTYtY0tggbPXq0fvKTn+jll1/WO++8U2PfESNG6Oijj9a8efN06NCh8vaioiLNnz9f3bp105AhQyock5SUJJ/PV+VcWVlZ9a41OTlZ9957rxITK/6RvqCgQG+++aa6dOmiG2+8scK+0047TePGjVNhYaFeeumlKuccN26c+vfvX/44ISFBl156qSTp+9//vi655JIK/S+77DJJ0ocffljv+oFoq21JxFfM7PtH7neStEZl
q648ZWZTazoWaNIyM8tud+6U/H5vawGAGvzxj3+UJF1//fWBv3KHlJiYqCuvvFI7duzQiy++WN7+1FNP6cCBA5o4caIs6IPDJZdcovXr1+uEE07QtGnT9Prrr2vXrl0NrjM3N1cdOnSo0v7BBx9Iks444wwlJSVV2R+YBx/oF6xPnz5V2o4++mhJ0imnnFJlX3Z2tqSyDwJArKltQlVX59yaI/cvl/R359x5kvqKJRHRnCUmSm3aSM5JRy4YAhDDyiacxf4WBT/5yU80evRovf/++5o/f36NfSdOnKjExEQ98sgj5W1z5sxRcnKyLr/88gp9//SnP+n+++9Xq1atdPfdd+ucc85RVlaWLrjgAuXn59e7zsBc9coCQb9Tp04h9wfai0L8Ls7IyKjSFhiJr2lfSUlJHSoGGldtoTz4p3awpCWS5JzbI+lwtIoCYgJTWAA0EXdXjjBfAAAgAElEQVTffbeSkpI0bdo0FRcXV9svOztb5513nt566y2tXbu2/ALPkSNHqn379hX6+nw+XXfddfroo4+0detWvfjiixo5cqQWLVqkYcOGVZgCUxdWzfSdQHgOrPpS2ebNmyv0A5qr2kL5RjObYmYjVTaX/HVJMrOWkqr+jQloTliBBUAT0a1bN02aNElfffWVHnyw8nf9VTRp0iRJZSPkc+bMkSRdffXVNR7ToUMHjRo1SvPnz9egQYP05Zdfas2aNeX7fT6f/A2c6vfDH/5QkvTOO++E/AbSpUuXSpJOPvnkBp0faCpqC+VXSDpBUp6kMc65wN+OfixpbhTrArwXGCkvLPS2DgCog9/85jdq06aN7rzzTu3du7fafoMHD9Zxxx2nv/71r5o/f76OO+44DRw4sEKfQ4cO6f/+7/+qzFEvKSlR4ZHfiampqeXt7dq107fffqsDBw7Uu+7OnTtr6NChWr9+fZVvKP33v/+tZ555RpmZmRo5cmS9zw00JTWuU+6c2ybpmhDtSyUtjVZRwczse5JukZThnBt9pG2ApN9J+kTSc865ZY1RC+IM01cANCFt27bVzTffXGUFk8rMTNdcc41+9atfSQo9Sn7gwAENGTJEubm56tu3r3JycnTw4EH9/e9/19q1a3X++eerV69e5f0HDx6slStXatiwYTrzzDPVokULnXTSSTrvvPPqVPvDDz+sfv366YYbbtCbb76pPn36lK9TnpCQoLlz5yotLa0e7wbQ9NQYys1sUU37nXPn13L8E5KGS9rmnPt+UPswSX+W5JP0mHPu7hqeY52kK8zsheBmlS3PmCKJS6gRHYRyAE3ML3/5S82ePbvWb63My8vTr3/9ayUlJWn8+PFV9rdq1Ur33HOPli5dqnfffVcLFixQWlqaunXrpoceekgTJlRc6+HWW29VUVGRFi9erBUrVsjv92v8+PF1DuXf+973tGrVKs2YMUNLlizRsmXLlJ6ermHDhumWW27RqaeeWuf3AGiqrKblk8zsW0kbJT0r6d8q+ybPcs655TWe3OxMlYXnJwOh3Mx8kj6XNFRlgXqlpHEqC+gzK51iwpHRepnZC0Ej5QnOucNm1lHSfc65S1SDPn36uFWrVtXUBajqt7+Vpk+XbrlFmjHD62qAuLd27doKo7NouGXLlmngwIH6+c9/rqeeesrrcoAmoa6/g8xstXOu6nqdtahxpFzSUSoLz+Mk/UzSq5Kedc59UpeTO+feMrPcSs0/kpR/ZARcZvacpAucczNVNqpel/MGVn7ZKalFXY4B6o2RcgDN1O9//3tJ0uTJkz2uBEBAbXPK/SpbceV1M2uhsnC+zMx+65yr+fLu6mWrbPQ9oEBl656HZGbtJN0p6YdmNs05N9PMRkk6W1IbSbOqOW6ipImS1KVLlwaWirjG6isAmpGPP/5Yr7zyilavXq3XXntNw4cPV9++1f7zC6CR1TZSriNh/KcqC+S5kh6QVPW7busu1EKl1c6hcc7tUKWLTZ1zL9VWg3NujqQ5Utn0lfqXibjH6isAmpHVq1fr5ptvVnp6ui666CLNnj3b65IABKntQs+/Svq+pNck3RH07Z7hKJB0TNDjzpI2ReC8QGQxfQVAM5KXl6e8vDyvywBQjdpGyi+VtE/ScZJ+GfRtXCbJOefSG/CcKyUda2ZdJX0jaazK5qsDsYVQDgAAGkltc8pr+3KhGpnZs5IGSMoyswJJ051zj5vZZElvqGzFlSfqeuEo0KgI5QAAoJHUOqc8HM65cdW0L5G0JJrPDYStdWspKUnav186eFBKSfG6IgAA0EyFNRIONGtmrMACAAAaBaEcqAkrsAAAgEZAKAdqwrxyAADQCAjlQE0I5QAAoBEQyoGaEMoBAEAjIJQDNSGUAwCARkAoB2rC6isAEBG5ubnKzc31uowKbr/9dpmZli1b5nUpkmLzPULjIZQDNWH1FQAxxswqbD6fT23bttWAAQM0b948Oee8LhGNJDc3V2amtLQ0bd26NWSfAQMGyMyUn5/fKDUFng/1F9UvDwKaPKavAIhR06dPlySVlJQoPz9fL7/8spYvX65Vq1Zp1qxZHlfXNEyePFljx45Vly5dvC4lLHv37tX06dP18MMPe10KwkAoB2pCKAcQo26//fYKj1esWKEzzzxTs2fP1vXXX6+uXbt6U1gTkpWVpaysLK/LCFv37t312GOP6brrrlOvXr28LgcNxPQVoCaEcgBNRL9+/dSzZ08557R69eoK+4qLizVr1iyde+65ysnJUYsWLdS2bVsNGTJEr732WsjzBeY379+/XzfccIO6dOmiFi1aqHv37rrnnntCTpNxzmnWrFk64YQTlJKSouzsbE2ePFm7du2qtu5Dhw7p7rvv1oknnqjU1FSlp6frjDPO0Pz586v0Xb9+vcxMeXl5+vLLLzV69Gi1a9dOaWlpOuuss7RmzRpJ0rfffquJEyeqU6dOSklJ0amnnqqlS5dWOV+oOeWB6RfVbQMGDKhwjtLSUs2ePVs//vGPlZ6ertTUVP3whz/UrFmzdPjw4Yi8R7WZOXOm/H6/brzxxlr77ty5U6mpqerWrVu1U52GDx8uM6vwc7Ro0SINHjxYnTp1UosWLXT00Uerf//+mj17tqTv/tssX75ckqp9zwI/V7t379avfvUr5ebmKikpqcKHzF27dmnatGnq0aOHUlJSlJmZqbPPPlv/+Mc/qtS6bNkymZluv/12rVq1SsOGDVNGRoYyMzN14YUXauPGjZKkdevWaezYsWrfvr1atmypgQMH6qOPPqr1/WpMjJQDNSGUA2hCAiErKSmpQnthYaGuu+46nXbaaRo6dKjat2+vzZs3a/HixTr33HP16KOP6sorr6xyvpKSEp111lnatGmTzjnnHCUmJmrBggW66aabdPDgwfIpNAFTp07VAw88oE6dOmnixIlKSkrSwoUL9e9//1vFxcVKTk6u0L+4uFhnn322li9frp49e+raa6/V/v379cILL2jMmDH68MMPddddd1Wpa/369erbt6969eqlvLw8rV+/Xi+//LIGDBig9957T8OGDVN6errGjBmjwsJCPffcczrnnHP
0+eef1zpVJS8vr0rwlqS3335b//znP5Wamlrh/TnvvPP0xhtvqEePHvrZz36mlJQULV26VFOmTNG///1vPfXUU2G9R3UxYsQInXnmmXrllVe0dOlSDRw4sNq+mZmZGjt2rObOnat//OMfGjp0aIX9BQUFev3113XKKafolFNOkSTNmTNHV199tY466iidd955ysrK0rZt2/Tf//5Xc+fO1aRJk9SmTRtNnz5d8+bN04YNGyr8bFS+eLW4uFiDBg1SYWGhzjrrLKWnp5f/ZaeoqEj9+vXTp59+qlNPPVVTp07V9u3bNX/+fJ111ll66KGHdPXVV1d5XStXrtQ999yj/v3766qrrtLHH3+sl156SR9//LEWLVqk008/XT179tRll12mDRs26KWXXtLQoUO1bt06tW7dut7veVQ455r9dsoppzigQQ4dck5yzudz7vBhr6sB4tqnn37qdQkxQZIr++e7ouXLl7uEhASXnJzsNm3aVGHfwYMH3caNG6scU1RU5E444QSXmZnp9u/fX2FfTk6Ok+TOOeecCvu2bt3qMjIyXEZGhisuLi5vX7FihZPkunXr5nbs2FHefuDAAffjH//YSXI5OTkVnuOuu+4qf46SkpIKzxF4/hUrVpS3f/XVV+Wvf8aMGRXO9dvf/tZJcpmZme7qq692fr+/fN+TTz7pJLmpU6dWOGb69OlOklu6dGmV9ybYRx995NLS0lxWVpb74osvqhw/efJkV1paWt5eWlrqJkyY4CS5BQsWhPUe1STwHpWUlLj333/fmZk7+eST3eGgf6/69+/vJFWoe+XKlU6Su/DCC6ucM/Ca5syZU9528sknu+TkZLd169Yq/b/99tsKjwPPV1vNgwcPdnv37q2yf+LEiU6SmzhxYoXX8fnnn7v09HSXnJzsvvrqq/L2pUuXlv9MPP300xXOFfhvkJmZWe3Py/33319trZXV9XeQpFWuAXnV88DcGBuhHGFp3brs/ypFRV5XAsS1mv5B1O1qElskBALI9OnT3fTp093NN9/sLr74YpeUlOTMzD3wwAP1Ot8f//hHJ8ktX768QnsgPAWHuYDLLrvMSXIff/xxeduVV17pJLknnniiSv9AcKocOLt37+7MzK1du7bKMY899piT5C6//PLytkAoz83NrRCCnXNuw4YNTpJLTU11u3fvrrCvtLTUJSYmugEDBlRor0so/+abb1znzp1dSkpKhQ8Ifr/ftWvXzh111FEVPlAE7Ny505mZu+iii8rbGvIe1SQ4lDvn3NixY50k99RTT5X3CRXKnXOuT58+LjEx0W3evLm8rbS01HXu3NmlpaW5PXv2lLeffPLJLjU11RUWFtZaU11D+YcfflhlX3FxsUtNTXWtW7eu8KEl4NZbb3WS3B133FHeFnjfTj/99Cr9ly9fXu3Py/r1650kl5eXV+trCoh2KGf6ClCbdu2kvXvLprBkZHhdDQBIku64444Kj81Mjz/+uC6//PKQ/T/55BP94Q9/0FtvvaXNmzfr4MGDFfZ/8803VY7JyMhQ9+7dq7Qfc8wxksrmJwf85z//kST179+/Sv8zzjhDiYkVI8eePXuUn5+v7Oxs9ezZs8oxgwYNkiR98MEHVfb17t1bPp+vQtvRRx8tSTruuOOUlpZWYZ/P51PHjh1VUFBQ5Vw12bt3r4YPH65vvvlGzz77rE477bTyfZ9//rl27NihY489VjNmzAh5fMuWLbV27dryx/V9j+pr5syZevnll3XLLbdo9OjRSklJqbbvpEmTNGHCBD3xxBO6+eabJUlLlixRQUGBfvGLX1SY0nHJJZfo+uuv1wknnKAxY8aof//+6tevn9q3b9+gOlNSUnTiiSdWaf/f//6n/fv3q1+/fmob+J6QIIMGDdKMGTNC/kz06dOnSlvgZyLUz0t2drYk1ftnIpoI5UBt2rWTNmwoC+Xf+57X1QAIwU2Pv7W5ywbkpH379um9997TFVdcoWuuuUY5OTnlgTbgX//6lwYNGqTS0lINHjxY559/vtLT05WQkKAPP/xQCxcu1KFDh6o8R5s2bUI+dyA8+v3+8rbAhYodO3as0t/n86ld4BqdSv07deoU8jkC7UVFRVX2ZYQYIAnUFGpfYH9JSUnIfaH4/X6NHTtWH3zwgWbOnKkxY8ZU2L/jyLVGX3zxRZUPSMH27t1bfr++71F95ebmasqUKbr33nv15z//Wf/v//2/avuOHTtW119/vR599FHddNNNSkhI0COPPCJJVeZs/+pXv1JWVpZmz56tBx54QPfff7/MTP3799cf/vCHkIG4Jh06dAi5lnlj/kwE9tXnZyLaWH0FqA0XewKIYa1atdKQIUO0ePFi+f1+jR8/Xvv376/QZ8aMGTpw4IDefPNNvfbaa7r//vv129/+Vrfffrv69u0bkToCwSfUl9j4/f7yEFu5/5YtW0Keb/PmzRX6NbZf/vKXevXVV3XVVVfppptuqrI/UNfIkSNrnJLw1VdfVTmmru9RQ9xyyy1q27atZs6cqe3bt1fbr2XLluUXyb755pvlF3j27dtXJ510UpX+l112mf71r39px44devXVV3XFFVforbfe0tlnn61t27bVq8bqvlwo1n8moo1QDtSGUA6gCTjxxBN11VVXqaCgQH/6058q7MvPzy//1s/KAkvYhevkk0+u9nxvv/22SktLK7SlpaWpW7du+uabb/TFF19UOSawhGHgvI3pj3/8o2bPnq2zzjqrfMm/ynr27Kk2bdroX//6V51HW+v7HjVEmzZtdNttt2nXrl01juBL0i9+8QuZmR555BE99thj8vv9IVc2qXz+wIo9eXl5Kiws1Ntvv12+PzBNJPivKHXVo0cPpaam6sMPP6wwNSrAy5+JxkAoB2oTmNdGKAcQ42699ValpKTo3nvvrRBqcnNzVVhYqP/+978V+j/++ON64403IvLceXl5kqQ777xThYWF5e0HDx7UtGnTQh4zYcIEOed0ww03VAhx27dv1+9+97vyPo3ppZde0o033qgf/OAHev7556ud552YmKgpU6Zo8+bN+uUvf6kDBw5U6bN582Z9+umn5Y8b8h41xKRJk9StWzc98sgjWr9+fbX9jj32WA0ePFivvPKKHn74YbVp06bKNB1Jev3110N+YAiMkAcvExmYgvP111/Xu+7k5GRdcskl2rt3r37zm99U2Pfll1/qgQceUFJSki699NJ6n7spYE45UJvASHnQL1AAiEXZ2dm6+uqr9ec//1m///3vNXPmTElla2O/8cYbOv3003XxxRcrIyNDq1at0jvvvKPRo0frhRdeCPu5+/XrpylTpujBBx/U97//fY0ePbp8De7MzMyQ84R//etf67XXXtPChQt10kkn6dxzz9X+/fv1/PPPa9u2bbrxxht1+umnh11bffz85z/X4cOHdeqpp+q+++6rsj83N7c8XN9222366KOP9PDDD2vx4sUaNGiQsrOztW3bNn3xxRdasWKF7rzzTh1//PGSGvYeNURycrJmzpypiy++WBs2bKix76RJk/SPf/xDW7du1ZQpUyoE7I
CxY8cqJSVFp59+unJzc+Wc09tvv62VK1fqlFNO0ZAhQ8r7Dh48WM8//7xGjRqlc889Vy1btlROTk6dg/Tdd9+tt99+W7NmzdLKlSs1cODA8nXK9+zZo1mzZjXfb6ttyJItTW1jSUSE5f77y5ZEnDzZ60qAuMY65WVUzTrlAVu2bHGpqakuNTXVbdmypbx98eLFrm/fvq5169YuIyPDDR061C1fvtzNnTvXSXJz586tcJ6cnJxql+erbinBw4cPuwcffND17NnTJScnu06dOrlJkya5oqKias934MABd+edd7oTTjjBpaSkuNatW7t+/fq5Z555pkrfwJKI48ePr/a96d+/f8h9oZ4/1OsIvL/VbZXPf/jwYffkk0+6QYMGuczMTJeUlOSOPvpo169fP3fnnXe6r7/+Ouz3qDqVl0Ss7Cc/+Ul53aGWtnSubBnErKwsJ8mtWbMmZJ+HHnrIjRgxwnXt2tW1bNnSZWZmut69e7t77rkn5PKT06ZNc127dnWJiYlV3rO6vMadO3e6G2+80XXv3t0lJye7jIwMN2TIEPfGG29U6RtYEnH69OlV9oXz8xJKtJdENOea/xXrffr0catWrfK6DDRVTz8tXXqpNG6c9MwzXlcDxK21a9eqV69eXpcBNCvr1q1T9+7d1a9fvwpzw1FVXX8Hmdlq51z9lqQRc8qB2nGhJwCgmbr33nvlnNPkyZO9LiXuMaccqA2hHADQjHz99dd65pln9MUXX2ju3Lk66aSTdNFFF3ldVtwjlAO1YfUVAEAzsm7dOk2bNk2pqakaOnSoHnroISUkMHnCa4RyoDasvgIAaEYGDBigeLimsKnhYxFQm4wMKSFB2r1biqGv4wUAAM0HoRyoTULCd1NYGC0HAABRQCgH6oKLPQEAQBQRyoG6IJQDMYF5sAC80Bi/ewjlQF2wAgvgOZ/PpxKu6wDggZKSEvl8vqg+B6EcqAtWYAE8l5aWpt27d3tdBoA4tHv3bqWlpUX1OQjlQF0wfQXwXNu2bbVz505t375dxcXFTGUBEFXOORUXF2v79u3auXOn2gb+ah4lrFMO1AWhHPBcixYt1KVLFxUWFmr9+vXy+/1elwSgmfP5fEpLS1OXLl3UokWLqD4XoRyoC0I5EBNatGihTp06qVOnTl6XAgARxfQVoC4I5QAAIIoI5UBdsPoKAACIIkI5UBesvgIAAKKIUA7UBdNXAABAFBHKgboIDuUswwYAACKMUA7URcuWZVtxsbRvn9fVAACAZoZQDtQVU1gAAECUEMqBumIFFgAAECWEcqCuWIEFAABECaEcqCumrwAAgCiJ+VBuZt8zs8fN7IWgtuPNbL6ZPWRmo72sD3GEUA4AAKIkqqHczJ4ws21mtqZS+zAz+8zM8s3spprO4Zxb55y7olLzOZIedM79QtJlES4bCI1QDgAAoiQxyuefJ2mWpCcDDWbmk/QXSUMlFUhaaWaLJPkkzax0/ATn3LYQ531K0nQzO19SuyjUDVRFKAcAAFES1VDunHvLzHIrNf9IUr5zbp0kmdlzki5wzs2UNLyO590m6dojAf+lUH3MbKKkiZLUpUuXBtUPVMDqKwAAIEq8mFOeLWlj0OOCI20hmVk7M3tY0g/NbNqRtlwzm6OyEfg/hDrOOTfHOdfHOdenffv2kase8YvVVwAAQJREe/pKKBairdrvLXfO7ZB0TaW29ToyCg40GqavAACAKPFipLxA0jFBjztL2uRBHUD9EMoBAECUeBHKV0o61sy6mlmypLGSFnlQB1A/hHIAABAl0V4S8VlJ70nqYWYFZnaFc65U0mRJb0haK2m+c+6TaNYBRERmZtltUZHk93tbCwAAaFaivfrKuGral0haEs3nBiLO55PatCkL5Tt3SllZXlcEAACaiZj/Rk8gprACCwAAiAJCOVAfzCsHAABRQCgH6oNQDgAAooBQDtQHoRwAAEQBoRyoD0I5AACIAkI5UB9t25bdEsoBAEAEEcqB+mD1FQAAEAWEcqA+mL4CAACigFAO1AehHAAARAGhHKgPQjkAAIgCQjlQH4RyAAAQBYRyoD5YfQUAAEQBoRyoj9atpaQk6cCBsg0AACACCOVAfZixLCIAAIg4QjlQX8wrBwAAEUYoB+qLUA4AACKMUA7UF6EcAABEGKEcqC9WYAEAABFGKAfqiws9AQBAhBHKgfpi+goAAIgwQjlQX4RyAAAQYYRyoL4I5QAAIMII5UB9EcoBAECEEcqB+mL1FQAAEGGEcqC+WH0FAABEGKEcqK/ASHlhoeSct7UAAIBmgVAO1FdyspSWJvn90q5dXlcDAACaAUI50BBc7AkAACKIUA40BKEcAABEEKEcaAhWYAEAABFEKAcaghVYAABABBHKgYZg+goAAIggQjnQEIRyAAAQQYRyoCEI5QAAIIII5UBDEMoBAEAEEcqBhmD1FQAAEEGEcqAhWH0FAABEEKEcaAimrwAAgAgilAMNQSgHAAARRCgHGiIjQ/L5pD17pOJir6sBAABNHKEcaAiz7y72ZF45AAAIE6EcaChCOQAAiBBCOdBQzCsHAAARQigHGopQDgAAIiTmQ7mZjTCzR81soZmddaStl5k9bGYvmNkvvK4RcYpQDgAAIiSqodzMnjCzbWa2plL7MDP7zMzyzeymms7hnFvgnLtKUp6kMUfa1jrnrpF0saQ+USofqBmhHAAAREi0R8rnSRoW3GBmPkl/kXSOpOMljTOz483sB2b2SqWtQ9Chtx45LnCe8yW9I+n/ovwagNAI5QAAIEISo3ly59xbZpZbqflHkvKdc+skycyek3SBc26mpOGVz2FmJuluSa855/4TdO5FkhaZ2auSnonOKwBqwOorAAAgQqIayquRLWlj0OMCSX1r6D9F0hBJGWbW3Tn3sJkNkDRKUgtJS0IdZGYTJU2UpC5dukSgbKASRsoBAECEeBHKLUSbq66zc+4BSQ9UalsmaVlNT+KcmyNpjiT16dOn2vMDDUYoBwAAEeLF6isFko4JetxZ0iYP6gDCQygHAAAR4kUoXynpWDPrambJksZKWuRBHUB4COUAACBCor0k4rOS3pPUw8wKzOwK51yppMmS3pC0VtJ859wn0awDiIrAhZ47dkiOGVIAAKDhor36yrhq2peomgs0gSajZcuy7cABad8+qXVrrysCAABNVMx/oycQ05jCAgAAIoBQDoSDUA4AACKAUA6Eg1AOAAAigFAOhINQDgAAIoBQDoSDUA4AACKAUA6EI7AsYmGht3UAAIAmjVAOhIORcgAAEAGEciAchHIAABABhHIgHIRyAAAQAYRyIByEcgAAEAGEciAchHIAABABhHIgHKy+AgAAIoBQDoQjM1Myk4qKJL/f62oAAEATRSgHwuHzSW3aSM5JO3d6XQ0AAGiiCOVAuJhXDgAAwkQoB8JFKAcAAGEilAPhIpQDAIAwEcqBcLECCwAACBOhHAgXI+UAACBMhHIgXIRyAAAQJkI5EC5COQAACBOhHAgXoRwAAISJUA6Ei1AOAADCRCgHwsXqKwAAIEyEciBcjJQDAIAwEcqBc
BHKAQBAmAjlQLhatZKSk6UDB8o2AACAeiKUA+EyY7QcAACEhVAORAKhHAAAhIFQDkQCK7AAAIAwEMqBSGCkHAAAhIFQDkQCoRwAAISBUA5EAqEcAACEgVAORAKhHAAAhIFQDkQCoRwAAISBUA5EAquvAACAMBDKgUhgpBwAAISBUA5EAqEcAACEgVAORAKhHAAAhIFQDkRC8Jzyw4e9rQUAADQ5hHIgEpKSpPT0skC+a5fX1QAAgCaGUA5ECiuwAACABiKUA5HCvHIAANBAhHIgUgjlAACggQjlQKQQygEAQAMlel1AbcxshKSfSuog6S/OuTfN7AxJl6is/uOdc6d5WSMgiVAOAAAaLKoj5Wb2hJltM7M1ldqHmdlnZpZvZjfVdA7n3ALn3FWS8iSNOdL2/7d398G21XUdx98fDiBpBAqCPCWIZKAC8XCjoRCBAIsJckAhCiZvQxQQNjHlLQs0J0HHcEiUkOShFLqDMTAMCAyiqEPci0A8RjyIcQdGbsPlqRK48O2P9TvO7nAvcA57n3XO2e/XzJlz1m+vvdZv7e/e+3zO7/zW2t+pquOBK4ELR9R9aXoM5ZIkaYZGPVJ+AfAF4KLJhiQTwNnArwIrgOVJrgAmgE9Puf9Hqurx9vPH2/0G/Rbwe8PvtjQDXn1FkiTN0EhDeVXdmGTbKc2LgAeq6iGAJJcAh1bVp4FDpm4jSYDTgaur6taB9p8Fnqqqp1JIHLAAAAvPSURBVEfUfWl6HCmXJEkz1MeJnlsBjwwsr2hta3MScABweJLjB9oXA+ev7U5JjktyS5JbVq5c+Xr6K702hnJJkjRDfZzomTW01dpWrqqzgLPW0H7qK+2kqs4FzgXYY4891rp9aWgM5ZIkaYb6GClfAWwzsLw18GgP/ZCGy1AuSZJmqI9QvhzYIcl2SdYHjgSu6KEf0nBNnuhpKJckSdM06ksiXgzcBLwryYoki6tqNXAicA1wL7C0qu4eZT+kWbHRRjAxAc8+C88/33dvJEnSPDLqq68ctZb2q4CrRrlvadYl3Wj5ypXdZRHf9ra+eyRJkuaJPqavSAuX88olSdIMGMqlYTKUS5KkGTCUS8NkKJckSTNgKJeGySuwSJKkGTCUS8M0OVL+xBP99kOSJM0rhnJpmJy+IkmSZsBQLg2ToVySJM2AoVwaJkO5JEmaAUO5NEyGckmSNAOGcmmYvPqKJEmaAUO5NExefUWSJM2AoVwapsHpK1X99kWSJM0bhnJpmDbYAN74RnjhBXj22b57I0mS5glDuTRsnuwpSZKmyVAuDZuhXJIkTZOhXBo2r8AiSZKmyVAuDZtXYJEkSdNkKJeGzekrkiRpmgzl0rAZyiVJ0jSt23cHpAVnMpSfdhp86lOj3Vcy2u1rvC2ka+37Whlfs1H76b5W5vJra7ZeK7OxnyVL4NRTR7+fITGUS8O2zz6w4YbwzDPd9colSdLsW7267x5Mi6FcGrbdd4dVq+DFF0e7n9kaaalylHE6FtrjtRCOxdfK+JrNEenp1n4uPldm87UyGyYmZmc/Q2Iol0ZhYmLevRlIkqT+eKKnJEmS1DNDuSRJktQzQ7kkSZLUM0O5JEmS1DNDuSRJktQzQ7kkSZLUM0O5JEmS1DNDuSRJktQzQ7kkSZLUM0O5JEmS1DNDuSRJktQzQ7kkSZLUM0O5JEmS1DNDuSRJktQzQ7kkSZLUM0O5JEmS1DNDuSRJktSzVFXffRi5JCuBH/a0+02B/+pp3+qPdR9f1n58WfvxZe3H15pq//aqeut0NzQWobxPSW6pqj367odml3UfX9Z+fFn78WXtx9cwa+/0FUmSJKlnhnJJkiSpZ4by0Tu37w6oF9Z9fFn78WXtx5e1H19Dq71zyiVJkqSeOVIuSZIk9cxQPiJJDk5yX5IHknys7/7o9UvylSSPJ7lroO0tSa5Lcn/7/ubWniRntfrfkWS3gfsc29a/P8mxfRyLXrsk2yS5Icm9Se5OcnJrt/YLXJINkixL8m+t9p9o7dslubnV8Z+TrN/a39CWH2i3bzuwrSWt/b4kB/VzRJquJBNJbktyZVu29mMgycNJ7kxye5JbWtvI3/MN5SOQZAI4G/gAsBNwVJKd+u2VhuAC4OApbR8Drq+qHYDr2zJ0td+hfR0HfAm6FzVwKvCLwCLg1MkXtuas1cCfVNWOwF7ACe31bO0XvueA/apqF2BX4OAkewFnAGe22q8CFrf1FwOrquqdwJltPdrz5Ujg3XTvIV9svyc0950M3DuwbO3Hx/urateByx2O/D3fUD4ai4AHquqhqnoeuAQ4tOc+6XWqqhuBJ6Y0Hwpc2H6+EDhsoP2i6vwrsHGSLYCDgOuq6omqWgVcx8uDvuaQqnqsqm5tPz9D9wt6K6z9gtdq+GxbXK99FbAfcGlrn1r7yefEpcD+SdLaL6mq56rqB8ADdL8nNIcl2Rr4deC8thys/Tgb+Xu+oXw0tgIeGVhe0dq08GxeVY9BF96AzVr72p4DPjfmsfYv6V8Absbaj4U2feF24HG6X6oPAk9W1eq2ymAdf1LjdvtTwCZY+/nq88CfAi+15U2w9uOigGuTfD/Jca1t5O/56w6h43q5rKHNy9yMl7U9B3xuzFNJfhr4OvDRqnq6GwRb86praLP281RVvQjsmmRj4DJgxzWt1r5b+wUiySHA41X1/ST7TjavYVVrvzDtXVWPJtkMuC7Jv7/CukOrvSPlo7EC2GZgeWvg0Z76otH6Ufs3Fe374619bc8BnxvzUJL16AL5V6vqX1qztR8jVfUk8C268wo2TjI5qDVYx5/UuN2+Ed2UN2s//+wN/EaSh+mmoO5HN3Ju7cdAVT3avj9O98f4ImbhPd9QPhrLgR3aWdrr053kcUXPfdJoXAFMnlF9LHD5QPsx7azsvYCn2r+7rgEOTPLmdsLHga1Nc1SbF/oPwL1V9bcDN1n7BS7JW9sIOUl+CjiA7pyCG4DD22pTaz/5nDgc+GZ1HwZyBXBku0LHdnQnhC2bnaPQTFTVkqrauqq2pfsd/s2qOhprv+AleVOSDSd/pnuvvotZeM93+soIVNXqJCfSPfgTwFeq6u6eu6XXKcnFwL7ApklW0J1VfTqwNMli4D+BI9rqVwG/RndSz/8AvwtQVU8k+Wu6P9wAPllVU08e1dyyN/A7wJ1tbjHAn2Ptx8EWwIXtahnrAEur6sok9wCXJPkUcBvdH2207/+Y5AG6UdIjAarq7iRLgXvoruZzQpsWo/nnz7D2C93mwGVtiuK6wNeq6htJljPi93w/0VOSJEnqmdNXJEmSpJ4ZyiVJkqSeGcolSZKknhnKJUmSpJ4ZyiVJkqSeGcolaUSSVJLPDSyfkuS0IW37giSHv/qar3s/RyS5N8kNU9rXSXJWkruS3JlkebsO8yj78nCSTUe5D0nqi6FckkbnOeCDcy1Itutuv1aLgT+sqvdPaf8wsCWwc1W9F/hN4MkhdVGSxo6hXJJGZzVwLvDHU2+YOtKd5Nn2fd8k306yNMl/JDk9ydFJlrUR6e0HNnNAku+09Q5p959I8tk2cn1Hkt8f
2O4NSb4G3LmG/hzVtn9XkjNa218Bvwyck+SzU+6yBfBYVb0EUFUrqmpVu9+XktyS5O4knxjYx8NJ/ibJTe323ZJck+TBJMcP9PPGJJcluSfJOUle9rsqyW+3x+T2JH/fjnuiPa6To/cve9wlaa7yEz0labTOBu5I8plp3GcXYEe6TwZ8CDivqhYlORk4CfhoW29b4H3A9sANSd4JHEP3Mc97JnkD8L0k17b1FwHvqaofDO4syZbAGcDuwCrg2iSHVdUnk+wHnFJVt0zp41Lgu0l+Bbge+Kequq3d9hft0+wmgOuT7FxVd7TbHqmqX0pyJnAB3SembgDcDZwz0M+dgB8C3wA+CFw60N8d6Ubq966qF5J8ETi6bWOrqnpPW2/j1/BYS9Kc4Ei5JI1QVT0NXAT80TTutryqHquq54AHgclQfSddEJ+0tKpeqqr76cL7zwMHAsckuR24GdgE2KGtv2xqIG/2BL5VVSurajXwVWCfVzmuFcC7gCXAS3The/9284eS3Er3MeTvpgvYk64YOJabq+qZqloJ/HggRC+rqofax5FfTDdaP2h/uj8glrfj3B94R3sM3pHk75IcDDz9SscgSXOJI+WSNHqfB24Fzh9oW00bGEkSYP2B254b+PmlgeWX+P/v2zVlPwUEOKmqrhm8Icm+wH+vpX951SNYg/ZHw9XA1Ul+BByW5CHgFGDPqlqV5AK6kfBJg8cy9Tgnj21NxzW1vxdW1ZKXHUiyC3AQcALwIeAj0z0uSeqDI+WSNGJV9QTddI/FA80P0432AhwKrDeDTR/RroKyPd1I8X3ANcAfJFkPIMnPJXnTq2znZuB9STZtU06OAr79Sndo88G3bD+vA+xMN93kZ+jC/1NJNgc+MIPjWpRku7bdDwPfnXL79cDhSTZr+39Lkre3E2rXqaqvA38J7DaDfUtSLxwpl6TZ8TngxIHlLwOXJ1lGFzLXNor9Su6jC8+bA8dX1Y+TnEc3xeXWNgK/EjjslTZSVY8lWQLcQDcKfVVVXf4q+94M+HKbtw6wDPhC68NtdPO7HwK+N4Pjugk4HXgvcCNw2ZT+3pPk43Rz39cBXqAbGf9f4PyBE0NfNpIuSXNVqqb+V1CSpH60aTanVNUhffdFkmaT01ckSZKknjlSLkmSJPXMkXJJkiSpZ4ZySZIkqWeGckmSJKlnhnJJkiSpZ4ZySZIkqWeGckmSJKln/wc+GHH1y36fegAAAABJRU5ErkJggg==\n", 322 | "text/plain": [ 323 | "" 324 | ] 325 | }, 326 | "metadata": {}, 327 | "output_type": "display_data" 328 | } 329 | ], 330 | "source": [ 331 | "%matplotlib inline\n", 332 | "import matplotlib.pyplot as plt\n", 333 | "\n", 334 | "errors_rff, errors_rnystrom, errors_nystrom = np.array(errors_rff), np.array(errors_rnystrom), np.array(errors_nystrom)\n", 335 | "\n", 336 | "\n", 337 | "fig, ax = plt.subplots(figsize=(12,8))\n", 338 | "\n", 339 | "ax.plot(Ds, errors_rff, label='RFF', linewidth=2, color='k')\n", 340 | "ax.plot(Ds, errors_nystrom, label='Nystrom', linewidth=2, color='r')\n", 341 | "ax.plot(Ds, errors_rnystrom, label='Randomized Nystrom', linewidth=2, color='g')\n", 342 | "ax.set_xlabel('Number of Samples')\n", 343 | "ax.set_ylabel('MSE')\n", 344 | "ax.set_yscale(\"log\")\n", 345 | "ax.set_title('Errors')\n", 346 | "ax.legend(fontsize=20)\n", 347 | "plt.show()" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": null, 353 | "metadata": {}, 354 | "outputs": [], 355 | "source": [] 356 | } 357 | ], 358 | "metadata": { 359 | "kernelspec": { 360 | "display_name": "Python 3", 361 | "language": "python", 362 | "name": "python3" 363 | }, 364 | "language_info": { 365 | "codemirror_mode": { 366 | "name": "ipython", 367 | "version": 3 368 | }, 369 | "file_extension": ".py", 370 | "mimetype": "text/x-python", 371 | "name": "python", 372 | "nbconvert_exporter": "python", 373 | "pygments_lexer": "ipython3", 374 | "version": "3.7.2" 375 | } 376 | }, 377 | "nbformat": 4, 378 | "nbformat_minor": 2 379 | } 380 | --------------------------------------------------------------------------------