├── .gitignore ├── LICENSE ├── README.md ├── demos ├── datasets.py ├── demo_mnist.ipynb ├── demo_regression_UCI.ipynb ├── demo_step_function.ipynb ├── priors.ipynb ├── run_regression.py └── using_natural_gradients.ipynb ├── doubly_stochastic_dgp ├── __init__.py ├── dgp.py ├── layer_initializations.py ├── layers.py ├── model_zoo.py └── utils.py ├── media └── DGP_presentation.pdf ├── setup.py └── tests ├── test_collapsed.py ├── test_dgp.py ├── test_utils.py └── test_zoo_models.py /.gitignore: -------------------------------------------------------------------------------- 1 | # mac hidden files 2 | .DS_Store 3 | 4 | # data files 5 | data/ 6 | 7 | # pycharm 8 | .idea/ 9 | 10 | 11 | # Byte-compiled / optimized / DLL files 12 | __pycache__/ 13 | *.py[cod] 14 | *$py.class 15 | 16 | # C extensions 17 | *.so 18 | 19 | # Distribution / packaging 20 | .Python 21 | env/ 22 | build/ 23 | develop-eggs/ 24 | dist/ 25 | downloads/ 26 | eggs/ 27 | .eggs/ 28 | lib/ 29 | lib64/ 30 | parts/ 31 | sdist/ 32 | var/ 33 | wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | 38 | # PyInstaller 39 | # Usually these files are written by a python script from a template 40 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 41 | *.manifest 42 | *.spec 43 | 44 | # Installer logs 45 | pip-log.txt 46 | pip-delete-this-directory.txt 47 | 48 | # Unit test / coverage reports 49 | htmlcov/ 50 | .tox/ 51 | .coverage 52 | .coverage.* 53 | .cache 54 | nosetests.xml 55 | coverage.xml 56 | *.cover 57 | .hypothesis/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # celery beat schedule file 87 | celerybeat-schedule 88 | 89 | # SageMath parsed files 90 | *.sage.py 91 | 92 | # dotenv 93 | .env 94 | 95 | # virtualenv 96 | .venv 97 | venv/ 98 | ENV/ 99 | 100 | # Spyder project settings 101 | .spyderproject 102 | .spyproject 103 | 104 | # Rope project settings 105 | .ropeproject 106 | 107 | # mkdocs documentation 108 | /site 109 | 110 | # mypy 111 | .mypy_cache/ 112 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Doubly-Stochastic-DGP
2 | Deep Gaussian Processes with Doubly Stochastic Variational Inference
3 | 
4 | Requirements: gpflow1.1.1 and tensorflow1.8. NB not compatible with more recent versions (e.g. gpflow1.2)
5 | 
6 | This code accompanies the paper
7 | 
8 |     @inproceedings{salimbeni2017doubly,
9 |       title={Doubly stochastic variational inference for deep gaussian processes},
10 |       author={Salimbeni, Hugh and Deisenroth, Marc},
11 |       booktitle={Advances in Neural Information Processing Systems},
12 |       year={2017}
13 |     }
14 | 
15 | See the arXiv version at https://arxiv.org/abs/1705.08933
16 | 
17 | This code now offers more functionality than described in the above paper. In particular, natural gradients are now supported. If you use these, please consider citing the following paper:
18 | 
19 |     @inproceedings{salimbeni2018natural,
20 |       title={Natural Gradients in Practice: Non-Conjugate Variational Inference in Gaussian Process Models},
21 |       author={Salimbeni, Hugh and Eleftheriadis, Stefanos and Hensman, James},
22 |       booktitle={Artificial Intelligence and Statistics},
23 |       year={2018}
24 |     }
25 | 
--------------------------------------------------------------------------------
/demos/datasets.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 Hugh Salimbeni
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | #     http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
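# Example usage (a minimal sketch: it assumes the default '/data/' path is
# writable and that the UCI download URLs below still resolve):
#
#     from datasets import Datasets
#     data = Datasets(data_path='/data/').all_datasets['energy'].get_data(seed=0, split=0)
#     X, Y = data['X'], data['Y']        # normalized 90% training split
#     Xs, Ys = data['Xs'], data['Ys']    # normalized 10% test split
#     Y_mean, Y_std = data['Y_mean'], data['Y_std']  # statistics to undo the Y normalization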
14 | 
15 | import numpy as np
16 | import os
17 | import pandas
18 | 
19 | from io import BytesIO
20 | from urllib.request import urlopen
21 | from zipfile import ZipFile
22 | 
23 | import csv
24 | 
25 | 
26 | class Dataset(object):
27 |     def __init__(self, name, N, D, type, data_path='/data/'):
28 |         self.data_path = data_path
29 |         self.name, self.N, self.D = name, N, D
30 |         assert type in ['regression', 'classification', 'multiclass']
31 |         self.type = type
32 | 
33 |     def csv_file_path(self, name):
34 |         return '{}{}.csv'.format(self.data_path, name)
35 | 
36 |     def read_data(self):
37 |         data = pandas.read_csv(self.csv_file_path(self.name),
38 |                                header=None, delimiter=',').values
39 |         return {'X':data[:, :-1], 'Y':data[:, -1, None]}
40 | 
41 |     def download_data(self):
42 |         raise NotImplementedError
43 | 
44 |     def get_data(self, seed=0, split=0, prop=0.9):
45 |         path = self.csv_file_path(self.name)
46 |         if not os.path.isfile(path):
47 |             self.download_data()
48 | 
49 |         full_data = self.read_data()
50 |         split_data = self.split(full_data, seed, split, prop)
51 |         split_data = self.normalize(split_data, 'X')
52 | 
53 |         if self.type == 'regression':
54 |             split_data = self.normalize(split_data, 'Y')
55 | 
56 |         return split_data
57 | 
58 |     def split(self, full_data, seed, split, prop):
59 |         ind = np.arange(self.N)
60 | 
61 |         np.random.seed(seed + split)
62 |         np.random.shuffle(ind)
63 | 
64 |         n = int(self.N * prop)
65 | 
66 |         X = full_data['X'][ind[:n], :]
67 |         Xs = full_data['X'][ind[n:], :]
68 | 
69 |         Y = full_data['Y'][ind[:n], :]
70 |         Ys = full_data['Y'][ind[n:], :]
71 | 
72 |         return {'X': X, 'Xs': Xs, 'Y': Y, 'Ys': Ys}
73 | 
74 |     def normalize(self, split_data, X_or_Y):
75 |         m = np.average(split_data[X_or_Y], 0)[None, :]
76 |         s = np.std(split_data[X_or_Y], 0)[None, :] + 1e-6  # training-split statistics, matching the mean above
77 | 
78 |         split_data[X_or_Y] = (split_data[X_or_Y] - m) / s
79 |         split_data[X_or_Y + 's'] = (split_data[X_or_Y + 's'] - m) / s
80 | 
81 |         split_data.update({X_or_Y + '_mean': m.flatten()})
82 |         split_data.update({X_or_Y + '_std': s.flatten()})
83 |         return split_data
84 | 
85 | 
86 | datasets = []
87 | uci_base = 'https://archive.ics.uci.edu/ml/machine-learning-databases/'
88 | 
89 | 
90 | class Boston(Dataset):
91 |     def __init__(self):
92 |         self.name, self.N, self.D = 'boston', 506, 12
93 |         self.type = 'regression'
94 | 
95 |     def download_data(self):
96 |         url = '{}{}'.format(uci_base, 'housing/housing.data')
97 | 
98 |         data = pandas.read_fwf(url, header=None).values
99 |         with open(self.csv_file_path(self.name), 'w') as f:
100 |             csv.writer(f).writerows(data)
101 | 
102 | 
103 | class Concrete(Dataset):
104 |     def __init__(self):
105 |         self.name, self.N, self.D = 'concrete', 1030, 8
106 |         self.type = 'regression'
107 | 
108 |     def download_data(self):
109 |         url = '{}{}'.format(uci_base, 'concrete/compressive/Concrete_Data.xls')
110 | 
111 |         data = pandas.read_excel(url).values
112 |         with open(self.csv_file_path(self.name), 'w') as f:
113 |             csv.writer(f).writerows(data)
114 | 
115 | 
116 | class Energy(Dataset):
117 |     def __init__(self):
118 |         self.name, self.N, self.D = 'energy', 768, 8
119 |         self.type = 'regression'
120 | 
121 |     def download_data(self):
122 |         url = '{}{}'.format(uci_base, '00242/ENB2012_data.xlsx')
123 | 
124 |         data = pandas.read_excel(url).values
125 |         data = data[:, :-1]
126 | 
127 |         with open(self.csv_file_path(self.name), 'w') as f:
128 |             csv.writer(f).writerows(data)
129 | 
130 | 
131 | class Kin8mn(Dataset):
132 |     def __init__(self):
133 |         self.name, self.N, self.D = 'kin8nm', 8192, 8
134 |         self.type = 'regression'
135 | 
136 |     def
download_data(self): 137 | 138 | url = 'http://mldata.org/repository/data/download/csv/uci-20070111-kin8nm' 139 | 140 | data = pandas.read_csv(url, header=None).values 141 | 142 | with open(self.csv_file_path(self.name), 'w') as f: 143 | csv.writer(f).writerows(data) 144 | 145 | 146 | class Naval(Dataset): 147 | def __init__(self): 148 | self.name, self.N, self.D = 'naval', 11934, 12 149 | self.type = 'regression' 150 | 151 | def download_data(self): 152 | 153 | url = '{}{}'.format(uci_base, '00316/UCI%20CBM%20Dataset.zip') 154 | 155 | with urlopen(url) as zipresp: 156 | with ZipFile(BytesIO(zipresp.read())) as zfile: 157 | zfile.extractall('/tmp/') 158 | 159 | data = pandas.read_fwf('/tmp/UCI CBM Dataset/data.txt', header=None).values 160 | data = data[:, :-1] 161 | 162 | with open(self.csv_file_path(self.name), 'w') as f: 163 | csv.writer(f).writerows(data) 164 | 165 | 166 | class Power(Dataset): 167 | def __init__(self): 168 | self.name, self.N, self.D = 'power', 9568, 4 169 | self.type = 'regression' 170 | 171 | def download_data(self): 172 | url = '{}{}'.format(uci_base, '00294/CCPP.zip') 173 | with urlopen(url) as zipresp: 174 | with ZipFile(BytesIO(zipresp.read())) as zfile: 175 | zfile.extractall('/tmp/') 176 | 177 | data = pandas.read_excel('/tmp/CCPP//Folds5x2_pp.xlsx').values 178 | 179 | with open(self.csv_file_path(self.name), 'w') as f: 180 | csv.writer(f).writerows(data) 181 | 182 | 183 | class Protein(Dataset): 184 | def __init__(self): 185 | self.name, self.N, self.D = 'protein', 45730, 9 186 | self.type = 'regression' 187 | 188 | def download_data(self): 189 | 190 | url = '{}{}'.format(uci_base, '00265/CASP.csv') 191 | 192 | data = pandas.read_csv(url).values 193 | 194 | data = np.concatenate([data[:, 1:], data[:, 0, None]], 1) 195 | 196 | with open(self.csv_file_path(self.name), 'w') as f: 197 | csv.writer(f).writerows(data) 198 | 199 | 200 | class WineRed(Dataset): 201 | def __init__(self): 202 | self.name, self.N, self.D = 'wine_red', 1599, 11 203 | self.type = 'regression' 204 | 205 | def download_data(self): 206 | 207 | url = '{}{}'.format(uci_base, 'wine-quality/winequality-red.csv') 208 | 209 | data = pandas.read_csv(url, delimiter=';').values 210 | 211 | with open(self.csv_file_path(self.name), 'w') as f: 212 | csv.writer(f).writerows(data) 213 | 214 | 215 | class WineWhite(Dataset): 216 | def __init__(self): 217 | self.name, self.N, self.D = 'wine_white', 4898, 12 218 | self.type = 'regression' 219 | 220 | def download_data(self): 221 | 222 | url = '{}{}'.format(uci_base, 'wine-quality/winequality-white.csv') 223 | 224 | data = pandas.read_csv(url, delimiter=';').values 225 | 226 | with open(self.csv_file_path(self.name), 'w') as f: 227 | csv.writer(f).writerows(data) 228 | 229 | 230 | class Datasets(object): 231 | def __init__(self, data_path='/data/'): 232 | if not os.path.isdir(data_path): 233 | os.mkdir(data_path) 234 | 235 | datasets = [] 236 | 237 | datasets.append(Boston()) 238 | datasets.append(Concrete()) 239 | datasets.append(Energy()) 240 | datasets.append(Kin8mn()) 241 | datasets.append(Naval()) 242 | datasets.append(Power()) 243 | datasets.append(Protein()) 244 | datasets.append(WineRed()) 245 | datasets.append(WineWhite()) 246 | 247 | self.all_datasets = {} 248 | for d in datasets: 249 | d.data_path = data_path 250 | self.all_datasets.update({d.name : d}) 251 | -------------------------------------------------------------------------------- /demos/demo_mnist.ipynb: -------------------------------------------------------------------------------- 1 | { 
2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# MNIST classification" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stdout", 17 | "output_type": "stream", 18 | "text": [ 19 | "Extracting /data/MNIST_data/train-images-idx3-ubyte.gz\n", 20 | "Extracting /data/MNIST_data/train-labels-idx1-ubyte.gz\n", 21 | "Extracting /data/MNIST_data/t10k-images-idx3-ubyte.gz\n", 22 | "Extracting /data/MNIST_data/t10k-labels-idx1-ubyte.gz\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "import matplotlib.pyplot as plt\n", 28 | "%matplotlib inline\n", 29 | "\n", 30 | "import numpy as np\n", 31 | "import tensorflow as tf\n", 32 | "\n", 33 | "from gpflow.likelihoods import MultiClass\n", 34 | "from gpflow.kernels import RBF, White\n", 35 | "from gpflow.models.svgp import SVGP\n", 36 | "from gpflow.training import AdamOptimizer\n", 37 | "\n", 38 | "from scipy.stats import mode\n", 39 | "from scipy.cluster.vq import kmeans2\n", 40 | "\n", 41 | "from doubly_stochastic_dgp.dgp import DGP\n", 42 | "\n", 43 | "import time\n", 44 | "\n", 45 | "def get_mnist_data(data_path='/data'):\n", 46 | " from tensorflow.examples.tutorials.mnist import input_data\n", 47 | " mnist = input_data.read_data_sets(data_path+'/MNIST_data/', one_hot=False)\n", 48 | "\n", 49 | " X, Y = mnist.train.next_batch(mnist.train.num_examples)\n", 50 | " Xval, Yval = mnist.validation.next_batch(mnist.validation.num_examples)\n", 51 | " Xtest, Ytest = mnist.test.next_batch(mnist.test.num_examples)\n", 52 | "\n", 53 | " Y, Yval, Ytest = [np.array(y, dtype=float)[:, None] for y in [Y, Yval, Ytest]]\n", 54 | "\n", 55 | " X = np.concatenate([X, Xval], 0)\n", 56 | " Y = np.concatenate([Y, Yval], 0)\n", 57 | " \n", 58 | " return X.astype(float), Y.astype(float), Xtest.astype(float), Ytest.astype(float)\n", 59 | "\n", 60 | "X, Y, Xs, Ys = get_mnist_data()\n" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "We'll use 100 inducing points " 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 2, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "M = 100\n", 77 | "Z = kmeans2(X, M, minit='points')[0]" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "We'll compare three models: an ordinary sparse GP and DGPs with 2 and 3 layers. 
\n", 85 | "\n", 86 | "We'll use a batch size of 1000 for all models " 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 3, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "m_sgp = SVGP(X, Y, RBF(784, lengthscales=2., variance=2.), MultiClass(10), \n", 96 | " Z=Z, num_latent=10, minibatch_size=1000, whiten=True)\n", 97 | "\n", 98 | "def make_dgp(L):\n", 99 | " kernels = [RBF(784, lengthscales=2., variance=2.)]\n", 100 | " for l in range(L-1):\n", 101 | " kernels.append(RBF(30, lengthscales=2., variance=2.))\n", 102 | " model = DGP(X, Y, Z, kernels, MultiClass(10), \n", 103 | " minibatch_size=1000,\n", 104 | " num_outputs=10)\n", 105 | " \n", 106 | " # start things deterministic \n", 107 | " for layer in model.layers[:-1]:\n", 108 | " layer.q_sqrt = layer.q_sqrt.value * 1e-5 \n", 109 | " \n", 110 | " return model\n", 111 | "\n", 112 | "m_dgp2 = make_dgp(2)\n", 113 | "m_dgp3 = make_dgp(3)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "markdown", 118 | "metadata": {}, 119 | "source": [ 120 | "For the SGP model we'll calcuate accuracy by simply taking the max mean prediction:" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 4, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "def assess_model_sgp(model, X_batch, Y_batch):\n", 130 | " m, v = model.predict_y(X_batch)\n", 131 | " l = model.predict_density(X_batch, Y_batch)\n", 132 | " a = (np.argmax(m, 1).reshape(Y_batch.shape).astype(int)==Y_batch.astype(int))\n", 133 | " return l, a" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "metadata": {}, 139 | "source": [ 140 | "For the DGP models we have stochastic predictions. We need a single prediction for each datum, so to do this we take $S$ samples for the one-hot predictions ($(S, N, 10)$ matrices for mean and var), then we take the max over the class means (to give a $(S, N)$ matrix), and finally we take the modal class over the samples (to give a vector of length $N$):\n", 141 | "\n", 142 | "We'll use 100 samples" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 5, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "S = 100\n", 152 | "def assess_model_dgp(model, X_batch, Y_batch):\n", 153 | " m, v = model.predict_y(X_batch, S)\n", 154 | " l = model.predict_density(X_batch, Y_batch, S)\n", 155 | " a = (mode(np.argmax(m, 2), 0)[0].reshape(Y_batch.shape).astype(int)==Y_batch.astype(int))\n", 156 | " return l, a" 157 | ] 158 | }, 159 | { 160 | "cell_type": "markdown", 161 | "metadata": {}, 162 | "source": [ 163 | "We need batch predictions (we might run out of memory otherwise)" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": 6, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "def batch_assess(model, assess_model, X, Y):\n", 173 | " n_batches = max(int(len(X)/1000), 1)\n", 174 | " lik, acc = [], []\n", 175 | " for X_batch, Y_batch in zip(np.split(X, n_batches), np.split(Y, n_batches)):\n", 176 | " l, a = assess_model(model, X_batch, Y_batch)\n", 177 | " lik.append(l)\n", 178 | " acc.append(a)\n", 179 | " lik = np.concatenate(lik, 0)\n", 180 | " acc = np.array(np.concatenate(acc, 0), dtype=float)\n", 181 | " return np.average(lik), np.average(acc)" 182 | ] 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "metadata": {}, 187 | "source": [ 188 | "Now we're ready to go\n", 189 | "\n", 190 | "The sparse GP:" 191 | ] 192 | }, 193 | { 194 | "cell_type": "code", 195 | "execution_count": 7, 196 | "metadata": {}, 197 
| "outputs": [], 198 | "source": [ 199 | "iterations = 20000" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 8, 205 | "metadata": {}, 206 | "outputs": [ 207 | { 208 | "name": "stdout", 209 | "output_type": "stream", 210 | "text": [ 211 | "sgp test lik: -0.1092, test acc 0.9698\n" 212 | ] 213 | } 214 | ], 215 | "source": [ 216 | "AdamOptimizer(0.01).minimize(m_sgp, maxiter=iterations)\n", 217 | "l, a = batch_assess(m_sgp, assess_model_sgp, Xs, Ys)\n", 218 | "print('sgp test lik: {:.4f}, test acc {:.4f}'.format(l, a))" 219 | ] 220 | }, 221 | { 222 | "cell_type": "markdown", 223 | "metadata": {}, 224 | "source": [ 225 | "Using more inducing points improves things, but at the expense of very slow computation (500 inducing points takes about a day)\n", 226 | "\n", 227 | "The two layer DGP:" 228 | ] 229 | }, 230 | { 231 | "cell_type": "code", 232 | "execution_count": 9, 233 | "metadata": {}, 234 | "outputs": [ 235 | { 236 | "name": "stdout", 237 | "output_type": "stream", 238 | "text": [ 239 | "dgp2 test lik: -0.0731, test acc 0.9794\n" 240 | ] 241 | } 242 | ], 243 | "source": [ 244 | "AdamOptimizer(0.01).minimize(m_dgp2, maxiter=iterations)\n", 245 | "l, a = batch_assess(m_dgp2, assess_model_dgp, Xs, Ys)\n", 246 | "print('dgp2 test lik: {:.4f}, test acc {:.4f}'.format(l, a))" 247 | ] 248 | }, 249 | { 250 | "cell_type": "markdown", 251 | "metadata": {}, 252 | "source": [ 253 | "And the three layer:" 254 | ] 255 | }, 256 | { 257 | "cell_type": "code", 258 | "execution_count": 10, 259 | "metadata": {}, 260 | "outputs": [ 261 | { 262 | "name": "stdout", 263 | "output_type": "stream", 264 | "text": [ 265 | "dgp3 test lik: -0.0709, test acc 0.9799\n" 266 | ] 267 | } 268 | ], 269 | "source": [ 270 | "AdamOptimizer(0.01).minimize(m_dgp3, maxiter=iterations)\n", 271 | "l, a = batch_assess(m_dgp3, assess_model_dgp, Xs, Ys)\n", 272 | "print('dgp3 test lik: {:.4f}, test acc {:.4f}'.format(l, a))" 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "metadata": {}, 278 | "source": [ 279 | "Using the deeper models we see a small improvement in accuracy, and a larger improvement in test log likelihood " 280 | ] 281 | } 282 | ], 283 | "metadata": { 284 | "kernelspec": { 285 | "display_name": "Python 3", 286 | "language": "python", 287 | "name": "python3" 288 | }, 289 | "language_info": { 290 | "codemirror_mode": { 291 | "name": "ipython", 292 | "version": 3 293 | }, 294 | "file_extension": ".py", 295 | "mimetype": "text/x-python", 296 | "name": "python", 297 | "nbconvert_exporter": "python", 298 | "pygments_lexer": "ipython3", 299 | "version": "3.5.4" 300 | } 301 | }, 302 | "nbformat": 4, 303 | "nbformat_minor": 1 304 | } 305 | -------------------------------------------------------------------------------- /demos/demo_regression_UCI.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "application/javascript": [ 11 | "IPython.OutputArea.auto_scroll_threshold = 9999;" 12 | ], 13 | "text/plain": [ 14 | "" 15 | ] 16 | }, 17 | "metadata": {}, 18 | "output_type": "display_data" 19 | } 20 | ], 21 | "source": [ 22 | "%%javascript\n", 23 | "IPython.OutputArea.auto_scroll_threshold = 9999;" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "# DGP for regression\n", 31 | "\n", 32 | "Here we'll show the DGP for regression, using small to medium data sets. 
" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 2, 38 | "metadata": {}, 39 | "outputs": [ 40 | { 41 | "name": "stderr", 42 | "output_type": "stream", 43 | "text": [ 44 | "/Users/hughsalimbeni/anaconda3/envs/prowler_env/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: compiletime version 3.6 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.5\n", 45 | " return f(*args, **kwds)\n", 46 | "/Users/hughsalimbeni/anaconda3/envs/prowler_env/lib/python3.5/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 47 | " from ._conv import register_converters as _register_converters\n", 48 | "/Users/hughsalimbeni/anaconda3/envs/prowler_env/lib/python3.5/site-packages/multipledispatch/dispatcher.py:24: AmbiguityWarning: \n", 49 | "Ambiguities exist in dispatched function _expectation\n", 50 | "\n", 51 | "The following signatures may result in ambiguous behavior:\n", 52 | "\t[Gaussian, Identity, NoneType, Kernel, InducingPoints], [Gaussian, Linear, NoneType, Sum, InducingPoints]\n", 53 | "\n", 54 | "\n", 55 | "Consider making the following additions:\n", 56 | "\n", 57 | "@dispatch(Gaussian, Identity, NoneType, Sum, InducingPoints)\n", 58 | "def _expectation(...)\n", 59 | " warn(warning_text(dispatcher.name, ambiguities), AmbiguityWarning)\n" 60 | ] 61 | } 62 | ], 63 | "source": [ 64 | "import numpy as np\n", 65 | "import tensorflow as tf\n", 66 | "tf.logging.set_verbosity(0)\n", 67 | "\n", 68 | "import time\n", 69 | "\n", 70 | "import matplotlib.pyplot as plt\n", 71 | "%matplotlib inline \n", 72 | "\n", 73 | "from gpflow.likelihoods import Gaussian\n", 74 | "from gpflow.kernels import RBF, White\n", 75 | "from gpflow.mean_functions import Constant\n", 76 | "from gpflow.models.sgpr import SGPR, GPRFITC\n", 77 | "from gpflow.models.svgp import SVGP\n", 78 | "from gpflow.models.gpr import GPR\n", 79 | "from gpflow.training import AdamOptimizer, ScipyOptimizer, NatGradOptimizer\n", 80 | "from gpflow.actions import Action, Loop\n", 81 | "\n", 82 | "from scipy.cluster.vq import kmeans2\n", 83 | "from scipy.stats import norm\n", 84 | "from scipy.special import logsumexp\n", 85 | "\n", 86 | "from doubly_stochastic_dgp.dgp import DGP\n", 87 | "from datasets import Datasets\n", 88 | "datasets = Datasets(data_path='/data/')" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "Let's use the kin8nm data set" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 3, 101 | "metadata": {}, 102 | "outputs": [ 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "N: 7372, D: 8, Ns: 820\n" 108 | ] 109 | } 110 | ], 111 | "source": [ 112 | "data = datasets.all_datasets['kin8nm'].get_data()\n", 113 | "X, Y, Xs, Ys, Y_std = [data[_] for _ in ['X', 'Y', 'Xs', 'Ys', 'Y_std']]\n", 114 | "print('N: {}, D: {}, Ns: {}'.format(X.shape[0], X.shape[1], Xs.shape[0]))" 115 | ] 116 | }, 117 | { 118 | "cell_type": "markdown", 119 | "metadata": {}, 120 | "source": [ 121 | "## Single layer models\n", 122 | "\n", 123 | "Our baseline model is a sparse GP, but since the dataset is small we can also train without minibatches so we'll also compare to a collapsed sparse GP (with analytically optimal $q(\\mathbf u)$) which is known as SGPR in GPflow terminology, and we'll also cpmpare to FITC" 124 | ] 125 | }, 126 | { 127 
| "cell_type": "code", 128 | "execution_count": 4, 129 | "metadata": { 130 | "collapsed": true 131 | }, 132 | "outputs": [], 133 | "source": [ 134 | "def make_single_layer_models(X, Y, Z):\n", 135 | " D = X.shape[1]\n", 136 | " m_sgpr = SGPR(X, Y, RBF(D), Z.copy())\n", 137 | " m_svgp = SVGP(X, Y, RBF(D), Gaussian(), Z.copy())\n", 138 | " m_fitc = GPRFITC(X, Y, RBF(D), Z.copy())\n", 139 | " for m in m_sgpr, m_svgp, m_fitc:\n", 140 | " m.likelihood.variance = 0.01\n", 141 | " return [m_sgpr, m_svgp, m_fitc], ['{} {}'.format(n, len(Z)) for n in ['SGPR', 'SVGP', 'FITC']]\n", 142 | "\n", 143 | "Z_100 = kmeans2(X, 100, minit='points')[0]\n", 144 | "models_single_layer, names_single_layer = make_single_layer_models(X, Y, Z_100)\n" 145 | ] 146 | }, 147 | { 148 | "cell_type": "markdown", 149 | "metadata": {}, 150 | "source": [ 151 | "## DGP models\n", 152 | "\n", 153 | "We'll include a DGP with a single layer here for comparision. We've used a largish minibatch size of $\\text{min}(1000, N)$, but it works fine for smaller batches too\n", 154 | "\n", 155 | "In the paper we used 1 sample. Here we'll go up to 5 in celebration of the new implementation (which is much more efficient)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": 5, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "def make_dgp_models(X, Y, Z):\n", 165 | " models, names = [], []\n", 166 | " for L in range(1, 4):\n", 167 | " D = X.shape[1]\n", 168 | "\n", 169 | " # the layer shapes are defined by the kernel dims, so here all hidden layers are D dimensional \n", 170 | " kernels = []\n", 171 | " for l in range(L):\n", 172 | " kernels.append(RBF(D))\n", 173 | "\n", 174 | " # between layer noise (doesn't actually make much difference but we include it anyway)\n", 175 | " for kernel in kernels[:-1]:\n", 176 | " kernel += White(D, variance=1e-5) \n", 177 | "\n", 178 | " mb = 1000 if X.shape[0] > 1000 else None \n", 179 | " model = DGP(X, Y, Z, kernels, Gaussian(), num_samples=5, minibatch_size=mb)\n", 180 | "\n", 181 | " # start the inner layers almost deterministically \n", 182 | " for layer in model.layers[:-1]:\n", 183 | " layer.q_sqrt = layer.q_sqrt.value * 1e-5\n", 184 | "\n", 185 | " models.append(model)\n", 186 | " names.append('DGP{} {}'.format(L, len(Z)))\n", 187 | " \n", 188 | " return models, names\n", 189 | "\n", 190 | "models_dgp, names_dgp = make_dgp_models(X, Y, Z_100)\n" 191 | ] 192 | }, 193 | { 194 | "cell_type": "markdown", 195 | "metadata": {}, 196 | "source": [ 197 | "## Prediction\n", 198 | "\n", 199 | "We'll calculate test rmse and likelihood in batches (so the larger datasets don't cause memory problems)\n", 200 | "\n", 201 | "For the DGP models we need to take an average over the samples for the rmse. 
The `predict_density` function already does this internally\n"
202 | ]
203 | },
204 | {
205 | "cell_type": "code",
206 | "execution_count": 6,
207 | "metadata": {
208 | "collapsed": true
209 | },
210 | "outputs": [],
211 | "source": [
212 | "def batch_assess(model, assess_model, X, Y):\n",
213 | "    n_batches = max(int(X.shape[0]/1000.), 1)\n",
214 | "    lik, sq_diff = [], []\n",
215 | "    for X_batch, Y_batch in zip(np.array_split(X, n_batches), np.array_split(Y, n_batches)):\n",
216 | "        l, sq = assess_model(model, X_batch, Y_batch)\n",
217 | "        lik.append(l)\n",
218 | "        sq_diff.append(sq)\n",
219 | "    lik = np.concatenate(lik, 0)\n",
220 | "    sq_diff = np.array(np.concatenate(sq_diff, 0), dtype=float)\n",
221 | "    return np.average(lik), np.average(sq_diff)**0.5\n",
222 | "\n",
223 | "def assess_single_layer(model, X_batch, Y_batch):\n",
224 | "    m, v = model.predict_y(X_batch)\n",
225 | "    lik = np.sum(norm.logpdf(Y_batch*Y_std, loc=m*Y_std, scale=Y_std*v**0.5), 1)\n",
226 | "    sq_diff = Y_std**2*((m - Y_batch)**2)\n",
227 | "    return lik, sq_diff \n",
228 | "\n",
229 | "S = 100\n",
230 | "def assess_sampled(model, X_batch, Y_batch):\n",
231 | "    m, v = model.predict_y(X_batch, S)\n",
232 | "    S_lik = np.sum(norm.logpdf(Y_batch*Y_std, loc=m*Y_std, scale=Y_std*v**0.5), 2)\n",
233 | "    lik = logsumexp(S_lik, 0, b=1/float(S))\n",
234 | "    \n",
235 | "    mean = np.average(m, 0)\n",
236 | "    sq_diff = Y_std**2*((mean - Y_batch)**2)\n",
237 | "    return lik, sq_diff"
238 | ]
239 | },
240 | {
241 | "cell_type": "markdown",
242 | "metadata": {},
243 | "source": [
244 | "## Training \n",
245 | "\n",
246 | "We'll optimize the single layer models using L-BFGS and the DGP models with Adam. It will be interesting to compare the result of `m_svgp` with `m_dgp1`: if there is a difference it will be down to the optimizer. \n",
247 | "\n",
248 | "We'll also show the result of using a small and a large number of iterations."
249 | ]
250 | },
251 | {
252 | "cell_type": "code",
253 | "execution_count": 7,
254 | "metadata": {
255 | "collapsed": true
256 | },
257 | "outputs": [],
258 | "source": [
259 | "iterations_few = 100\n",
260 | "iterations_many = 5000\n",
261 | "s = '{:<16} lik: {:.4f}, rmse: {:.4f}'"
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": 8,
267 | "metadata": {},
268 | "outputs": [
269 | {
270 | "name": "stdout",
271 | "output_type": "stream",
272 | "text": [
273 | "after 100 iterations\n",
274 | "SGPR 100        lik: 0.9481, rmse: 0.0895\n",
275 | "SVGP 100        lik: 0.7620, rmse: 0.1090\n",
276 | "FITC 100        lik: 1.0514, rmse: 0.0910\n",
277 | "after 5000 iterations\n",
278 | "SGPR 100        lik: 0.9758, rmse: 0.0864\n",
279 | "SVGP 100        lik: 0.9736, rmse: 0.0868\n",
280 | "FITC 100        lik: 1.1284, rmse: 0.0828\n"
281 | ]
282 | }
283 | ],
284 | "source": [
285 | "for iterations in [iterations_few, iterations_many]:\n",
286 | "    print('after {} iterations'.format(iterations))\n",
287 | "    for m, name in zip(models_single_layer, names_single_layer):\n",
288 | "        ScipyOptimizer().minimize(m, maxiter=iterations)\n",
289 | "        lik, rmse = batch_assess(m, assess_single_layer, Xs, Ys)\n",
290 | "        print(s.format(name, lik, rmse))"
291 | ]
292 | },
293 | {
294 | "cell_type": "markdown",
295 | "metadata": {},
296 | "source": [
297 | "Now for the DGP models. 
First we use Adam for all parameters (as in the Doubly Stochastic VI for DGPs paper)" 298 | ] 299 | }, 300 | { 301 | "cell_type": "code", 302 | "execution_count": 9, 303 | "metadata": {}, 304 | "outputs": [ 305 | { 306 | "name": "stdout", 307 | "output_type": "stream", 308 | "text": [ 309 | "after 100 iterations\n", 310 | "DGP1 100 lik: 0.2778, rmse: 0.1139\n", 311 | "DGP2 100 lik: 0.2394, rmse: 0.1170\n", 312 | "DGP3 100 lik: 0.2165, rmse: 0.1289\n", 313 | "after 5000 iterations\n", 314 | "DGP1 100 lik: 0.9434, rmse: 0.0896\n", 315 | "DGP2 100 lik: 1.2913, rmse: 0.0661\n", 316 | "DGP3 100 lik: 1.3039, rmse: 0.0655\n" 317 | ] 318 | } 319 | ], 320 | "source": [ 321 | "for iterations in [iterations_few, iterations_many]:\n", 322 | " print('after {} iterations'.format(iterations))\n", 323 | " for m, name in zip(models_dgp, names_dgp):\n", 324 | " AdamOptimizer(0.01).minimize(m, maxiter=iterations)\n", 325 | " lik, rmse = batch_assess(m, assess_sampled, Xs, Ys)\n", 326 | " print(s.format(name, lik, rmse))" 327 | ] 328 | }, 329 | { 330 | "cell_type": "markdown", 331 | "metadata": {}, 332 | "source": [ 333 | "We can also use natural gradients for the final layer, which can help considerably. " 334 | ] 335 | }, 336 | { 337 | "cell_type": "code", 338 | "execution_count": 10, 339 | "metadata": {}, 340 | "outputs": [ 341 | { 342 | "name": "stdout", 343 | "output_type": "stream", 344 | "text": [ 345 | "after 100 iterations\n", 346 | "DGP1 100 lik: 0.9487, rmse: 0.0891\n", 347 | "DGP2 100 lik: 1.2837, rmse: 0.0668\n", 348 | "DGP3 100 lik: 1.2958, rmse: 0.0661\n", 349 | "after 5000 iterations\n", 350 | "DGP1 100 lik: 0.9549, rmse: 0.0890\n", 351 | "DGP2 100 lik: 1.2915, rmse: 0.0664\n", 352 | "DGP3 100 lik: 1.3147, rmse: 0.0650\n" 353 | ] 354 | } 355 | ], 356 | "source": [ 357 | "for iterations in [iterations_few, iterations_many]:\n", 358 | " print('after {} iterations'.format(iterations))\n", 359 | " for m, name in zip(models_dgp, names_dgp):\n", 360 | " ng_vars = [[m.layers[-1].q_mu, m.layers[-1].q_sqrt]]\n", 361 | " for v in ng_vars[0]:\n", 362 | " v.set_trainable(False) \n", 363 | " ng_action = NatGradOptimizer(gamma=0.1).make_optimize_action(m, var_list=ng_vars)\n", 364 | " adam_action = AdamOptimizer(0.01).make_optimize_action(m)\n", 365 | "\n", 366 | " Loop([ng_action, adam_action], stop=iterations)()\n", 367 | "\n", 368 | " lik, rmse = batch_assess(m, assess_sampled, Xs, Ys)\n", 369 | " print(s.format(name, lik, rmse))" 370 | ] 371 | }, 372 | { 373 | "cell_type": "markdown", 374 | "metadata": {}, 375 | "source": [ 376 | "Note that even after 100 iterations we get a good result, which is not the case using ordinary gradients." 
377 | ] 378 | } 379 | ], 380 | "metadata": { 381 | "kernelspec": { 382 | "display_name": "Python 3", 383 | "language": "python", 384 | "name": "python3" 385 | }, 386 | "language_info": { 387 | "codemirror_mode": { 388 | "name": "ipython", 389 | "version": 3 390 | }, 391 | "file_extension": ".py", 392 | "mimetype": "text/x-python", 393 | "name": "python", 394 | "nbconvert_exporter": "python", 395 | "pygments_lexer": "ipython3", 396 | "version": "3.5.3" 397 | } 398 | }, 399 | "nbformat": 4, 400 | "nbformat_minor": 1 401 | } 402 | -------------------------------------------------------------------------------- /demos/run_regression.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Hugh Salimbeni 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import sys, os 16 | import numpy as np 17 | import tensorflow as tf 18 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' 19 | 20 | import socket 21 | 22 | import itertools 23 | 24 | from gpflow.likelihoods import Gaussian 25 | from gpflow.kernels import RBF, White 26 | from gpflow.training import AdamOptimizer 27 | 28 | import gpflow_monitor 29 | 30 | from scipy.cluster.vq import kmeans2 31 | from scipy.stats import norm 32 | from scipy.special import logsumexp 33 | 34 | from doubly_stochastic_dgp.dgp import DGP 35 | from datasets import Datasets 36 | datasets = Datasets() 37 | 38 | results_path = '/vol/bitbucket/hrs13/tmp_results_{}/'.format(socket.gethostname()) 39 | 40 | dataset_name = str(sys.argv[1]) 41 | L = int(sys.argv[2]) 42 | split = int(sys.argv[3]) 43 | 44 | 45 | iterations = 10000 46 | log_every = 100 47 | minibatch_size = 10000 48 | 49 | 50 | 51 | data = datasets.all_datasets[dataset_name].get_data(split=split) 52 | X, Y, Xs, Ys, Y_std = [data[_] for _ in ['X', 'Y', 'Xs', 'Ys', 'Y_std']] 53 | 54 | print('############################ {} L={} split={}'.format(dataset_name, L, split)) 55 | print('N: {}, D: {}, Ns: {}'.format(X.shape[0], X.shape[1], Xs.shape[0])) 56 | 57 | Z = kmeans2(X, 100, minit='points')[0] 58 | 59 | D = X.shape[1] 60 | 61 | kernels = [] 62 | for l in range(L): 63 | kernels.append(RBF(D)) 64 | 65 | for kernel in kernels[:-1]: 66 | kernel += White(D, variance=2e-6) 67 | 68 | mb = minibatch_size if X.shape[0] > minibatch_size else None 69 | model = DGP(X, Y, Z, kernels, Gaussian(), num_samples=1, minibatch_size=mb) 70 | 71 | # start the inner layers almost deterministically 72 | for layer in model.layers[:-1]: 73 | layer.q_sqrt = layer.q_sqrt.value * 1e-5 74 | model.likelihood.variance = 0.05 75 | 76 | global_step = tf.Variable(0, trainable=False, name="global_step") 77 | model.enquire_session().run(global_step.initializer) 78 | 79 | s = "{}/{}_L{}_split{}".format(results_path, dataset_name, L, split) 80 | fw = tf.summary.FileWriter(os.path.join(s.format(dataset_name, L)), 81 | model.enquire_session().graph) 82 | 83 | opt_method = gpflow_monitor.ManagedOptimisation(model, AdamOptimizer(0.01), global_step) 84 | 85 | its_to_print = (x 
* log_every for x in itertools.count())
86 | 
87 | opt_method.tasks += [
88 |     gpflow_monitor.PrintTimings(its_to_print, gpflow_monitor.Trigger.ITER),
89 |     gpflow_monitor.ModelTensorBoard(its_to_print, gpflow_monitor.Trigger.ITER,
90 |                                     model, fw),
91 |     gpflow_monitor.LmlTensorBoard(its_to_print, gpflow_monitor.Trigger.ITER,
92 |                                   model, fw, verbose=False),
93 |     gpflow_monitor.StoreSession(its_to_print, gpflow_monitor.Trigger.ITER,
94 |                                 model.enquire_session(), (s+'/checkpoints').format(dataset_name, L))
95 | ]
96 | 
97 | class TestTensorBoard(gpflow_monitor.ModelTensorBoard):
98 |     def __init__(self, sequence, trigger: gpflow_monitor.Trigger, model, file_writer, Xs, Ys):
99 |         super().__init__(sequence, trigger, model, file_writer)
100 |         self.Xs = Xs
101 |         self.Ys = Ys
102 |         self._full_test_err = tf.placeholder(tf.float64, shape=())
103 |         self._full_test_nlpp = tf.placeholder(tf.float64, shape=())
104 | 
105 |         self.summary = tf.summary.merge([tf.summary.scalar("test_rmse", self._full_test_err),
106 |                                          tf.summary.scalar("test_nlpp", self._full_test_nlpp)])
107 | 
108 |     def _event_handler(self, manager):
109 |         minibatch_size = 1000
110 |         S = 100
111 |         means, vars = [], []
112 |         for mb in range(-(-len(Xs) // minibatch_size)):
113 |             m, v = model.predict_y(Xs[mb * minibatch_size:(mb + 1) * minibatch_size, :], S)
114 |             means.append(m)
115 |             vars.append(v)
116 |         mean_SND = np.concatenate(means, 1)
117 |         var_SDN = np.concatenate(vars, 1)
118 | 
119 |         mean_ND = np.average(mean_SND, 0)
120 | 
121 |         test_err = np.average(Y_std * np.mean((Ys - mean_ND) ** 2.0) ** 0.5)
122 |         test_nll_ND = logsumexp(norm.logpdf(Ys * Y_std, mean_SND * Y_std, var_SDN ** 0.5 * Y_std), 0, b=1 / float(S))
123 |         test_nll = np.average(test_nll_ND)
124 | 
125 |         summary, step = model.enquire_session().run([self.summary, global_step],
126 |                                                      feed_dict={self._full_test_err: test_err,
127 |                                                                 self._full_test_nlpp: test_nll})
128 |         self.file_writer.add_summary(summary, step)
129 | 
130 | 
131 | 
132 | opt_method.tasks.append(TestTensorBoard(its_to_print, gpflow_monitor.Trigger.ITER,
133 |                                         model, fw, Xs, Ys))
134 | 
135 | 
136 | 
137 | 
138 | opt_method.minimize(maxiter=iterations)
--------------------------------------------------------------------------------
/demos/using_natural_gradients.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "## Natural gradients can help the DGP model considerably\n",
8 | "\n",
9 | "This is especially true with a Gaussian likelihood. Here we show an example of 1D regression (with thanks to Loïc Brevault for the test function)\n"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stderr",
19 | "output_type": "stream",
20 | "text": [
21 | "/Users/hughsalimbeni/anaconda3/envs/prowler_env/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: compiletime version 3.6 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.5\n",
22 | "  return f(*args, **kwds)\n",
23 | "/Users/hughsalimbeni/anaconda3/envs/prowler_env/lib/python3.5/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. 
In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 24 | " from ._conv import register_converters as _register_converters\n", 25 | "/Users/hughsalimbeni/anaconda3/envs/prowler_env/lib/python3.5/site-packages/multipledispatch/dispatcher.py:24: AmbiguityWarning: \n", 26 | "Ambiguities exist in dispatched function _expectation\n", 27 | "\n", 28 | "The following signatures may result in ambiguous behavior:\n", 29 | "\t[Gaussian, Linear, NoneType, Sum, InducingPoints], [Gaussian, Identity, NoneType, Kernel, InducingPoints]\n", 30 | "\n", 31 | "\n", 32 | "Consider making the following additions:\n", 33 | "\n", 34 | "@dispatch(Gaussian, Identity, NoneType, Sum, InducingPoints)\n", 35 | "def _expectation(...)\n", 36 | " warn(warning_text(dispatcher.name, ambiguities), AmbiguityWarning)\n" 37 | ] 38 | } 39 | ], 40 | "source": [ 41 | "import numpy as np\n", 42 | "import tensorflow as tf\n", 43 | "import matplotlib.pyplot as plt\n", 44 | "\n", 45 | "from gpflow import autoflow, params_as_tensors\n", 46 | "from gpflow import settings\n", 47 | "from gpflow.mean_functions import Zero, Linear\n", 48 | "from gpflow.likelihoods import Gaussian as Gaussian_lik\n", 49 | "from gpflow.kernels import RBF, White\n", 50 | "from gpflow.training import AdamOptimizer, NatGradOptimizer\n", 51 | "from gpflow.actions import Action, Loop\n", 52 | "\n", 53 | "from doubly_stochastic_dgp.dgp import DGP\n" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 2, 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "f = lambda X: -(np.multiply(np.sin(40*np.power((X-0.85),4)), np.cos(2.5*(X-0.95)))+(X-0.9)/2+1)/2" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 3, 68 | "metadata": {}, 69 | "outputs": [ 70 | { 71 | "data": { 72 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX8AAAD8CAYAAACfF6SlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFJtJREFUeJzt3X+sX3V9x/Hn21L0Lv4o2K60hVrMagMTQ/UbhnE4N8rK\nzGIb55hmzrrAGjXuj202a0OyLfqHdZ3bssREOzQrJpso6UoT2SoUHfshxMuKFFhqkYFwW2hFSma4\nSqnv/fE9xdvr9/Z7vvece78/zvORND3nfD/9fj7ntn19z/fz+ZzzicxEktQsL+t3AyRJ88/wl6QG\nMvwlqYEMf0lqIMNfkhrI8JekBjL8JamBDH9JaiDDX5Ia6Jx+N2AmixcvzlWrVvW7GZI0VO67777v\nZ+aSbuUGNvxXrVrF+Ph4v5shSUMlIh4vU85uH0lqIMNfkhrI8JekBjL8JamBDH9JaiDDX5IayPCX\npAYy/CWpgQx/SWqggb3Dt6o9BybYse8QR05MsnzRGFvWr2Hj2hX9bpYkDYSRDP89BybYtvsgkydP\nATBxYpJtuw8C+AEgSYxot8+OfYdeCv7TJk+eYse+Q31qkSQNlpEM/yMnJns6LklNM5Lhv3zRWE/H\nJalpRjL8t6xfw9jCBWccG1u4gC3r1/SpRZI0WEZywPf0oK6zfSSps5EMf2h/ABj2ktTZSHb7SJLO\nzvCXpAYy/CWpgQx/SWogw1+SGsjwl6QGqiX8I+LaiDgUEY9ExNYOr788Im4pXr83IlbVUa8kaXYq\nh39ELAA+A/wGcCnwvoi4dFqx64FnM/MXgL8BPlW1XknS7NVx5X8F8EhmPpqZLwBfAjZMK7MB2FVs\n3wpcHRFRQ92SpFmoI/xXAE9M2X+yONaxTGa+CDwHvLaGuiVJszBQA74RsTkixiNi/Pjx4/1ujiSN\nrDrCfwK4aMr+hcWxjmUi4hzgNcAz098oM3dmZiszW0uWLKmhaZKkTuoI/28BqyPi4og4F3gvsHda\nmb3ApmL7PcBdmZk11C1JmoXKT/XMzBcj4qPAPmAB8IXMfCgiPg6MZ+Ze4PPAFyPiEeAHtD8gJEl9\nUssjnTPzduD2acf+bMr2j4DfrqMuSVJ1AzXgK0maH4a/JDWQ4S9JDWT4S1IDGf6S1ECGvyQ1kOEv\nSQ1k+EtSAxn+ktRAhr8kNZDhL0kNZPhLUgMZ/pLUQIa/JDVQLY90boo9BybYse8QR05MsnzRGFvW\nr2Hj2unLFUvS4DP8S9pzYIJtuw8yefIUABMnJtm2+yCAHwCSho7dPiXt2HfopeA/bfLkKXbsO9Sn\nFknS7Bn+JR05MdnTcUkaZIZ/ScsXjfV0XJIGmeFf0pb1axhbuOCMY2MLF7Bl/Zo+tUiSZs8B35JO\nD+o620fSKKgU/hFxPnALsAp4DLguM5/tUO5fgSuB/8jM36xSZz9tXLvCsJc0Eqp2+2wF9mfmamB/\nsd/JDuD3KtYlSapJ1fDfAOwqtncBGzsVysz9wP9VrEuSVJOq4b80M48W208BSyu+nyRpHnTt84+I\nO4ELOrx049SdzMyIyCqNiYjNwGaAlStXVnkrSdJZdA3/zFw302sR8XRELMvMoxGxDDhWpTGZuRPY\nCdBqtSp9kEiSZla122cvsKnY3gTcVvH9JEnzoGr4bweuiYjDwLpin4hoRcRNpwtFxL8DXwGujogn\nI2J9xXolSRVUmuefmc8AV3c4Pg7cMGX/qir1SJLq5eMdJKmBDH9JaiDDX5IayPCXpAYy/CWpgQx/\nSWogw1+SGsjwl6QGMvwlqYEMf0lqIMNfkhrIBdyHxJ4DEy4eL6k2hv8Q2HNggm27DzJ58hQAEycm\n2bb7IIAfAJJmxW6fIbBj36GXgv+0yZOn2LHvUJ9aJGnYGf5D4MiJyZ6OS1I3hv8QWL5orKfjktSN\n4T8Etqxfw9jCBWccG1u4gC3r1/SpRZKGnQO+Q+D0oK6zfSTVxfAfEhvXrjDsJdXGbh9JaiDDX5Ia\nqFL4R8T5EXFHRBwufj+vQ5nLI+KbEfFQRDwQEb9TpU5JUnVVr/y3AvszczWwv9if7nngA5n5i8C1\nwN9GxKKK9UqSKqga/huAXcX2LmDj9AKZ+Z3MPFxsHwGOAUsq1itJqqBq+C/NzKPF9lPA0rMVjogr\ngHOB71asV5JUQdepnhFxJ3BBh5dunLqTmRkReZb3WQZ8EdiUmT+ZocxmYDPAypUruzVNkjRLXcM/\nM9fN9FpEPB0RyzLzaBHux2Yo92rgq8CNmXnPWeraCewEaLVaM36QSJKqqdrtsxfYVGxvAm6bXiAi\nzgX+Gbg5M2+tWJ8kqQZVw387cE1EHAbWFftERCsibirKXAe8HfhgRNxf/Lq8Yr2SpAoiczB7V1qt\nVo6Pj/e7GZI0VCLivsxsdSvnHb6S1ECGvyQ1kOEvSQ1k+EtSAxn+ktRAhr8kNZDhL0kN5DKOI2bP\ngQnX+pXUleE/QvYcmGDb7oNMnjwFwMSJSbbtPgjgB4CkM9jtM0J27Dv0UvCfNnnyFDv2HepTiyQN\nKsN/hBw5MdnTcUnNZfiPkOWLxno6Lqm5DP8RsmX9GsYWLjjj2NjCBWxZv6ZPLZI0qBzwHSGnB3Wd\n7SOpG8N/xGxcu8Kwl9SV3T6S1ECGvyQ1kOEvSQ1k+EtSAxn+ktRAhr8kNVCl8I+I8yPijog4XPx+\nXocyr4uI/46I+yPioYj4UJU6JUnVVb3y3wrsz8zVwP5if7qjwFsz83Lgl4CtEbG8Yr2SpAqqhv8G\nYFexvQvYOL1AZr6QmT8udl9eQ52SpIqqBvHSzDxabD8FLO1UKCIuiogHgCeAT2XmkYr1SpIq6Pp4\nh4i4E7igw0s3Tt3JzIyI7PQemfkE8Kaiu2dPRNyamU93qGszsBlg5cqVJZovSZqNruGfmetmei0i\nno6IZZl5NCKWAce6vNeRiHgQuAq4tcPrO4GdAK1Wq+MHiSSpuqrdPnuBTcX2JuC26QUi4sKIGCu2\nzwN+GXBpKUnqo6rhvx24JiIOA+uKfSKiFRE3FWUuAe6NiG8D/wb8VWYerFivJKmCSo90zsxngKs7\nHB8Hbii27wDeVKUeSVK9nHYpSQ1k+EtSAxn+ktRALuM4B/YcmHAdXUkDzfCv2Z4DE2zbfZDJk6cA\nmDgxybbd7clNfgBIGhR2+9Rsx75DLwX/aZMnT7Fjn7c2SBochn/NjpyY7Om4JPWD4V+z5YvGejou\nSf1g+Ndsy/o1jC1ccMaxsYUL2LJ+TZ9aNH/2HJjgbdvv4uKtX+Vt2+9iz4GJfjdJ0gwc8K3Z6UHd\nps32caBbGi6G/xzYuHZF4wLvbAPdTftZSMPAbh/VwoFuabgY/qqFA93ScDH8VYsmD3RLw8g+f9Wi\nl4FuH38h9Z/hr9qUGeh2VpA0GOz20bzy8RfSYDD8Na+cFSQNBsNf88pZQdJgMPw1r5wVJA0GB3w1\nr5r6+Atp0FQK/4g4H7gFWAU8BlyXmc/OUPbVwMPAnsz8
aJV6Nb/qnprZxMdfSIOmarfPVmB/Zq4G\n9hf7M/kEcHfF+jTPTk/NnDgxSfLTqZk+sVMablXDfwOwq9jeBWzsVCgi3gIsBb5WsT7VqMwjmJ2a\nKY2mquG/NDOPFttP0Q74M0TEy4BPAx/r9mYRsTkixiNi/Pjx4xWbprMpe0Xv1ExpNHUN/4i4MyIe\n7PBrw9RymZlAdniLjwC3Z+aT3erKzJ2Z2crM1pIlS0qfhHpX9oreqZnSaOo64JuZ62Z6LSKejohl\nmXk0IpYBxzoUeytwVUR8BHglcG5E/DAzzzY+oDlW9op+y/o1ZzyOAZyaKY2Cqt0+e4FNxfYm4Lbp\nBTLzdzNzZWauot31c7PB339lr+g3rl3BJ999GSsWjRHAikVjfPLdlzlbRxpyVef5bwe+HBHXA48D\n1wFERAv4UGbeUPH9NUd6uaJ3aqY0eqLdVT94Wq1Wjo+P97sZI81HK0ujJyLuy8xWt3Le4dtgXtFL\nzeWzfSSpgbzy10Cza0qaG4a/Bparfklzx24fDSwfLSHNHcNfA8tHS0hzx/DXwPLREtLcMfw1sFz1\nS5o7DvhqYLnqlzR3DH8NNG9Eq0cvU2bLlnUa7nAz/KUR18uU2bJlnYY7/Ozzl0ZcL1Nmy5bt5T3L\nrBin+eeVvzTEynS99DJltmzZsuX8hjC4vPKXhlTZpTh7mTJbtmzZcn5DGFyGvzSkygZrL1Nmy5Yt\nW67XbwjdPshUH7t9+swZE5qtssHay5TZsmXLllu+aIyJDu3s5RuC/x/mhuHfR/aHaiZlLgrKBiv0\nNmW2bNky5cquGNfrozy8aKrObp8+8sFl6qRsF8gw3AFddg3oXsYl7CKqh1f+feSDy9RJ2S6QYbkD\nus5vCGAXUV0M/z7q5Wu7zm6UugF6uSgYlTuge/kg6+XnM0r/Lupm+PdRL1c7mtmojZ009aKg7AdZ\n2Z/PqP27qFulPv+IOD8i7oiIw8Xv581Q7lRE3F/82lulzlFStj9UZzcsYydl57EPQ19+P5X9+XiP\nwdlVvfLfCuzPzO0RsbXY/9MO5SYz8/KKdY2kUfna3k/DMHbSy1XosPTl90vZn493IZ9d1fDfALyj\n2N4FfIPO4S/NmWHoJul1kNKLgrMr8/PxHoOzqzrVc2lmHi22nwKWzlDuFRExHhH3RMTGmd4sIjYX\n5caPHz9esWlqimHoJhmGbyejpu67kE8blS6irlf+EXEncEGHl26cupOZGRE5w9u8LjMnIuL1wF0R\ncTAzvzu9UGbuBHYCtFqtmd5LOsMwdJMMw7eTUVP3XcgwWl1EXcM/M9fN9FpEPB0RyzLzaEQsA47N\n8B4Txe+PRsQ3gLXAz4S/NFv97CYpM53QmV394T0GM6va578X2ARsL36/bXqBYgbQ85n544hYDLwN\n+MuK9UoDoeyV4DB8O2mqpt5jUDX8twNfjojrgceB6wAiogV8KDNvAC4BPhcRP6E9xrA9Mx+uWK80\nK3X/h+zlStBB3MHVxHsMKoV/Zj4DXN3h+DhwQ7H9X8BlVeqR6tDrf8i6F0rR8CvbRTQM3UM+2E2N\n0etNP3UvlKLhV/bGzGG4KPDxDmqMXv5Dlr1ycyC3eeq8x6CfvPJXY/Ryld7LQik+okPT9XLvSb/u\nG/DKX43Ry1X6XC2UomYoO4OonwPDhr8ao5cpfXbnqKoyFwX9HBg2/NUovSxRCM7L19zq58Cw4S/N\nwO4czbV+Dgw74CtJfdLPhxJ65S9JfdLP7kXDX5L6qF/di3b7SFIDGf6S1ECGvyQ1kOEvSQ1k+EtS\nAxn+ktRAhr8kNZDz/Bn8tTYlqW6ND/9hWGtTkurW+G6fXpb2k6RR0fjwH4a1NiWpbpXCPyLOj4g7\nIuJw8ft5M5RbGRFfi4j/iYiHI2JVlXrr5ALckpqo6pX/VmB/Zq4G9hf7ndwM7MjMS4ArgGMV661N\nPx+pKkn9UjX8NwC7iu1dwMbpBSLiUuCczLwDIDN/mJnPV6y3Ni7ALamJIjNn/4cjTmTmomI7gGdP\n708psxG4AXgBuBi4E9iamac6vN9mYDPAypUr3/L444/Pum2S1EQRcV9mtrqV6zrVMyLuBC7o8NKN\nU3cyMyOi0yfJOcBVwFrge8AtwAeBz08vmJk7gZ0ArVZr9p9KkqSz6hr+mbluptci4umIWJaZRyNi\nGZ378p8E7s/MR4s/swe4kg7hL0maH1X7/PcCm4rtTcBtHcp8C1gUEUuK/V8DHq5YrySpgqrhvx24\nJiIOA+uKfSKiFRE3ARR9+x8D9kfEQSCAv69YrySpgkqPd8jMZ4CrOxwfpz3Ie3r/DuBNVeqSJNWn\n8Xf4SlITGf6S1ECGvyQ1kOEvSQ1U6Q7fuRQRx4E6bvFdDHy/hvcZFp7vaPN8R1dd5/q6zFzSrdDA\nhn9dImK8zK3Oo8LzHW2e7+ia73O120eSGsjwl6QGakL47+x3A+aZ5zvaPN/RNa/nOvJ9/pKkn9WE\nK39J0jQjE/4RcW1EHIqIRyLiZ5aTjIiXR8Qtxev3DtI6wrNR4nz/uFgv+YGI2B8Rr+tHO+vS7Xyn\nlPutiMiIGNoZImXONSKuK/5+H4qIf5zvNtapxL/llRHx9Yg4UPx7fmc/2lmXiPhCRByLiAdneD0i\n4u+Kn8cDEfHmOWlIZg79L2AB8F3g9cC5wLeBS6eV+Qjw2WL7vcAt/W73HJ/vrwI/V2x/eNTPtyj3\nKuBu4B6g1e92z+Hf7WrgAHBesf/z/W73HJ/vTuDDxfalwGP9bnfFc3478GbgwRlefyfwL7SfgHwl\ncO9ctGNUrvyvAB7JzEcz8wXgS7TXF55q6nrDtwJXF0tPDqOu55uZX8+frpV8D3DhPLexTmX+fgE+\nAXwK+NF8Nq5mZc71D4DPZOazAJnZaRGlYVHmfBN4dbH9GuDIPLavdpl5N/CDsxTZANycbffQXg9l\nWd3tGJXwXwE8MWX/yeJYxzKZ+SLwHPDaeWld/cqc71TX076SGFZdz7f4anxRZn51Phs2B8r83b4B\neENE/GdE3BMR185b6+pX5nz/Anh/RDwJ3A784fw0rW96/f89K5We56/BFxHvB1rAr/S7LXMlIl4G\n/DXttaGb4BzaXT/voP2N7u6IuCwzT/S1VXPnfcA/ZOanI+KtwBcj4o2Z+ZN+N2yYjcqV/wRw0ZT9\nC4tjHctExDm0vz4+My+tq1+Z8yUi1gE3Au/KzB/PU9vmQrfzfRXwRuAbEfEY7X7SvUM66Fvm7/ZJ\nYG9mnszM/wW+Q/vDYBiVOd/rgS8DZOY3gVfQfg7OqCr1/7uqUQn/bwGrI+LiiDiX9oDu3mllpq43\n/B7grixGV4ZQ1/ONiLXA52gH/zD3CUOX883M5zJzcWauysxVtMc43pXtFeWGTZl/y3toX/UTEYtp\ndwM9Op+NrFGZ8/0exYqBEXEJ7fA/Pq+tnF97gQ8Us36uBJ7LzKN1VzIS3T6Z+WJEfBTYR3v2wBcy\n86GI+Dgwnpl
7gc/T/rr4CO3Blvf2r8XVlDzfHcArga8U49rfy8x39a3RFZQ835FQ8lz3Ab8eEQ8D\np4At2V5SdeiUPN8/Af4+Iv6I9uDvB4f4wo2I+CfaH96Li3GMPwcWAmTmZ2mPa7wTeAR4Hvj9OWnH\nEP8MJUmzNCrdPpKkHhj+ktRAhr8kNZDhL0kNZPhLUgMZ/pLUQIa/JDWQ4S9JDfT/DEQwhhfzPXgA\nAAAASUVORK5CYII=\n", 73 | "text/plain": [ 74 | "" 75 | ] 76 | }, 77 | "metadata": {}, 78 | "output_type": "display_data" 79 | } 80 | ], 81 | "source": [ 82 | "X = np.linspace(0, 1, 30).reshape(-1, 1)\n", 83 | "Y = f(X)\n", 84 | "plt.scatter(X, Y)\n", 85 | "plt.show()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 4, 91 | "metadata": { 92 | "collapsed": true 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "def make_dgp2(X, Y):\n", 97 | " kernels = [RBF(1, lengthscales=0.1), RBF(1, lengthscales=0.1)]\n", 98 | " model = DGP(X, Y, X, kernels, Gaussian_lik(), num_samples=10)\n", 99 | " model.likelihood.likelihood.variance = 1e-4\n", 100 | " for layer in model.layers[:-1]:\n", 101 | " layer.q_sqrt = layer.q_sqrt.value * 1e-5\n", 102 | " return model" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": {}, 108 | "source": [ 109 | "We'll create the model twice and optimize one with Adam alone and the other with natural gradients for the final layer with Adam for the inner layer and all hyperparmeters and inducing points" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 5, 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "model_adam = make_dgp2(X, Y)\n", 119 | "model_nat_grads = make_dgp2(X, Y)\n", 120 | "\n", 121 | "iterations = 5000" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": 6, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "AdamOptimizer(0.001).minimize(model_adam, maxiter=iterations)\n" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": 7, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "ng_vars = [[model_nat_grads.layers[-1].q_mu, model_nat_grads.layers[-1].q_sqrt]]\n", 140 | "for v in ng_vars[0]:\n", 141 | " v.set_trainable(False)\n", 142 | "ng_action = NatGradOptimizer(gamma=1.).make_optimize_action(model_nat_grads, var_list=ng_vars)\n", 143 | "adam_action = AdamOptimizer(0.001).make_optimize_action(model_nat_grads)\n", 144 | "\n", 145 | "Loop([ng_action, adam_action], stop=iterations)()\n", 146 | "\n", 147 | " " 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": 35, 153 | "metadata": {}, 154 | "outputs": [ 155 | { 156 | "data": { 157 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlUAAAEyCAYAAADTHyXNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd4HNW9v98zW9VlW3LBBUw1YGNTTYmBhBTnl0AKIQm5\nyb0JBEISQIhebdlpdFkQQm8hBC4lhQC5pIAJhBbTwRTb2OBu9bZ1Zs7vjyk7u1rJkr0r7UrnfR49\n2p2dnTm7OzPnM98qpJQoFAqFQqFQKHYObaQHoFAoFAqFQjEaUKJKoVAoFAqFIgcoUaVQKBQKhUKR\nA5SoUigUCoVCocgBSlQpFAqFQqFQ5AAlqhQKhUKhUChygBJVCoVCoVAoFDlAiSqFQqFQKBSKHKBE\nlUKhUCgUCkUO8I/ETmtqauRuu+02ErtWKBQjxKuvvtoipawd6XHsLOr6pVCMPQZ7/RoRUbXbbrux\nYsWKkdi1QqEYIYQQH4/0GHKBun4pFGOPwV6/lPtPoVAoFAqFIgcoUaVQKBQKhUKRA5SoUigUCoVC\nocgBIxJTlY1kMsmGDRuIxWIjPZRhJxwOM23aNAKBwEgPRaFQKEYdY3l+UQyNnZ2PC0ZUbdiwgYqK\nCnbbbTeEECM9nGFDSklraysbNmxg5syZIz0chUKhGHWM1flFMTRyMR8XjPsvFosxYcKEMXfACyGY\nMGGCuoNSKBSKPDFW5xfF0MjFfFwwogoYswf8WP3cCoVCMVyo66xiMOzscVJQokqhUCgUCoWiWFGi\naojcc889nHnmmSM9DIVCoVCMYv70pz+xcuXKYdlXrue1I488EoB169bx+9//Pi/7WbduHbNnz87J\ntnKJElUKxRglrhtIKUd6GGMHQwfTGOlRKIqEnRVVuq7ncDRD44UXXgD6iqqxwE6LKiHEdCHEM0KI\nlUKId4UQdbkY2Ejx1a9+lYMPPpj999+f2267DYC7776bvffem8MOO4x///vf7rp/+ctfmD9/Pgce\neCCf/exn2bp1KwANDQ38z//8DwsWLGDXXXflD3/4AxdeeCFz5sxh4cKFJJPJEflsCoVD0jDpiCTp\niY/chXfMEW2DWOdIj0IxAqxbt459992X0047jf3335/Pf/7zRKNRAG6//XYOPfRQ5s6dy4knnkgk\nEuGFF17gscce44ILLmDevHmsWbMmbXtr1qzh8MMPZ86cOVx++eWUl5cDsHz5chYsWMAJJ5zAfvvt\nB2Sf06D/ee3hhx9m9uzZzJ07l6OPPrrPZ/npT3/KY489BsDXvvY1TjnlFADuuusuLrvsMgB3PBdf\nfDHPPfcc8+bNo7GxEYBNmzaxcOFC9tprLy688MKs39fSpUs59NBDmT17Nqeffrp78/fqq68yd+5c\n5s6dy0033ZT2/S5YsICDDjqIgw46yBV1y5cv55hjjuErX/kKu+++OxdffDH3338/hx12GHPmzOnz\nveaCXJRU0IHzpJSvCSEqgFeFEH+XUu6wxF7yl3dZuakrB0NLsd8ulSw+fv/trnfXXXcxfvx4otEo\nhx56KF/60pdYvHgxr776KlVVVXz605/mwAMPBOBTn/oUL730EkII7rjjDq6++mquu+46wDron3nm\nGVauXMkRRxzBo48+ytVXX83XvvY1nnjiCb761a/m9PMpFEPBMK2LlGmO8EDGCkZSWakKhb9eDFve\nzu02J8+BL1454CqrVq3igQce4Pbbb+eb3/wmjz76KN/97nf5+te/zmmnnQbA5Zdfzp133slZZ53F\nCSecwJe//GW+8Y1v9NlWXV0ddXV1nHzyydxyyy1pr7322mu88847bkmAzDntxBNPJJFI9DuvLV26\nlKeeeoqpU6fS0dHRZ98LFizgueee44QTTmDjxo1s3rwZgOeee45vf/vbaeteeeWVXHvttTz++OOA\n5f574403eP311wmFQuyzzz6cddZZTJ8+Pe19Z555JosWLQLge9/7Ho8//jjHH388P/jBD/j1r3/N\n0UcfzQUXXOCuP3HiRP7+978TDodZtWoVJ598stuf88033+S9995j/Pjx7L777vzwhz/klVdeoamp\niRtvvJFly5YN+LsNlZ22VEkpN0spX7MfdwPvAVN3drsjxQ033MDcuXM5/PDDWb9+Pffddx/HHnss\ntbW1BINBvvWtb7nrbtiwgS984QvMmTOHa665hnfffdd97Ytf/CKBQIA5c+ZgGAYLFy4EYM6cOaxb\nt264P5ZCkYYjqsQgrwC6YRJLKlGww+hx679pgHK5jklmzpzJvHnzADj44IPdeeCdd95hwYIFzJkz\nh/vvvz9tHumPF198kZNOOgmA73znO2mvHXbYYWk1ljLntFWrVvHyyy/3O68dddRRfP/73+f222/H\nMPqe846oWrlyJfvttx+TJk1i8+bNvPjii24s1UAcd9xxVFVVEQ6H2W+//fj44759ip955hnmz5/P\nnDlzePrpp3n33Xfp6Oigo6PDtZ5973vfc9dPJpOcdtppzJkzh5NOOinNbXrooYcyZcoUQqEQe+yx\nB5///OeB/M3FOS3+KYTYDTgQeDnLa6cDpwPMmDFjwO0MxqKUD5YvX84//vEPXnzxRUpLSzn22GOZ\nNWtWv37ts846i3PPPZcTTjiB5cuX09DQ4L4WCoUA0DSNQCDgpmlqmjaivm6FAsC0J3ZtkOnDrb0J\nAMIBX97GNKoxEqnHpg4+1T1hxNiORSlfOHMCgM/nc91/3//+9/nTn/7E3Llzueeee1i+fPlO7aes\nrMx9nG1O214NpltuuYWXX36ZJ554goMPPphXX32VCRMmuK87Fqz/+7//4+ijj6atrY2HHnqI8vJy\nKioqtju+zO8hcz6MxWL85Cc/YcWKFUyfPp2GhobtjrmxsZFJkybx5ptvYpom4XA46/40TUubm/Mx\nF+csUF0IUQ48Cpwjpezju5NS3ialPERKeUhtbW2udptTOjs7GTduHKWlpbz//vu89NJLRKNRnn32\nWVpbW0kmkzz88MNp60+dahnl7r333pEatkIxZBxLlQpUHwaktESV3764m+qmSpGiu7ubKVOmkEwm\nuf/++93lFRUVdHd3Z33P4YcfzqOPPgrAgw8+2O+2s81pAPPnz+93XluzZg3z589n6dKl1NbWsn79\n+qz7X7ZsGUcffTQLFizg2muvZcGCBX3WG+gz9IcjoGpqaujp6eGRRx4BoLq6murqap5//nmAtO+q\ns7OTKVOmoGka9913X1YL23CRE1ElhAhgCar7pZR/yMU2R4KFCxei6zr77rsvF198MYcffjhTpkyh\noaGBI444gqOOOop9993XXb+hoYGTTjqJgw8+mJqamhEcuUIxNFxRNcT3KRG2A+hxS1gFSq3nSlQp\nPPzsZz9j/vz5HHXUUcyaNctd/u1vf5trrrmGAw88sE9A9bJly7j++us54IADWL16NVVVVVm3nW1O\nAwac1y644ALmzJnD7NmzOfLII5k7d26f7S5YsABd19lz
zz056KCDaGtryyqqDjjgAHw+H3PnznUD\n1bdHdXU1p512GrNnz+YLX/gChx56qPva3XffzU9/+lPmzZuXdi36yU9+wr333svcuXN5//3306x1\nw43Y2YuksPxa9wJtUspzBvOeQw45RDpBZA7vvfde2g871hjrn18xvGzrjiGl5c6rKtm+K2prl3X3\nWFseQtN2rOKwEOJVKeUhO/TmAiLb9atfpITeZkBAWY312BeAknF5HaMindF2fY1EIpSUlCCE4MEH\nH+SBBx7gz3/+80gPa9SQ7XgZ7PUrFzFVRwHfA94WQrxhL7tUSvlkDratUCjygcz4P8S3KQaJk/VX\nMg6EAM2fygJMxsAXBE2VC1QMjVdffZUzzzwTKSXV1dXcddddIz0khc1Oiyop5fOAaqqkUBQhcogy\nybJsq9N90Jh2TTonMF3zQTIB8R6Id0MgrKxWiiGzYMEC3nzzzZEehiILBXWLNFbjNcbq51aMHK6h\nahCHnvf4VEfqEDGSVt0Kzc6a1PzWl57osZ7rif7fq8gp6jqrGAw7e5wUjKgKh8O0traOuQNfSklr\na2taCqhCkU+GKpJMz0pj7PTceTLLJ2j2YyltgWVawkpVYc0rY3V+UQyNXMzHOa1TtTNMmzaNDRs2\n0NzcPNJDGXbC4TDTpk0b6WEoxggyTSRtf5Ix00SYmpSGhDStuCkHv+dxqNxqWxNptaxZpeNV/ao8\nMZbnF8XQ2Nn5uGBEVSAQSKsCq1Ao8oPs53F/pIkqpamGRvnEvl+a5rctWCEIV0O8ywpej3dbwkqR\nc9T8ohguCkZUKRSK4cGxTgkGJ5KUZ2onyaxaXzrBCmDXNNDCVrB6rBOSUesHGWSVe4VCUXgUTEyV\nQqEYHhwdJYQYlDtP96iqYrNUCSEWCiE+EEKsFkJc3M863xRCrBRCvCuE+H3eB6VpqerqDr5QqvK6\nQqEoWpSlSqEYYzjCSBNgDMIKpRsSTQhMKYsqpkoI4QNuAj4HbAD+I4R4TEq50rPOXsAlwFFSynYh\nxMQRGaw/ZFmo9HhfwaVQKIoGZalSKMYYjjDShBiUREqaJkGfdakoMkvVYcBqKeVHUsoE8CDwlYx1\nTgNuklK2A0gptw3zGC2EsALalaVKoShqlKhSKMYYKUuVsJ/3r5RMU1pt6/z2unkfXU6ZCni7wW6w\nl3nZG9hbCPFvIcRLQoiFwza6THzBVAV2hUJRlChRpVCMUYR99g9kfTJkyqplBbYXmazaPn5gL+BY\n4GTgdiFEdeZKQojThRArhBAr8paW77dr4yQj+dm+QqHIO0pUKRRjjD6WqgHWNT2iClF0lqqNwHTP\n82n2Mi8bgMeklEkp5VrgQyyRlYaU8jYp5SFSykNqa2vzM1qf34qnSihRpVAUK0Ujqpq740QS+kgP\nQ6EoelIxVdZzw+xfKnmD2p3n5gDrFxj/AfYSQswUQgSBbwOPZazzJywrFUKIGix34EfDOcg0/GGr\nYKihrnUKRTFSFKJKSokpJd0xdaFRKHYWRyj5Nev0N7O49KSUxJKGK7gs958gaZg098SJJgo/7kdK\nqQNnAk8B7wEPSSnfFUIsFUKcYK/2FNAqhFgJPANcIKVsHZkRk6q+bsRHbAgKhWLHKYqSCv3dSRum\nRDdNQn7fMI9IoSh+Aj7L/KR7zq+WnjimlIT8PmJJg6BPQwCaJhAidS52x5OUBAv/vJNSPgk8mbFs\nkeexBM61/0Yen99qvqzHIVg20qNRKBRDpCgsVU6wbGah4UhCpzOaHIERKRTFi7f4p08TGEZKVBl2\ntp/j4ksYJsI+8TTPCSglJAdT5EoxdPxhS1QpF6BCUXQUhahyCjpb+Uee5ZKii5xVKEYaKaV7JvmE\nSKuY7uAVUE48lS/jriaWLHwXYFESLLf+qyxAhaLoKApRlUrr7vua0lQKxdCQgKOqfD7hnl9evMsc\ngeW33YU+TRDya8R1ZanKC5oGvoDVdFmhUBQVxRFTZbsn+lz67QVSStdFoVAoBkbKlNVXEyJrnSrD\nTFmzAn7r3stn39WYUlIRDma9yVHkCM2vqqsrFEVIUYgqJzspM0vJSQ1Xjd0ViiHgOV8cYWSaEs2j\nkkwpKQ36qAgH3GUBn+a+36cUVX7xBSAZtWIftKJwKCgUCopMVClfn0Kx80hSVijHtWdKiZYRs5gp\nnHyaIOz3EQ6qST7vaPal2UyCphosKxTFQlGIKun573X1Ka2lUOwcjsUqW9USLYv5t6o00HdFRe4Z\nTA8hhUJRcBSFqPK6/aS0hFVzT9yzTALKHaFQDAbLXW6dL05sVbYCoMrFN4K4okolAygUxURx2PFl\nuptCL542GQpFweG9BfHqpsxmyX4lqkYOJaoUiqKk4EWVlFY4unPXLMkWsK5QKHYE781KprFKZdSO\nIEJYf0pUKRRFRRGIKuu/LyOOKts6CoVi+1hxidZjzS2TMIIDUmRHaEpUKRRFRsHHVDlWKSerOGmY\n6Eb20goKhWL7ZJ4tQtiWqhEZjaJ/lKVKoSg2ikBUWf8d919PXFUZVih2Fm/LJ00IpJmKqQr5NcpD\nBX9pGP2IgnckKBSKDAr+rHXunwfKREroptsAVqFQDExmsqxPCBKG6d7AhAM+/L6CvzSMflRMlUJR\ndBT8ldOJl9JEZjvlFJGEQUc0OWxjUihGE2UhP6aURO0GySo+vUBQMVUKRdFRXKJqgKt90lAXH4Ui\nG9GEQVtvqo+cRKYJp6BfQ4jUOdT/7YtiWFGiSqEoOgo+cMIJVBdYNXWUl0+hGBpdMcuK63YjkH1L\n5fqEUPXfCg2hOdWOlflQoSgSCt5SZUirT5m32Ws44Cuaa4xumCR0dbepGHkGKj3i9zTtLZZza9Tj\n/BCqZoxCUTQUvKiKJ02CfmuYzqUl5NfSJoFCprU3QXsksf0VFYo84HWLe3toZrrSfb7Uc6WpCgRV\nVV2hKDoKWpkkdBNTSsIBX9rybI1eoW+bDYVirGN4XHrZ+vs5+DznlKqkXiAoUaVQFB0FLap8mqAs\n5CdkW6ocMaWJ7HfThooJUSj6xWlGDn3PH7+yVBUeSlQpFEVHwYuq8pDfvXOuKglQFvL3W0OnkANt\nVR0txUjgNU5JUv39Mo1RAZ+KqSo4lKhSKIqOghZVmTgiKxtCWPWqCpWBXC8KxXCwvUOwNGi52ZX7\nr0BQokqhKDoKvqTCYCkN+umN66m08QJDGaoUI4G3o5+UqWD1bLWoKsIBKsKBYRqZYru41zF18VAo\nioWislR5ydRNWoFnHytLlWIk6Ov+s2OqCu++Q5ENVQBUoSgqilZUZeLceReqdFGaavQhpaQrlizo\nBAnvyAp4mIr+UKJKoSgqRo+oci1VOzdzJPT8FOtUlqrRRyxpEk0Y9Cb0kR7KdhFY54Y6CosMp6q6\nQqEoCnIiqoQQdwkhtgkh3snF9naGzMtP0jDZ1hVDH2RvwPZI7op1egWeElWjD920jqn+6qYVAtLu\nSIDVnUbNz8W
GEMpSpVAUEbmyVN0DLMzRtgZFyJ8qCOrXhGupyhQvcd1EAtHk8GcGpsezKEYbulH4\nv6oEEJbw887NBawDc4oQYqEQ4gMhxGohxMUDrHeiEEIKIQ4ZzvFtFyGUElYoioicZP9JKf8lhNgt\nF9saLCVBHyG/5vYEdFx2mdcfZ+4YiQnQu0d1XRx9OHXRCtkKKaUVbyiwA9Vxin+OflUlhPABNwGf\nAzYA/xFCPCalXJmxXgVQB7w8/KPcHtYvp1AoioNhi6kSQpwuhFghhFjR3Nyck216myxr/cwRzoSX\nNEfYhK6ui6MOx71b0N4Zt9inGIvC/jBgtZTyIyllAngQ+EqW9X4GXAXEhnNwg0IFqisURcWwiSop\n5W1SykOklIfU1tbmfPtObarMicNtIjvECSUXfQRVL8LRTapBcWH/zkKk7B39VVQfpUwF1nueb7CX\nuQghDgKmSymfGGhD+bgpHBTK/adQFBWjJ/vP/p85wXnbwwxF5OTiOpbm/ivwiVcxNNKTEEZwINtB\nIrM6+saGphoYIYQGXA+ct711831T2D8FXoBPoVCkMXpEVT/XnrRg8SFcl3IRJ7Oj+x6NSCnzUqpi\npPD+ngUfU+VacQt3nHliIzDd83yavcyhApgNLBdCrAMOBx4rqGB198I2es4dhWI0k6uSCg8ALwL7\nCCE2CCFOzcV2hzgGoG/oknfC296Ukmvrg7JOpYjrJu2RBJ3R5EgPJSekF9Us3N9ZYtk6Mm86CrGV\nUx74D7CXEGKmECIIfBt4zHlRStkppayRUu4mpdwNeAk4QUq5YmSGmwW3/1/hHmMKhSJFrrL/Ts7F\ndnYWp8ChF1N640mcKSY7mS09dhZv/MpYvyQ6wiOWNKgqKf7+cs5xpglRVBagVPbf6EdKqQshzgSe\nAnzAXVLKd4UQS4EVUsrHBt5CIaD6/ykUxcSoaagMgOhrYZJSomkCw9y+TMpXCYSxkL6+Pbzfp26Y\n+H358zzHkgad0SS15aG0DNFc4nwcTYAuKdhG3s7xP1aRUj4JPJmxbFE/6x47HGMaEsr9p1AUFaMm\npgps8ZIWx2QJKV8/mYGZ5LoCurMJTYzJeJY0vJ9ez3Nkd3dMz/t+nJ/TZwuWQg1Wt2yzKdf4GMv+\nK36U+0+hKCpGl6gS6W47p9Gtc6e+PVtVri1Vzv40Ica88d4rKvMtqhxBPBwxbYUeBO54vJW1tFhR\n7j+FopgYVaJKE4K4bpK0+/wZ9kTn1wZrqUo9zqWlSlkFbIuJ3S7FGKbq9vnUOSnBnL995AKJTDv+\nnK+kEF2Viiwo959CUVSMKlHlNE3ujVvuH6eIut83uAkk15YNdwJjTFazTkOa1vegifxmy3kbZxvD\n4P7rL+u0EJFO7NdID0QxeJT7T6EoKkaVqCoLWXH3jqvDsVQNPqbK8ziH4xKaKq/gWExEnl2hw13q\nwLFUFeycZ2e/KiVVpAjl/lMoiolRJ6r8mnAFjGFINCHQxOBiqnKNsgqkkNJy/WUre5FLzLRkg7zt\nxpOEMDLH1mCx3K4i7bk6KIsMIZT7T6EoEkaVqIL0xrGGlPg00W+19UxyXQHdmcCE+2Ts4i1CmVdL\nVUb2Z97249R7KmBLlfP5RdoyFbRedAitMA8whULRh9EnqkhN2oYp8Qkx6LiXtIlSXcNyilXHafji\nyzQhhtVSVYh4v+fMzFhFMaEuSApFsTDqRJU1mdop9VKi2Z9wMG4nN/g4R3fyjlUg33FExYBTLynf\n8WXOb++3C77mi1QSgv28gH/gNN0nVTZq0SFEYR9gCoXCZdSJKkQqy0nisSQMwu3krZKdk4nfnsBS\nE+/YvTCm6iWR15tu14Lkia3Lz36k7c4s3Jgqb/ZpalnhjVOxHZT7T6EoGkadqHLcHGaGe2YwbicV\nWJ4/hjv7T8uzxyQtXq5AcWOqMhLICnnMiv5QokqhKAZGn6jCuqkzMyaUwcRJeQsj5qqiuhOcDWP7\nZlPaqf35ttqZHgtSvgPiLddu6nmh4R2S102pCn8WGSr7T6EoGkadqHIsU26LGtdSNUjXRw7nm8wJ\nrADn3WHBEVCayL8Icb7zvLtci7D4p6JIUe4/haJoGHWiypm0U6LKWT4Y959tfaDvJGmakoQ+tLvF\nbDEtYxFvux5vc9/87MzeT77Fm6f9S75rb+0o2dokOaUtFMWEyv5TKIqF0SeqMqupa15L1XYYoMZR\nd0ynPZIYsrBKd/+NzQujV1zm+7swpbSLjOZXvDnuTGBQSRAjgVsixJOB6pS2UBQRKvtPoSgaRp+o\ncixVRnp2lnVd2v6FyWvl8OKINKev4GBQE5hFn4Bp8ih2SBey+WxVk+baLcA5TzX0HiU4/f9MFVel\nUBQ6o1ZUJQyzT0DuYIp/WsHUfYOcnclZH0Lto8w1C3DeHRbSAqbz7JYzM4RsPkOqnN2IQjVVZcGp\nF6YocEzTc/Cq/n8KRbEw+kSVZ8IoC/myLu+PgSZg0xZTppRDcl0VeobYcJBW2dt1y+Xny3Di4vLd\nk897DBR6tfLMNjWKAicZg2Wz4cpdYdt7Kn1YoSgiRp2ocmKo/JqgNOhPvTDIsARhV+v0rmua1pQZ\n9Flf12CtVdJrzlAMXwC5NjzzkDdQvRBJuf882ZDIwh2wwmLDK9C1EeKd8MrtKfefKqugUBQ8o1JU\nTSgLMqE8lLZ8MNaE/jSQI6KCfuvrGmz7E2+gsPf5WCMtYDr/O0urh5XH3bifJld1zXJN6ntPW6hi\nrAqdtf8C4YNZX4a3HgIjab9QgAeZQqFIY9SJKgC/r+/HGkx7FG9AtVcAOfFUjqgaSlzVcMT3FDpp\nJRUct1w+Y528TbTzWA8Lj6WqEH/a/koqKAqcdc/DLvNg9omQ6Ibm96zlY/UColAUEaNSVGVjsBW2\nM1t6QEpU+ezilYPOKHOsJsoyAKRbkPKRleeKYs9+8hZT5dnuYDNLRxLvMSiAzkiS3S5+gqff3zpi\nY1JkwTRg85sw7TCYdoi1bONr1n/l/lMoCp4xI6ocBpr8HJdOpgYyPXf8mhCDvra5VpMdGegoJZ8C\nM90ilr4sH6RiqvLbEmdHcXPHMr50IQQfbO0G4Obla4Z5VIoBaV0NyQhMmQtV06FsYkpUFeRRplAo\nvIwZUTWYudwb2CvTlqdqXmlC5LX20WgkLWA6j21d0ks3DEPldu/TAjwkBmoQnuqNqSR/QbH5Tev/\nlLmWat9lHmx5x1pWiAeZQqFIY+yIqsFaLrLMMaZMTT7aIN1/aa6oPMf3FDqZAdP5auvi7TGYz/1A\nygrp7KgQkxAkZD2eBaAbznc1nCNSbJfNb4I/DDV7W8+rd4Wu9aqpskJRJIwdUTWItiVOPzc3Hsee\nkKWUaT0EBxOnnt7vLrX9schwVfbu8+0KBvVb7SyFauzJVh/MoTehZ12uGGE2vwmTZoPPLgdTPR1i\nnRDvQbn/FIrCZ+yIqsH0nOvnJVOmrB9aEQQlFxp9YnvyVIA882fJV6Vz
rxXS/V+gh0Q20SQE9MQs\nUaWNmStAEWCalqiaMje1rGq69b9789g1dSsURcSYu6QOxvuX6Sp0mvSCHajO9oVVqsGEqqieiSA/\ndZ1cN6NrVcyPdTDT8jbYzNJhp5+aVALhWqq0QjWz5QghxEIhxAdCiNVCiIuzvH6uEGKlEOItIcQ/\nhRC7jsQ4AehYB/GudFFVPcP637VJuf8UiiJg7ImqAWa/tzZ2ct3fPky5/ezlpkxFJTuT0PbcSsPZ\nRLjQyQyYFnku7ORYZ7Q8FeXM3GSmu7hQcHpZQl9x1W1bqkZzoLoQwgfcBHwR2A84WQixX8ZqrwOH\nSCkPAB4Brh7eUdqYJvxjifV4lwNTy6umWf+7Nw//mBQKxZAZE6JKSsmWzhixpDHgOpf/8R3ueWEd\nmzpiGS/iiamy/g8lA3A0T1yDIVvA9LBYkMivkE1VVE/ff6EgZfZjTwjojVuiyjBHtfXjMGC1lPIj\nKWUCeBD4incFKeUzUsqI/fQlYNowj9Fiy1uw8k9w5Nkw5YDU8rKJ4AtB54bCO8AUCkUfxoSouvP5\ntRx77XIW//ndASfzmvIgAK990g5YQktK6x1e9x9sX1Rls2YUmiVjuHCaHDvkW2J6rTP5zDLM1CvF\n9Os6oir/gMjXAAAgAElEQVSS6P9GYxQwFVjveb7BXtYfpwJ/zeuI+mPL29b/g7+fvlzToHwiRFqV\n+0+hKALGhKh6c0MnAOvbI/3e7EkJk6vCALxqiypIufm8gerO+gMxXBlvRUFmAHm+3HKeeliQv6Kc\nfQVzYf7Ink46fei2RVW0H1GVNEzets+bbGzqiLKlM9bv68WGEOK7wCHANf28froQYoUQYkVzc3Pu\nB7DlLQhWwLiZfV8rq4FIG8Ul2xWKscmYEFWftPYCqTiSbEggZk8wq7f2uMsyrRKDrTnVxyI2hES0\nWNKgM5rc/oqDJJowaO2Jj6ilLLNNSn7ETnpWHmJ4PCaDyiwdAaSU2QPVPe4/J2A9k0v+8DbH//p5\nNnVEs76+6M/v8oN7/pOzseaJjcB0z/Np9rI0hBCfBS4DTpBSxrNtSEp5m5TyECnlIbW1tbkf6ea3\nYPLs7OmYZbW2paqwji+FQtGXsSGq2qyQiY5IAnOACPNeW1R5XSKZqzuWqu26/xyrCR6rib0saZj9\nWggAOqNJYkljSJN00jDdiTKTrlgS3ZTE9ZFxH3gDpt1leXHLWf+HO/vPXZ7zPe0clqUqS0wVgp4B\nLFVSSh55dQMA727qyrrtrliSyrA/d4PND/8B9hJCzBRCBIFvA495VxBCHAjciiWoto3AGK0g9a3v\nwOQ52V8vrVHuP4WiSBj1oqorlqQ9kqS6xE/CkESS2YWHlNK9a3cC2qVMTcpule4htj/pM/FKSVtv\ngq5YkkQWkeNdljQGtxfDtLbZE9f7iBXv83hyhERVRsB0vlyimfWw8lU/KmURK/xA9f78fz1x6xjv\njfcVVau39biP39/cj6iKJqksCez0GPOJlFIHzgSeAt4DHpJSviuEWCqEOMFe7RqgHHhYCPGGEOKx\nfjaXP9rXQqIHJh+Q/fWyGuhtAWThHWQKhSKNgr/V3Fk+abWsVHOnV/Pshy10RHRqyvuuJ0m5Qpy7\nd4nMapUQg2xVA+lB08h0y1e2beiebCxjkOXAve8xJfg8Y00Yprt/5/Fwkzm3W7FO+QkgTy/dkKeY\nqmxFRim8ivlOhwDoe/w6Vs1o0sA0JZqnX81Kj5B6b0t2UdUd06kMF7aoApBSPgk8mbFskefxZ4d9\nUJm4/f76E1W1YCYg0evcoQzf2BQKxZAY9ZYq5677oBnjAGjvTfS7bsS+a49mKb3QRxRsR59kBk2D\nNcl5hVQ2UWVKeOKtzSz5y0qSg0x3d4uUmrLPNh1hFg74MKUc0P2ZL/q4+vIU65RZusF5mK/PnClY\nCs6IIPs1VKW5ijOP9w+2dOPXBMfNmsj7W7qzvr8rmqSqwC1VRcOWt0ALQO2+2V8vs2O4lAtQoSh4\nRr2oenltGxVhP4fNHA9AW8QSVX3dZOl3784yR6R4K09rg4jVydZE2NnmWxs6ueDhN+no7RuMrhsm\nSx9fyZNvb6Z7kMHqppTcvHwNn7rqaeJ6+gTpiKqgz/qp9REQVZAt/ig/sU5ppRscsZPzPdnbz/hf\naEj6qVOFdYw7xqnMsgofbu1m99oydqspY2uWDD/DlHTHdSpLRr2he3jY/BbUzgJ/MPvrZTXWf5UB\nqFAUPKNeVL2ytpWqRAt3/Pp6wLJUSSmpr6+noaHBXS9hGCQMiU9YsUyOGMnu/tt+U+WsrUwkRBM6\np/12Bf9a1cK7m/umrL/8UZv7eJUntmUgogmDe15YhyGhI5IuxEzTEoQBV1QN/51uZsB03rwXGZ4R\nRwjnOig+mxXSu7wQyOxP6EUIQSxpML4sBNAnweH9Ld3sM7mS2ooQvQmDVz9u58s3PscHttWqO2Yd\nY8Xg/isKtrzdv+sPUqIq2lZYB5lCoejDqBZV27pirGnupSq+jXtuvQmA1p4E9fX1NDU10dHR4U4+\njuuvtsKaaNxgdXtbItNSNdjefxkTb6vH/djc3Td7e/W2lLtl1dbsrpdM3ljf4T7OFFWGlPg0gU+z\nZM1IWKoyA6bzHUDed3l+KcSK+X2ry3vc0FISS5pMrrKO9fZIIu21zZ0xpo8rYaJ9Ltz977W8s7GL\n79/9CgBdUUuEFXqgelHQvQV6t/UfpA7K/adQFBE5EVXba1o6Ujz7oVWk7+pzT+HM038AwOU/+wVN\nTU3U1dXR2NjoTohd9t13SlSZKfeflKxr6UVKSU9cRzC43n/pcVjW5O6tP9XS01dUbemO4RMQ9muD\ntlR1eoRUR4bL0DAlPqdwqSZGJqYK2cfSl68Acq+bNl+xTo5464klufCRN9nYEUlbXgi4oj6LrcrJ\nMJ0xvhSAbR5x35swMExJVUnAPRf+tnIrAJs7Y0QSunuuFEFJhcJn81vW//7KKYBVUgGU+0+hKAJ2\nWlQNsmnpiLD8g2YmVoTYb5dKljU2YsYjaEFrIvEKKsCt21NTbk0k8aThZv899OoGjr12OQf97O/M\nXvwUTU+vGkRMFenWGdu61eURPVu7+oqqrZ1xaipCTB9faleA3/5F1CukOiPpgfhSSreeoE+IQWcU\n5pR+AqZz7pajb0KBtTw/7r9r//YBD63YwCMrNqQtLwT6a6UDuHF3021R5bWYOqK/qiTAxAqrw0BC\nN6kutaxSmzqi7jGsLFU54JMXQPgGFlWBMATKLVFVSAeZQlGsGEmIdoCZ+zZdubBUbbdpaT6547mP\neOCVT7K+9uaGDubvPgGA+vp6zGQcESxxn3sn9R672rpzdx5NGkgJkYTOrc+uAeDTsyYyfXwJdz2/\njo+aewcclzQl/1nb3scy5HXPtWS4/0xTsrUrRm15iHFlQdp6E32uod2xZJplCtKtX+2e10zTkhM+\nzWOpGhlNlV6nKl/7kZkWMWd
5jvdj7+uxNzcDKXFcSNNd5li830vMrlc2bVwpQqRbqpxjy2upAvj6\ngVaf4Q3tUddSpbL/csD7T8Jun4Jw5cDrlakCoApFzjCSkMzeLWJnyYWoGmrT0pzy8yfe45I/vN1n\nuZSS5u44kypD1NfXc8NNN1NZGuIrJ36Turo6mpqa0oSVY6mqtS1VMftufm1zLz1xg9/810Fc/815\n3HfKfADe3tg5oCvtuVUt/PT3r3Hbcx8BqT50nR7XSWtGeQdTSlp64tRWhBhXGqCjN5lWIiGuG0QS\nBjHdSNt3RzS1nQ6PpSozc9GnCUy7SfRwkS1gOp9iZzgaN0sp6YwmXTH7wdbugmuYPVDvSaeyflnQ\nx4SyEM3dqQw/r6Wq2iOaTj7M6vayqSPmrqMsVTvJ8qug5QOY9aXtr+s0VS4o6a5QFCmmbl0cNV/O\nNz1sger5aEjqtdBk9ijrievEdZOJFSGqq6v50U/PZurkiXTHda677nrq6uqorq52LShOYPoEW1RF\nEyamHbQLMG1cCVJKpo8vJeQXfLStJ03wJA0zrRq6s72n3t3iLpMyNeYZ40tp642niSPdNF1RVV0a\npCOaSHPXeSuse2tYdUSSVIR9fb4TM2NidWKrhtNalTV70nXL5WFfGbFb3jHkEue4mFpdwqqt3Xb1\n/cLBW/VdN8w04eTcMIQDPmorQryyto3Nndb54xVM3oKgM2vK8GmCT9oi/PM9q5uLiqnaCdb+C5b/\nEmZ9GQ787vbXL6uFaLty/ykUucDUQcvP9SsXWx1U01Ip5W3AbQCHHHJITq4M3ky5V9a28dUDUwYy\nJ06kpjzE6Q0NbOuKcfpvVxCJG0j6xlQ5LpFxZdbd9wMPPcyTPR8z6/gfAdbkWV9fT3V1NXtOPI41\nLT1p4qQ9YrnqqkoChAM+t9aV2zvN3pUTjzJjQikvrG7FkBLNfrEzqhPXJbtUhYnrkrgu6Y3rhAKW\nYDIMacdmWQIr5E9tszocRDfiaTFbmZYqJ7bKMKXrEsw3EssNedMzq5kxvpSTDp6+3ffs8L4y3X/u\nGHIfu+WI+OP2nchvX/yYrd0xdg2W5XQ/O4Mz9yYMgy/d8Dwb2iL868JPE/Bp7rEeDmhUhPy8srmL\nE3/zAi9ccpx7/DiuvcfP+hTjyoL4fRqTK8M88MondEaTnHHMHlSokgo7zr9vgIpd4MQ7IFCy/fXL\nJsD6l5X7T6HIBaYOvvxcv3Jhqdpu09J8sWprKjvumQ/Se6G29FhuMCfwHKA05Lfacsi+afCOCAr5\nLAHTE4lx88238Mhfnybs11h6+UVuGYa9J5XzUXNvmqXK2ywZrDYeYAX5RhOG29y3K5akJKAxsTxE\nZzTprg+wxbZ+TKkuoabcKgTodREaUuLXNHyaZX1w6IrqlJf4qQgH0ixVzphcUeVaqobX/bemuYeb\nnlnDRY++zV3/Xutx/+Ve7PTEdL7+m3/zn3Vt+XMzSssNBrhFZZu7Y4VlqrJ5cU0rH7dGMCSsb4+C\nSFlRw36f6zre1BnDMGXK/WcHps+eWsXUamvSn1lTRmc0iU8TnH3cniPwaUYJUsKm12DPzwxOUIFl\nqYq0WYXnFArFjiOlFaCeJ0vVTouq/pqW7ux2B9if+3jVth7CAY1vHDyNp9/bllZN3ClXUFsRQkrL\nVlES9Nmiqu/s1xO3JpPSkCWqTvj6Nzjjxz/mvXWb6dr6MTd4yjDsMbGclp6EG9zuddE51zxne5Aq\n1wDQE9UpC/mtcZFeVmGzbf3YxSOqmj2vOxYmvybcelNSWkKtMhygPOSnK5Yq5JiyVFnPfSMhqkh9\nhvKQn5ueWZ32Ws72Y3+mNzZ08NonHZx0y4vu75LzzythY0eU8WVB9pxoNZJs7k4UVkkFeyje42tt\ns3UT4txAhIM+rv7GXOZOqwKsSuqd0SSagPJg3wvOgr2s1P6KsJ/SLK8rBknPNis+atIAGX+ZlNUC\nJsQ6truqQqEYACfjr1BFFVhNS6WUe0sp95BS/iIX28zGH1/fwD5X/B8Nj1ma7ZO2CDPGl7Jw/8l0\nx3Ve/bjdXdfr/nM0T2nQRzSePYUymrDUkOP2iOuSX/7yl/gqajC6rBgwx2U4pdJKNd9mx6l4RZXh\nBL7HUvvpjiWtQGZT0hXXKQ/5mWhvw5vOvtEWVVOqwkyw09nbelJtdUy7kKe33pSUVvxYRYmfypJA\nmoAz7VpZwnX/jUxMVUu39Rm+ftBU2iNJuu3ikbnUOs62Pticcgm/vbHTrQ+WSyRWlubU6hImu8dC\nvKDCXRyB1+axdH7UEkEgiHssVfOmV3PDyQcC8PonHXRGk33iqRw+PWsiAGVKUO04sU6rgjrApP0H\n/74SyyJKtG3g9RQKxcCYtuFB5D5IHYqkonp7b4KtXTGufepDErrJE29bqezrbVF1wHTrTvsDT/PX\nlp44moDxZUHXUlEW9BNJ6lknP8cl4mQ0RRM6l156Kb6yavQe60LmZAs6qeZbuixR5Wzfr6XqQHV7\nWn90xXSu/NWvWLS4ge5okvKQn0mV1jZuvP1edz1ne5Mrwyn3n21pcLbrE5alyolVMm1LVVU4QHnI\nR2+apcoSVP/3zma+cfML3PvCOoQYbkuVdEtHHLKbNTGsb899sUxnSx9s6Sbotw7r1z/pyEvzZimt\nemDjy4JUlQQI+TWau2MFZKdKfea2ngQlAR/TxoX5qKUHIUiLqQIraaIk4GNNcw8bO6JpWX9e9ppY\nzk+O3YNbvnvwsHyGUUe0Ha6cAf/7X9bzIYmqansbylKlUOwUMr+WqoK+5WzpibP0Lyt57M1N7rJZ\nkyv4qNmqbr6+LcLhu0+gtjxEdWmADz0xVtu64owvC1rxR54U8rguSRomJaSr1FjSIOgTlNhB4U8+\n9Xeeue12Zpx1Hz88/VQ69pI0NTUB8K2zLgdSMVCO4An4NaIJw6q87rEYdUWTdHR2cued9zDrxwcx\ne88Z3H1zE4SOpDWi2wHWgm1dcSrDfkqCfre0gxMb5ljAfJ5aU7opMU1Jb9ygqiRAT0wnkkxZyKSU\nbGiPUPfgG8R1kw+2dPPZfSe6cWPDgZTQ3BujusTPXrarbEN7lKnjSnJsqbI29t6WLj6zz0Te3NDB\nG+s7OGHeLjk3VUmgPZpkr0kVluWyKsy27kRhlVSw/7dHkowvCzJjfInbHNmNqbKPdSEE08eXcOfz\nawE4eNdxWbcphODChbPyO/DRzEfPWv/1mNWWpnT84N8btkVVrG+/UIVCMQTccgr5sSkVtKXqwy3d\naYKqIuTnqwdOJWGYbGiP0pswmDG+FCEEe0+s4ENPr7z3t3S58S7OXFduZyv1JtIbyIItqgJWEHjQ\nr+EPlXDq6WcghY+AT6OxsdEtw+BUmnYmKcfNFrB/JMOU9MQNQn7LhdIT17nqyis59dQf0tET5Z9/\nfZx7b7kBgAWfP951z23tjlFTHkITUBbyE/QJN1DdidVyYqqc/bbbgcbjSoOUhnxEE6ka
VqaEJ9/e\nTNIwuf6bc+mO6zy3qsUVaMNFc3eCSZVht4L3J20DF07dESSgGybr2yLsMbGMedOreX19u+3+y3VF\ndUlXJEl1qWVNnFQZprmr0CxV1jHZ2ptgQnmQkoCVpCHoK6oApo8rdR9f/Y0B+tApdpw1/0w93v+r\nQ3tviS10VUyVQrFz5DFIHQpcVB25Zw33nnIYPzp6d372lf353x8dwTg7K+ntjdYdmzNR7z25nA+3\ndmOYkoRu8t7mbuZOs+7uHEtSmV2DIJIlriqWNAn5rUmmJOBj3sGHcs6FlwJQXuLHMCWNjY1cfsUi\nykI+Qn7BVjumyjSteCVH+JrSCk53xFd3TEcIwZIlDYhgKWYigkxECfs1WnviVryUaRUrdZrY+jRB\ndWmQtl7LdaabJsJe7pRDMExJq23JmlARpCzotyxlOOOQ/PO9bRw2czxfmTeVmvIQz3zQPKzuP7Bc\nmJMqw5SH/NSUB/mkzXb/5TimqrkngSlhanUpsyZXsr4tSsyujJ9LErpJT8JgfJklqiZXhdnaHRv2\n73UgJICwYqrGlwUpCfrcRI6Ybbkt8Yoq+zz63H6T2KO2fLiHO/rR4/DBX2Hm0XDEmXDY6UN7v3L/\nKRS5wTRA5E/6FLT7D+CYvWs5Zu9a97kzIb/+iRWUvusEazI4ao8afvfSJzz66gZmTCglYZjMsbOa\nnMmuwhZV3ngnh5huELZjccIBjWhSp9utfh4gYZiU+vyWuBHCrkRtxztJq2mxN7uuJ5ZkYmWY9e1R\nezuSRYsb0IKfwoz2WEos0cu/X32L+ufv5qprrqOlO87+u1RSX19PZVU11dWfpq3XGoMj3MBywwh7\nv07M1YSyIKVBH0lTktANSoJ+tnbG+Kill+/Mn4FPExy7Ty1/e3cLSX340rJNKWnuTrDfFKsNx6zJ\nle5vl9uYKum6Y6eOK3F71X3cGmHWlO20ABkiHa510NpHbXnIEreFo6mQ0ir82dabYK9J5fiFIJqw\njl3HUhXypy4sTgzfLlXhERnvqOfdP0JvM3zqNtjjM0N/v3L/KRS5QZqgBfO2+YK2VGXDmcieW9VC\nyK+xe41VcHHh7MnMnlpJ0z9XcfLtLwFw4AzLZC6xXKhOuQRvMLdDNGkS9FiqYklJt53BVx7yu9Yu\nxxhRUx5ye6YZprQsVbao0m33X015EE1YVaovuugi7v7t7xCaxhUXn0dd3dl0bt3ApvZempqaqLvg\nYjpjOu+veJ6mpia6OjuoLgm67j1HuDlomkCa0GK7ByeUhyi3RWOvbYn7wC6OOmeqJS6P3aeWrpjO\nO5u6hi3+xzQlHZGEW6n+2H1qWbWtl82dsZxbqrZ0WdmT08aVuK7ftS29Of+snXb24jjbUlVTESKm\nm0SyuJVHCiklmoDW3jgTbEtVTHfcfyZBv5aW4eccOzMmFE4B01HFe3+Bqhmw+6d37P3+IPhLIN6p\nqqorFDtFRuuNHFN8osqeyN7f0s3+u1Ti91kfQQjB3pMq2NgRRUqo/+zebtFC5649JTr6Tn6JpOlm\nQ5UEfcSSBp0xS7BUlvjd4HDH6jWuNGBXUU8vdSCwKp/3JqzSCeUhPz0xneqqKk7+71Ot7ZUGaWxs\nZNeJlQSrJ3JW3Tnc/ZBVL/WFvz3m1sMaV273/zMlummmTYKaEBhS0m6LqpqyEKX25+uJ65imZM02\nK3B/1mTLUnPIrlZg7AdbuoatrEIkYZA0pSuGj93Hsjq+8lFrjutUwZaOVOuY3SZYbVXWtvTm3IDk\n9FccZ8dUTbCPybbeZMEEq5sSogmDWNJkfFnIOqYTtvsvmbLKOpw8fwaLj9+P/z5i15EY7uhn0+sw\nY372ZozbwzSQva0QroJYF0izYI4zhaLokHLHzsNBUnSiypvufYAdM+XgTHKQmrwBsL9DJ6aqJ5Eq\n2tkTt7Lv4rqRFlMVS+puranKcMAT/G0FAI8vC9IZ1d0inI4VSQhBTLcms4qwn/Kwn+6YzqIrruCH\nP/6p+xmEEHzp6PnECPHzX12Ff9wUAPTOLW49rPGlQTqjCXTTRErcAHVnf2aG+88RjT0xqxHz6m09\nTK4MudWxJ1WGqAz7Wd3ck1ZbK584om+8banao7ackF/j47bcWpAkks1dMSaUWW2Cgn6NXceX2paq\nnO0GgM6I5ZJ1XIw1dhxcWyQxrDXABkJK6dY9mzG+lNKAD0NasXmxpJEWpA4Q8vv4wVEzCfiK7pJQ\n+HRthq6NMHXHSlFcufQyFl9+CTJUDrFOpDSpr6+noaEht+NUKEY77mSgRJVLtUc4HTgjU1SlBJcT\nRAzWhCvwBKrbd+ytvXF64zpJQ1oxVQEnpspHLGnS5VqqAm7GnGlawqm6NEB3THfv/oVIVS93+qdV\nhANUhAJ0xXR8mqAnbhcYtbMQd7VdLWdfuhR/9S4A6O2b3XpY48uCJIxU6xBvvz5Ns1xrbZEEJQGN\nkqDPFVXdcR1TwprmXvaeVOG+RwjB3pMrWLOtd9iCqr3Zic4Ypo0rYVN7brPlpIStXTF2qU61/dhj\nYjkf5cH9126LKucYqymzRZWddFAImBLWtVpZlntMLKMkaImoWNISVc5zxTDwyYvW/x0QVdI0iHS2\ncevtd/DJ1g5ktIPzzjvPbZlVKMebQlEUOOdLHi1VBR+onknQ47b4jF3h2cEruKo9AkvaRTDL7Ymk\nN55EN0z3+zVMScKT/RcO+Kw2NFHD3a7XUnXdtdfwRk85BPaitTdBOKBx4QUXML66inMuvNRtX1Me\ncixVSV5Y0+Jm8jmWo5k1VpD9w08u58Dj/5v2oI8fn/rfNDUtA2DuSXUAbO2MU1sZcmO2wHL/Sazs\nrspwAE2k3JvdtqVqc2eUI2aOd+tgAew7uYJHX9uIYZpA/ifWlABJ/R7Tx5eyuTPapzdsa0+cgF+j\ncgca9ZrSKjI605O5tufEcp5+bysJI3eB+VKmRK7r/rODvNsjyYKJVZdSsq61FyFgtwllrAhaBWyj\nCYOEYRJUFqnhQUr49zKongFT5g357cJIsGRJAzEZ4L21dxNp38pNtz7jhghk9jBVKBQD4Ew6ecz+\nK9or6y5VYSoyJl9nkgv4UgIDUmFprvsvZqRNtLppEtM9MVUBH/GkQXc8iU9AeciHxJqoDNOkq7OT\n5//xf4DVSHfx4gZutO8cBdBt9/0rD/upLgnw8to2vnfnK9z49Oq0ce4x0bIiferLJ1E9Y19mTCil\n8brrUvWw7BYoWzotN45fE7yzsZPnVjW7VquOSJKqUqutSFnYEkm33HkvXdEkvXGDydXhNFfBPpMr\niCQMNtrxR/m+03Xcf17BO31cKRs7YmnZfwndRDdlWp2toWCaVo9Bp1I9WBXADWkVG93e57SKqOpp\nDa4NU/Z5bpiS1t445UGf60JzRFVbT6IgYoidXpcft0SYNq6EcMBHqX1DEdcNErpMuzlR5JFVf4fN\nb8IxF1nB5kNFjyE0P1dd10RXHKrCVtzmDgm
qQjg4FYoRRbn/srLi8s/yj/OO6bPccf9VlwbTLjiW\npQZ3YumJ6yR16X6tTm0rN6Yq6CNix1RVhP1unIlhWo7EX/3yF/y/4z4FwDELj+eOO26jru5sGhsb\n8fk0NxarIuRn3vSUi3KrnS3otMKZUBZkXGmAifvO5/0tXRw0vRq/XWi0oaGBmbZ7cG1rLwL4y1ub\n+PKNz/O9O1/hHyu3Im3rjOOGKrM/39+ffpbzF1stGJ/+y6Ouq2Dx4sX89SGrLc5HzT1IKXMam5E0\nTDojybTG1h2RdKsOWNl53TE9rU+h9z0xPXt/xoGI6jqdUZ1JFamSAE4G4LqWXnriOtu6Y/1m6HVG\nk/TEddp7E66oa48kaOtNuPFnrb1xWnsTtPYkqPGIt5DfR3nIR3s0URC1qpwhrG3pZfca6ztwGiBH\nkwZJw1SxU8PFCzdAxS5wwLd27P2mgdT81J93Hl1xSXlAIESqZdag0ePQvQX0xPbXVShGK8Pg/ivK\nK2tNecidJLw41pDxpel3hJalSuDTNEoCGpGkTsKwRJTAKoEQ0w23GGI4oBFLWJaq8nAgVWxTSitV\nXRNceNaPAfCFK8E03TtHvybotmOxKsIBjtyzps84q8LW2AM+jSN2n8Bzq1vRTTh05gQg1QD5/lsa\nEdLgw63dbOuOc8kf3qba7KJSRLnggZe5bPHP+KQtwl4Ty5FSsuzqXwGw4LjP88gTVvXmJx66j7q6\nOq6//no6Ozv539usVjsfNfdQX1+fs9gMKSXtkQQx3aAzksqCa7cz5apK0t1/ABvbY+6ypCEJ+DSE\nsB4PlW1dlmCdVJkSVU4Ry49bI0QSVhHQ3iyFXyMJ63goCVoWyWjScK1SYIlwbxHR1p4EtWWhtG3U\nVoRo6YlTCJh2Mdm1Lb3ud1Biny8xW1Qp998wkIxBoBSO+An4hu7SBpBGkksuvZymphvYY/95VIQF\nZ//0xzQ1NQ1NWMW6rP96bOD1FIpRjbJUDYlxZY6lKv0CJm3/nxCWay8SN6wyCD6rDIJumFZJhWDK\n/RdLmnTHdSrDfvx2qXTdkK4r8ZamawAQpRVgGu4FzqcJehKp+lZ7TSxzLUgOTj0sTRMs8BQ2PXRm\nquealJKernbi2z7mH6+8wwMvf0w0nuSdW8/hgNjbRAny2DqJIWHWpArq6+u545abAPj05xbir7AE\nmkAUMoYAACAASURBVN7TSmNjI5pmWcB+8sP/xoz3csXVv7bqY3liMzIv0EMRWnHdilGLJa2K7o4w\nao8kqSzxpwXZT7YLTG7ujLoWKt008WmCgKah2y63uC3QYsnslisprezNaMJgm13dfqLHglQW8jOl\nMsRHLVZpicpwANPO9PTSGzcI+KxYrqBPI5Iw3H0GfBpx3XouhPXbt/TG3Iw/h6nVJWm1t3TDpLk7\nTlcsuUPuzJ3BlLCtO05MN9m91rJ2lnoC1ZOGScCvYnHyTiAM//WQVUF9R7C7KFRUj6Ouro5jv3AC\nArhyyWVuiMCgXIBSWv3OIPVfoRiLKEvV0KgusS1VZZmWKul+hyUhn5v9pwkr4DtpSEzwWKqsyuS9\ncZ3KkpSlKqFb9WEuufgi7vyN1bvvgsuWcMYZP3LvHH0CN1C9LORDCME7S77Ag6cfDsC+UyrS6k19\nbr9JnH3cnjz64yOY4LF+CCFY1tjI7hPCtPvGcf8r6+l++x+c+YOTue/6Bo7Zu5ayWZYL8gdf/zxN\nTU2cecYPAfjrP57GV1mDlCbY6ddSSnRTcumSX6F3bsU/3so2dARVQ0MDZ9efR9R2jw3VNRjXTW77\n10ccc81y/vz6RjcWqT2SYFxJ+u/hWJNauuN0RJJ2WQsrFi7g19BNiW6YdNiuxM5okmiir7Dqiur0\nxnU6o4k0S5VXDM6sLWdday/hgI9wQHOLX/bGk65gM6V06zaVhnwYpklP3MrYLA36ME1JXDcJ+TQC\nPs2yVJWH0vYzfXwpm9qjrvsvam83mjBca91wIZF80mp1HnAtVQEnpsokYVsFFcPEjl7AbQF02WVX\nWOdp2CriKxI9bojA4LZjnztCA0O5/xRjmGEIVC+67L+BKAlaE2d1hvsPaVkYrLgqv9tQWbNbyzgx\nPF5RBdDSk2Ca3WjWEl9Wm49xVVWc9ZMf8c+gj/ZInCuvvJIAhnvn6LTBKQ/7rbpWQnD47hN47YrP\n9okXKg/5+a/5VsHFUEbwsBCChlOO55Q7nseMddP+zN00PtGJEIJL/t8snv2wGYBky8cAXH/tdTx+\n2RO8+c4H7Hvwp4iXh1j4gx9ww01WNuHiX1zF4sUNJNv9hKbOAs1HfX09119/PS0d3dxy570kCNB0\n9S+4+MLzXUuWaZpono7e3mxChw+3dnPn82upCPm59m8f8KUDplAa9NHWk6CyJEAkobsu21q7ZlWz\n3bewN64TTeg8smI9pSE/X9h/Ml22MJ1QHqIzmqQ7lrREkXAErkFMN2i67mraOnvY98s/tLcdpL6+\nnupqK5Ztc89ENodnupmfDYsu561332P//WdbWVW6ZaG75KLzGWe/Z2tHL4uXLEUg+Li1l+uu/AUr\n332HA/ffh4af/ZJo0qS2MrWfhoYGZowvpTOm0xlNUhayejAGfRqhgEZX1Mo2dQrVOt9f5veY7Xvd\nEaSEdXbT6j1sS5VTQsGUVvzgWBFVQoiFQBNWqusdUsorM14PAb8FDgZagW9JKdcN9zizIu1rhea3\njouw3W4p2jm048TZjj9ouSRNAzRVUkMxFsm/+29UiSqAX35tDvvtkt7rzWpTI6yq6kGf21BZE1YT\nZOe5k8rvWC2au+Nuv0CfJlyX1pKGReiGyXHXPUtXRKcs6KexsZElS5ZQX19PxbGn4tcg6NO45JJL\nmVhdTkNDAz6hkVkeKOTXqAj7rfiujAullJI/3nYNW373CEa0EzPWTX19PY2NjewzqYLPxp7nd4/+\nxQ0+Pe/88wiGj2GvOfOYvNs+RJIGP6tbgoZJZWUpF1+2iNtvvpHPnHk1qwPjOeOsepqarkUCFy36\nBWt9U/nfV17njnETMSPt1NXVUVVVxbnnnktjYyOxpElCN7ji0osYX13l3inrhskTb21CABcs3IdF\nf36XF9a08vWDprGtJ8beEyvojRuuqAr6NapLA7T0xAj6NBKGya/++j5PvbsVsEorfO+I3Qj5NTQB\n11/9K5o7Iyy7+hckTUlSN7ni8ssoq6wm0dXG7bfeyZH+PSkrncHPrriYG5qaOPvsswFY8exLTFh4\nFps6Y1y39FJuvuFGDjj0SH73t1fY6PsNjReezrJfLeE3nvfccuOv6RalrJ9yDB+3RYm3TmRiZwe/\nbmokKkIQ+hTPPP5H/miLzsWLF7OyOwzBA/i4tZcJZUGuWLSY2qoyNEy2dka59sqfU+HTXAvgG2+8\nwbx589Jcr16RNpDg8j52qu1b8WjWsqRhsra5l/Kgj1rbTem4/yKJsRNTJYTwATcBnwM2AP8RQjwm\npV
zpWe1UoF1KuacQ4tvAVcAORpXnGMfC5AigkH1di3cNcTu2y89niyo5POVUFIqCQ7n/hs7XD5rm\ntmWBVFyQwHL3lQb9bgaYTxP4NOE+L7MDyL2FEZ2yDY4LUAhLoPk0jZqKEO3RpOvO6+jooKmpiX89\n/wIV4QCLFzdw6y23uIHgummm9e+zticoDabHHDnjrq+v5zdNjZzy3W/Rsm0rdXV1NDU1cc4553DO\nOedw9+03c8qJX8Q0Terq6rihqYmAGWPa7rPY2hNjcmWY8rCfxUuWcv6ll1NVVcmZP/4RZ5x8AgDf\nP/si6urqqKgaz90vfMy7gX2oPurbVBz0JYQ/6Aa3NzU1cWb9+XRGE1xw6RXcdPs9tNufyapGb/K3\nlVuZP3M8n96nltKgxnOrWkjoppUpVx7Ca4STUlJTFqSlO0FF2I9umDz9/jZm+bZSY7Ty8IoN6IZJ\nadDHOeecwz+e+it33H4r51x4GZG4zqWXL+K2O++mt6uDxuuv58wfncqHm9poW/ceN9hCZ9myZSxb\ntoyvfXo+APvM/4xleTv7LE5efCu1J5zPO4FZfOYXf+G2e3+f9p6fnn0Oz/RO5aNPNtH1nz8RmjqL\n7yy5g7q6On736OMA/PWP/5uWAPDYA3cDVsPv+gsu4s47bqelo4ttHT3ccetvuODiy0joBnX159J0\n46/p6Oyi6cabOKv+PGIJ3V5+E20dnSxatDgtCNk0Tc6pr2fR4gYWLW6grv5c4kmD3rhOc3eMs86/\nlIsafkFnNEEsacV/vfhRK/P3mOAKLccKG0042X9jIqbqMGC1lPIjKWUCeBD4SsY6XwHutR8/Ahwn\nCqX4k6lbbgpnODssqlIWL4A+BeIUijGDKv6503hjrYUQlIb8RJLpMVU98VQJBCCthUdF2FmmpQVM\na5pgQnmID7Z0u9tubGwE4P7VawlO9nPH727jR2ecQWPjNQi7V59/kBYCIQTV1dXU1Z3Nkl/+nLhh\nutuvrq7GlPDD037ENVf9PG3fL/qsfoAtXXEW7FlLScDnBnOff/551JaHeGuD1el+zbZeGhsbWdca\n4XPXL2eKsZU16z6h8uDj6Xnzr5x77rlcf/31SOCme+/n4Y0VlMw8is+cMY2lP/8RS5YsoaOjg9Mu\nXMKmjhjfOmQ6Vy5dhBHbnX/Gumn5wt7EkiYTyoNcetnlTKq2XFEdHR1MmX0yrb1xfJrgzJ//mmRg\nfybF1/Gvx59i4omLeGVdG8/eey033HADZ599NofOP5yb776H3z/zBmYyzg9PPYVl116Jpmksa2zk\n0TPvI7buDSC9hs9tV13BPpc9TmiXWURXv8z/nLuYr9/8Al+dtws3n/9fTDxxETVfPo/rrqt337P3\n8WcQ+PuHbHnwMuIfv8l3Tv0xd7+wjn8tvZK7njsJgGTLJ+5+GhsbSeDnCWny48uvonvFYxx5xq94\nKjANgMPO3I+77vsld9x5JwCnn3UeixYt4pKlV3LHnx7n1t/vhYx2cfpZ53JJw2IaGpZw5533ESPI\n0qVLuWLRYu688z5OOcXqHXnXXb8lTpDzL72MS6+8kSf/9hJ77j6TbRc3cOXSK1i1tYfNnTEmtbxG\nQ8MTNDQ0uDXYokmD5Nhx/00F1nuebwDm97eOlFIXQnQCE4AW70pCiNOB0wFmzJiRr/GmI82UEIKU\n+y82RFElbXef8KW2q1CMRf4/e+cd31Z19//3uVredjaBAAmz7JVCaaEtBQp9Hlpo6Y9RKCskgRBw\nnAVJSCxnb8eBBMgopRTooi2ztNAHCrSFEjaUVUYhELI9Zc17fn+ce6UrWbLlLdvnzctIlqWrI0W6\n93O/4/Pt5rl/MBBElXUphB2pcsWLnm0LBLvGqsjnRpBc22SLKp/bRWl+8r/HkCJvUhu9fYB94OKF\nyHAzSMmiRYvi6Z3U+X1tYaeBmiMxglETUyYEQ3M4Rl1zOO6tZT/35Ade4c9vfUnUhJGl+QghKMnz\nEAjH4jVJY4YWIoAPrIHLf3lb3f+1393O9y+4kH/5Cjln/BxqVk8DoGrRMu770E3+mBOR4WY+KTiY\nd76oZ29tLWtravjQ2B+8X+Ffj93P/TXVHHbxLTSW7M/NC1aCbyzPP/k4f9q4kcnXXgnA2rVrOX3a\nkYQGH0RFRQXPf+hh8JEhfraykjJZz+9qv+T6lW+z/f5Ed2J9c4QHmw7HO1Qd0BoOHhI3ZJ1cMQN3\n8RlEdqvjp50iBZgxcyaRutGqhgyYfNvvKS4cRcMLvyVWt53av9/PkLMmcsmMpfxm1Sz2NoVZ+5e3\naf74DaK7P8PIL2HvSw8T9H2Vq6vuwDvyUGKBesxQY/x5hBCsq17Bb38wk+Ljz6XoqDPY6snnslMO\noCTPw6bnDUZesZrgJ68T+vIDCk+9lPPX/4NdeV9jn0tUA0O0bgfmicfw6y1bOfK8cXzbfQC/fetd\nHrxyAa78QZxy3TLGnPF1BhV4+dizPw993sgTK55GeI9n2A+Opw542oxxwcIHKBhxEIaM8cidixk9\n/koqKyupravDyD87bh/x0osv4H/7D3p+XJZIKTcAGwDGjh3bM+2cZjTZisHXQVFlmkpQ2cW5WlRp\nBirS7NYidRgIokraM/kEQggKve6kTjJDQFMwUVgupYynSgBK8hNvUeoQ2mFFPgJhlYYp9LnjKTsj\n7wDMUACkZM6cOayrXhH3PEpN87WFECIeVYjEzPgaIqaJIURS5EsIwdAiH1Frn7lvmS++bufa870u\nRg3K54PtDQTCMZ5+bwcFhLjq/DOpqprKxXe9gCv/q9xQXkFxSTE33rqYwiO+ScNrT9Dw8iMceO1t\nLH3iPX6+eBkmBr/6cAd5Y0bxwB0rKC+/iVN/MpGbf/8Wj2/5gLJvjOXpP/2R8eMnsKZ6WXydP3/x\nWUpP3Yea29Zx2PRfcfqR+xOMShYsWc49546n7NSLMApK4+Lo8rm34R36FXY+vAJXYRl/Zzybn/uI\n9x9ax6bfPsHIn57Bbzbfxp827kdNTU38333dXZs5bfJqduYfynlTZvGkHIzvPy/w89+t5frrJjKr\ncgHnLHmEfzQMpnzKFN70HE7EtT9n7idZvXUrlZV+Nm+6jQMvX8yrkTz2PfJkjjj4QIZOuoGamtUA\nrF69mqlTp1L7wh/xjDgYd9EgTg6+SuV55xIzJW//+T4eeXMH+QePJW/0cTzyyid85+hRfPbaczz7\n5OO4iofiHTaav79VxJP/Vo0Rbu8YCg4bhBlqIhaogyEHsen5T9TnyH0QRsEn1G/5I8Gt7/DUnx6h\nIRhl+c//yAehYvhsG7v/dBs3jr8yvra1NTUcNPM7hKImdQ1NfPTaKxx/dH6XFcfnKJ8D+zt+H2Xd\nlu4+W4UQbqAUVbDe+5gx8CRmWWJ1/7U7/Sdj8MhN8MXr8KO7YN8Tum6NGk2fwjZF6j76fQ4g9ZSy\nwOciGDUZVODF7/czdepUGu2xMl4XlZWV/Pr+X8bvX+LLbNo
31BpPsrMhFBdUNTU1FI8cw0Xnnc2E\nCRO4664NVFRUEDWV0jE6cACzo1vOkSnRWPpU4rCihPml3bmYbntH7VvC29vq+aK2mZc+3sOPTjmU\n1cuXUJLv5dyjR/LG1jrGVcxhV30TT3wUwhDw3N2LGffTS9n57P289lktL328l1mVC8k74DhCn76J\njIaprq7mmFGqiy5v9HFqrY17qKqqUs0CVkQtuvcLhOEif8wJhEQeJx04iIZghBmz5hLa+rZ6/IHH\nMWXKFMqnTOHl2jxKzXo+/OefuPQbhxP87C1u/9PrFJeUcu4lKi125MgSqqurKS8vZ9CgQQwaNIjr\nrruOpdf9kHBM8pTvGxguN4PqPmDSdRO4rXolHrdB1cVfxzNkf14qPIWP3QcyOvYFq/0zGV6cx7pV\nS5gw7mpKd/8b79ADqDeK+crIYhYvWZJUyF+z9jauuexitqy8gu/yGr/bVM0N02Zx04zZ/HLDbVx8\ndAkvL7+cc8PP89H6cbywbip/Xl/Jld85jv88cTc/Gm3y3roJfDf4N/456wzODjzNF5uuZ/v9t7Dr\nj0s45ssn+Nfs7/C36d/i3OAzbP/VHBq2PEx423vcd/tyThkzmPvnXsW2u29i288mE972HitWror7\nk5WXlxMONbO6uoZQJMqJxx87EGbHvQQcKoQYI4TwApcAD6fc52HgSuv6j4H/k7kwpThug+A4kfMU\nAAaEGtq3rYYv4b0/QcMX8OkLiW1rNAMNU0eqOk3q7tGeCdgcjsYLy7/nOhA8h7Fy6UI2bdzIuPET\nIE+li4rzM4sqW8DsaAgxemghZWVlXF8+jceFlwOHFHD1woUAlJUVIqU6eLU3UgWJNGXU6j6UUvk4\n5ae2EpJsfjnaGnOTbnvHjirjibe385stW4lJOOvIEeR5VAfiuUfvwx1/+5BH3tgGxcMp/MoRXHrK\nAYwaVMj61UuJTp3N08T4w6ufs2nTRlyFJ9L88atgRqmoqGDR0uV4ZBj2OxKAWMNu5lX6uWP1EoQQ\nVFRUENmjAgZFx50LwEkHlLGg8lY2rlvHxBsm8/c8N/v/7+WsXTmB48+5CO/x32Xy/xxBvsfN8sUL\n+PLWGl438jn3ihuRb27jw9e+iBdfO8VCXSBMMGoy5cxDWf/Mh1xx6kFcM2cTw4rUKCOfy8Xphw7j\nwhP348FX4McnjWLW987ElMoY1kCwbOkSmkJRbn7wDRpDMS756gGASPL4mnxTOXOq/AghWLHITyHN\nFJUWIZDcOP4qVq9eRkMwRlVVFQaSf7/+MjdOvCZeb1ezYhEGJkUlJVRV+tm4YQMTJkxg6eJFzJ49\nizvvXB+Pvm3atJGJ48ezctkSZt08nbU1K/HIMDFUdyFmDOFyUzF9JrdVr4wL2QdvvA/h8iBcbr57\n5pn9XVDZNVKTgT+j2t1+JqV8WwgxH9gipXwY2AzcK4T4D7AHJbx6H7tjz2l9YBjgLYJQXfbbkRJ2\nvJP4fed7Ov2nGbhIE1zdK3v6v6iyYlX28cMel1LbHImnln7+4hbKTjuMjetvZ8KkySxbvIC/zH8S\nSE7/pTKiVAmYT/cEOHnMYG6cPov/7g7w+B3/YPSQAoQQLFy4kCFFvniHYUcPY27L2RvUWB0JaYuN\nh1st9ALl8ZSJEw9Q7u0bn/uIUYPyOeGAhDvzwcOLOO2QoTz48lYOGfl18r6o5/KvHUihz4VhGKxe\nMp+Fj73Db176jJh5KF4Z4+2//obV8+dQU1PDM88+R+2o71B45Lc5et8S/ueqy9mweRM+GUSgaqqu\nL5/G40DBoacQa6rl7rVLGV5axE03XMeiZUuY9ts3+OeHghvLp/BqwUmQ5+Gco0fgswqua2Zfz/dq\nnudPb23j39vqOWR4kWoGMJMjeLamvvIbo7nhjENoCEUIRRO+W26XIBiF5Rcey5SzDmPUoHz2BiI4\nDcdtu4INPx1LOGZaI28SaTO/309DMEIgHGN4sS9u3JpqgVBaYFCKh/XVy1v4VOV53KyvXg5AVVUV\nN064mmrrfrdXr7R80JRIvnH8VfG/ramuBil58cUXefHFF+M1aJMqZnLHhg24icVTgNI8DuH2IFwe\n/vrUX5hxzuEDQVg9Djyects8x/Ug8P96el1tEjcpdIgqYYCvEEKN6ozbyOKM24zBznfV9X1PUNdz\nIBCn0fQKMgaiA4PN20H/F1V2B6UlZ+zBvnsDEQ4cos7gf3HWBMxIEGSMBQvmA4KDhhXy0c6meGQr\nHQcNK6KswMOz7+/k4de/4Nn3d3LcKFX3MHpokRJyMmUdHTyGeVyCYEQNdbYjVumK3g8colJ+0885\nvNWo2DGjSjh2VClvbK1j/OkHke9JvM58j4trTxvDNfe8xMuf1jLxmwexX1lBXKwUeN2MO20MD235\nmFB+Mdd+8yBGlOTFReprr73GQYcM5RXgf48dyU8nLCGGi7LSfAwB5eXlzFmwkMcXqfmE++WFGVRQ\nSlXV3LhFw9lHjuCJt77kkO9P4pGn3mfiV/fH53bhdRlI1Cy7bx42lF++8CkA/+8k1WkXkzLpQ21P\niJGm6tiUkiRbC7cV3YpKGZ9JGI2Z5DmigPb7LEmItNTJM6aZqNsDksRKqnBJdx/n76k+VXakKZ1P\nlRCCNWvWUFVVxde+9rX4/ZYtWYREUFqar9KTNTUcMeO3fO/in/Dkv3fwwt+fp6Lio4GQAuybpHpU\nASDAWwiRJloWNmRAmipSVbo/HPgN+NdG7aquGZhIaXX/da9HW78XVTa2vhhkzQXc2xSO10EJbwEy\n3IyUknlz57J40ULuv/Zr/OqlT+ORn3S4DMHJowfz8OtfxG97fWsdAth/UD4CEY+UJboQO3YAs+cP\nRmImEWsmWLqaqlGDCnh6+rco9LX0vnLidbuovvh4Xv7vXs44fBheR8ejyxCcOHoQ91xzMrsawnzj\nkCEU+hwiw2UwoiSPh6d8h492NnLigYPxWtEc+yBd3xzmlU9rOWrfEtwug6oqP0OKfHgsA8wdDSFO\nO2Qoz/9nF/4rz+X0Q4fF3x+3IThlzGAOGlrI6iffJ89j8OMT97fMWhN+YdecNiZuGPr949TYnViK\n2rF/t8fHxEyZ9L7Y76t9v5gVBXQKVvv+UVPGAwipz6MMOLtOnGQSXJn+lirEXIaBv6qK4cV5zJ9f\nRXl5Oa8PH0qD1ZRx2tdPpay4nc7cmp5DmrYpXuI2IVRdVTiQvYGnjEHtpzD0UBh8EMRC0LQDSkZ2\n29I1mpwk7YlK19OvRZWUMn7wswvE7UjVnqZwvLD81Omb8Azbjwsm38D6DRsQSNZVr+DSkw9IGs+S\niiEEZx85gr+9v5OfnHIA5x27L1N/8xqXn3JgXKTYESpTyk71HHhcKtYWjpmt+gy5DdUBaLuXZ96e\nwfBiH98+bFjLsT5AkdfNMfuVEolJivPcLQSc7T4/tMhHcZ6nRfSl0OfhuFFlFOe5MaW63UxJO1Sd\nfxQl1t9dKSLG4z
K455qTefj1LzjriBGUFXqShI7bMDhseDH3jjsZ04TD9ykGWooduw4p5rj0GMkC\nUojEAGi7ocC5HiEEhhDEYjL+GlJrmVO32xs4BZJ6Xeo99/v9mKbJuTXPxedeXvjDC7jmtDG9tVRN\nW8hYmjNqAV6nqMoCMwaNO2Df46FwqLotsKdLl6rR9Al6YO4f9HNRtaMh4SFlRzgGWcOWa5sjlrlm\nOXu+cjS7GsOsXLGSKG5KShOO7K0JIUPANw8bxjvzz41v/28zzmBnQ0ilGx0Plp3s5LStFYKRGFKS\nFDlKvZ/tAt8WxXke5c2VJlphGCKt2Mr27y5DxN/rqNW1aOuQqCV8inxu8jxqwHU6EVOS7+GGMw6J\nR7acthAuQxCOmpx+6DCaQlEaQ1ElfByiyjSlI10n415hqa72HsNIilRBIoJl4zaUeastpmyPLKcH\nWUc6O7sLey2JdKj6/Nh2Il53v2/87duk61ISAjyF0Lgr+7qoWBgCu6BkXyhwiKoeMEHUaHIKmaaj\nthvo16LKxrnvKMlzYwiV/rNTJhfe8Q+rCFsVlqsDZRbbJVFrk+aPCJEolKcLDro+j0E4qARK6vDl\njtIT6Z9ELZD63Sl87PqwVKHjMhICKZKmhsxtCIKWyIlZUUC3kSKqpEzaVjSDV5jLJQiGE00AItN9\nIrH4v7naPrhEor4q9TX0Jvb6rcAbppR4XAa1AWUfMhBm//Vp0nYpWaIqml2kSkqJaNwJSPAWI73K\n9JdmLao0A5C4TUn37vv67Z7VmZ5xihm3y6Akz8PegCrWFELQEFRDkQUthU9r+x37b6lpLYlMPE46\nbuvQK0mQ71GF2h7rp6+QeCvs1Fnib3Z9mJEqYgwRT8XZl87XbIuGSEzGi9ANK5pkY4sdOy0XirZM\n7dl/l6iIWiwm09aiuSyhbUoZ/4zY/+72ZS4do+yXEHNE1jwuQbM1asnjzqHFalqSLv0n7PRfc5ui\nyu/3q/mRDdvU5goGs2TlGvXHwF5tq6AZeMiYNVpFi6oO4YxYOIWSEMpWYW9TogNmTyBMWYEHZ3dV\nvLC8FSmUSVQhlZBwPlLKzkeFhFAptcGF3dsS2tXYL9tZX2YjZXrvLpfVqSelJBJTItV5P1tgRU0z\nLnTsx8S3bf0r2gKiORyLR7RSn0ttSxIxzbQNAK6UKJnzdaTW7eUCymgVRw2Yes+aQqpQvS+J8gFH\nvEspTfrPWwCRAK11/0kp4x58996xEoC77v8j6zb9AlOCDOxu9fEaTb9Emt2e+oN+nP5LbXm3EQhK\nCzzssdIgpqkGEA8q9GLYzTYyEelq7ThppKS1bCSOlJfzto69lD6PSInspP7TpBNVbqfQiZkt0lXO\nAvOYlLgNEY/OmKaM2ydAQkDYKbBUcWt37YUipiU+Mq8HwOM2CMfM+Pbty44Yu3YnLiEwTfs9V6/d\njtZpUZXDtOa/4ilUHXy2OWganAPW3/jHevhuPnfd90fGjbsWUfAkollHqjQDENPs9s4/6MeRqhbR\nIwshoKzAw54mVcRe2xzBlDDYKrpWNgjZncelO4TGxVjKbaqoOfv19zdUfZnCtN4L++1ITf1BQqDE\nTNnC0NPGYxhEY6a1PZFUnG0/DyTXOqUTTEIIvC6DoGWumlqk7lwPJOrZ4pGq+HzJ9K+9t3DWBqaK\nRV1TlcO01qXktaYkhBpb3YQtrPYpMjCl5LM6k6oqPyJ/MDTv1QagmoGHjHV7PRUMEFGV2v6+1n3Z\nbwAAIABJREFUT0ken9cqX6rdjUpcDS70tjs9lxqBUc9l/y1ZWJmy9VRif0eQfIC3xSukP8DbQsju\nVksnhjxuQ3lHWSnEuKiKe1Op+xlWVAvIaDXh7CzMJLwSf7dFFdalJRJzTDUbIvmz6YxO6UhVLpMo\nPmiBLarCTa1vwfLgG5IvqAtCcxQqK/3IgsFWobqOVGkGENb4Lh2p6gSt1VTtW5ZPUyjG3kCEXY2q\ntmpQoWVDIBIlDfb9M2FHJpLreOzNiKRaImkXWg1Qkt3lZVJUJ52oMgxVIxW27BjSeUA5OyB9biMe\nTZIpYgdUJHJIoTdjii7PY1Cc506qrUulyOemOM8dfz3xdKaZW/VUNgKRcJSXqaIq99arsWht55OF\nqLIF1dqaGk486hDKRuzH9ZNu4M4Nm3j74y+Qgb3omirNgCLd2Kduol/XVBlCUORzJx18DSHYtywf\ngM/2BNjdZEeqkp3T4zMDWy1Ub2mp4KzFSgqQydxLD/UkSlMlIkjKd0t1+KVL/4HyUmoOx/C4jLT3\n8bgM9a8jiDu1g6PjzSF20qUPk9YnRJuGqYWOkUWGEAl3dSlzyk7BRhgg7SHcyCQh5dE+VblLPIqU\noaYKINyQ8eFCCMrKyphSfiPHHvYRomknK1auwitixDwvI0I6UqUZYMRUDTVG90uefiuqsGxY8r3J\nylQA+1mi6tM9AfZYXYBDrI4621sq21l9guT0okz6W0J0OYvXByLJ9T3KtqCswNsiNevEZ4mqAm/m\ns4uhjqHRqR1vTvuDrsZwOMSbOeCmng712VTXUyNVuqYql7F3Pmn+jXy2qAq0ugW/34+MhhCbvwu+\nYoThZtGSpYinquDVX+qaKs3AIhpUB3NXdsbYnaHf7lkz+UIJIdi3LA+Az/YG2N0YwrBsFjqESI1U\n2c/jvK1l8fpAQ+AUOwnB2ZrQ9LldDC3yJdU7pWL7U8V/d3S8mSlpxq7EWa9kmrIn6h/bTaI7VcUI\nnS7quqYqh2m1+08N/W6rpgpASAmhBvCVgDAQhht8xap7MNLchQvWaHKcWBhc3h4xE+y3e9ZMJ2IC\nVaw8rNjH65/VsnVvM4MLvfH0kIAkldTWP4GzAFs9NJE2TPhYWfcdwKrKEInCdEn2IqS9NgUuIRwF\n5N3XHCCs57EFS06m/xyfP2nZTtjomqocprX0n7dIXYZb7/6LbycuqoSqJ/FZI7iCdV2yVI0m57GL\n1F094+/Yf9N/pI+C2Dd9/9iR3P2PT5ASzjlqRIvdV+JksfWDj7MA2/m4pG1lUZ/V7xHJqajuTMuF\n47MGZbeZ5xqCtAO7c4nUSJXu/usrxNtWW/4py+6/+HbCDZBXqn4VBuTZoqq206vUaPoE9niaHuj8\ng/4cqSJ9lMkWSZeeckD8IH/6ocPiYksI26cqu5SdM62V/DyOtehIVbxWzU7NdddbYRgJXzBJ94o3\nicOjKge/SUmRKjO5OF0PVM5hWpvL57MjVYG266KiYZXms6NTho5UaQYgPTRI2abf7llbM9sUKK+q\ne645mTO/MpzTDh3aIoqUbR2nM63lfJxIc9tAxk6r2kON3d2UfrL/PaLdHEGyt2sPhM7FSFWiUUL9\n50kzakeTg0iTjKcddvdfpKntHYstnPIdkSotqjQDDXv6QF+IVAkh/p8Q4m0hhCmEGNtVi+oKWt3d\nCHX2/q3DhrH5qq9S6HXH92H2rizuN9VW959I7f6zLRUcPlXx29r1EvoVIi5
2VGounWt5VxCf4xfr\n3giS/W9pP08u1lQ5fdRSu/+K87q/C0bTUWRm52e3T51xh1uf/wckUnzx9J9IXNeiSjNQ6GPpv7eA\nHwHPdsFaupxMNUwipWVPptk52Sm9NmuqEGl3bc5HZZpDOJCw349ItOVw5C59HmuzEUu8dXekKhwz\nrcHnuSeqnI7/Ep3+6zO0lv4ThuoAjDS17TVlCyc7OgVQMFhdNuuaKs0AQfaMk7pNpwrVpZTvQG76\nL8lMRVUk6ntSb3NeymwN0FNMPpPH1CQKhdVdc+996imcIqS7olSQiBh1d1ouPrzZGtKcizgjVWqd\n6oZcG/ysSaGtnZenACJZ1FSlpv8gEakK1bcu3jLRXKvqtNw+yB80sMPvmr6BafZYPRX0YE2VEGKC\nEGKLEGLLzp07u/35JJlrqgyRbIOQdoJMltGlVIGWSBuKNLdlt83+iHCIkO6qpwLlnC4ERGJ2pKp7\nnscp1nJVpDgjVaaUccPPkrx+3fTbD2hN7AjwFljdf23spEKW67rPIao8+eDyQaiu3cWeMtKsBJXL\nC9EQMpTZ1V2jyRmk2aMH3zZFlRDiKSHEW2l+zm/PE0kpN0gpx0opxw4bNqzjK876CTNHmlJcEJLc\nzu1okpllqCrV10rKhOmoM+oVv+8AJamdv5tb5XwudVYi6L4oqtFHPJ+EnemWiVE9up4qx5Fm5pqq\neKSque30n+1lZdswgFWsXgzBBtoz/8/v9zNrxjT1iILBSJeXubNm4Pf7s96GRtMrtPZ96gbaPGWV\nUp7VEwvpalrbXQhBvLU/1e08UVyeXboubsEgZdwQMvVA3tooloGCyxDxeYjdHdnJ8xqEorEWI4q6\nGpehRtX43D0XWm4vAuUwr3yq1Ptekq8jVTlNqwcBS1SFs+j+C1miylfseLjlVRWst0RZ259dKSW1\ntbXcv/FORrr2cNOYj/jvl7u592cf8+Orb4jv+zSa3KSVxo9uoF/vXTN90VVxuS2q7Psm36c1S4bk\nbdn3TxiBxuuzHH9rbT0DBY9hEI6Z3R7Z8bldDC/pfqHjnDuYqxiCFgalJTpSlbvYrZqtRaq8BdC4\nve1IlS2qbBd2SESqQvVtPz7+lILq1asoEQGO2XkfotjNaDdsvu5UzvTPH/D7NU0O09b3qRvorKXC\nD4UQW4FTgceEEH/ummV1jjZn7TmKy+P1Tin3zrZjL+5aHX9cyyG+iecY2Pg8Bj63oXfCPYgQIi6q\nmkLKr6VY11TlLnb7d2uRKm+hKlRvK30XbiQe2Yo/3PKqsgvVs0TEIlRNvpTvHORmxpNBHn4vwpmD\ntiKa21+bpdH0GPaJQ18RVVLKP0gpR0kpfVLKEVLKc7pqYZ2hLQdzZ3F5XIClRqoyDGROxS4PijmG\n+CZSicndfwOdAq+bsoKemb+kURgO9xA77Tp6aGHmB2h6F/sgkKkFXBjKALQtR3UplajyFiabtdmi\nKliXdaQKQMbCPLXhVgDufzPCmhfCiGAt8t3HIBbJejsaTY/S10RVLhCMxAhFY0m3tSVhBJn3R3Fx\nlaY2Kh12C7/ta5VuNIoeqKzpLZwR2G8dPoy55x3B1LMPA7TYz0naOgjY6b9IoHVRJKXysvIUkBQj\nt0VVuJFsC9WllMyZdTMFO19ja3QwW7du5bQLrubTOpN3Hr0dGQ1mtR2NpsfRoqr91DVHqA2kP1PK\naP6ZMmg2Ha04xSRhn/0nRaoM+/ntbWVnJKrRdDXOfYnbMPjhCaPwuV2YpmRHQ4hAONp7i+tGhBCD\nhRBPCiE+sC4HpbnP8UKIf1pTId4QQlzcG2tNos2DQJY+VdJU0SxvQfK2hKG8qoL1iVRjGwghGFqa\nzyn7e9jv1AsRCKqqqvg873AO9W5HBOuze20aTU+jRVXHSRoVkyGlZ5M6vsN5X6cQy6pQ3RpHE3MU\naaXzvNJyStMbOKOmzuv257Uh2D9FFXAL8Fcp5aHAX63fUwkAV0gpjwLOBdYIIcp6cI0tySZSZddI\nRQKtbUiJKk9h8o7MsLr/kAkfqzbXJJl6xQ9wC4kY9hXIK0F4C/naxdPwEIPPt+i6Kk1uokVV+3AK\nqaijsrzt9F+iuDweReqE7HEJq23dinzZBy+nPYNG0xuIDNfN/n8QPB+4x7p+D3BB6h2klO9LKT+w\nrn8B7AB6wESvFWyjwtYKQr2WqLK7+zJtJ9Jk3TdlW3mWbgxmOarGjMHu/6jrQw4Gdz54ChH7nqBu\n+/wVXVelyU3iNYpaVGWFs0PPHksC2RWqq/vJlpEqZ/lBlkLLZagOq1Zrp3SoStML2ALfneINJpO+\nO9kXLPchRkgpt1nXvwRGtHZnIcTJgBf4MMPfe2YihIy1PVLDNvMMtyaqpPKy8ha23CG1d6iyjMHu\njwABQw5VByiXGwqGwJDD4POXIRbOblsaTU/Sw8af0Md9qpxn2zFn+i/bAkzH9bSaJ0shZBiCcMyM\nR84SkSpHKlGrKk0vYH9H3C4jbnjrvF1d7/FldQlCiKeAfdL8aY7zFymlFEJkfJVCiJHAvcCVUqav\n/pZSbgA2AIwdO7b73jHTbHv4q8cWVU2Z7yNNlR70FNFiR5ZvRaqyHapsRlWkqnQU5DmMRN0+2O9E\neOcRiAbBV5R5GxpNb9DDI2qgj4uqmDPllyadkblQ3X5MS2NOkeZ+bWHPEmwtUqVr1DW9gc9t0BSC\nQq+LhmA0/j1xCqlsT0JyjdamPQghtgshRkopt1miaUeG+5UAjwFzpJQvdNNSs0fGwGjDdsSbhaiy\na6q8BS1TH+2NVJkx2PsJDD4IDIdxrDsP9jsJ3vgVbH8LDvp2dtvTaHqKHjb+hD6e/stUFtJm+i9e\nUyVbHFCSo0vZYWdWbJGXVOye5TY0mu7A7TIYXpIXHzRtY8qW6fJ+xsPAldb1K4GHUu8ghPACfwB+\nIaX8XQ+uLT1SKgHTVqQqq/SfXVOVxpOsI+m/+s9VpMq5NpcHRp2krm/douuqNLlHL6T/+rSoslN+\nqQOSbTIJmrSRqk6sw073RUyVOUg3206LK00uIc3EZ7KfFq0vBc4WQnwAnGX9jhBirBBik3Wfi4Bv\nAlcJIV6zfo7vneXi6FTKUlRFmzPfJxZRdU7OuX82tqjKtvuvuVYJuNJRYDiSG0JA2WgoGmGJKl1X\npckxdPqvfZjWfD6BSDrbbmvWXto6ctHyb9n6StmiKhw1EaSIqvg8QC2rNLmDKSWG1WDRHzWVlHI3\ncGaa27cA11rXfwn8soeXlhnTsrfIuqYqYNVgpTk3tgVT2khVifX4LEVV7afqMjVSBeD2wn5jYetL\nEA2nfz6NprfQ6b/2IU2H9047akRsgWM6zD/TiZ6OpP/SRanasy2NpjuRjsvUaQCaXibbSJXPEi6R\nJjIattipwXQix+UFVx40Z2naWfeZuizdP822fDDqq9C0A3Z/EL9Zu/VrcgKd/msf9vDiFnP72kjp\nJZt/yozF6dkGl5xCyp1y1qi7/j
S5gvOz6Pzu6MNfjpCtUaEzUpVJvMRFVZqOPGEoYZZ1pMoSVWUH\ntvyby8Pdf30LAPnZixBTzRAVFRX4/f7stq/RdAdmzxt/Qr8QVXaGraX5Z2afKqf5J50OIzmjXC5X\n6x2HGk0uYEpl8y8Q7Zmrq+lOsjUq9Oary0iAjJI4ZHUGphNVCHV7qKHtLgXThIZtyvCzYHDLJQMf\n1LnZFTB59ZFNyFiYiooKampqqK2t1RErTe/RC27q0OdrqsBtn20n1VS1PWtPkDD/TB5N0zlvqXxP\ncuhe2DVVOmKlyTGEUFHbvmqp0O/ItqhWuMDdxvy/cCs1VcJQoircaD1nK+lGaULjDigcmlykbm9K\nCBYtWcE7C55jSNNHlJUUUh+C8vJyqqurdS2ppvfoJVHV5yJV4ahJMKIGgUorUgXJ52tZDUO20h7O\nbbS4Szv2B4MLvQwt8mWsqdKaSpNTWPMoDSH6rPlnvyPbolp7VE04QMYwo+1hla77TxhKbIUb245U\nSRMCu5R7eoYCeuH2ccTZV3BgmcGBpWr9WlBpeh0tqrJjbyBMXXMkac5e6ldXZtFtZ3cMmjJ50GxH\n8bgMbaWgyXlSj6EqyuuYTGDKJFNdTQ+SdVGtNVS51fRfK4XqdqQq2Jj58fE1xaBpjxJVGSJa0nBz\nxyMvA3DaAWr9FRUVOvWn6V2kCr602fjRxfTZ9J+937eLbe2BxsKqvG1LJxnWwcSUssVctLICD6Go\nidfVdZozUwBLo+kxHJ9BiTrxECK5+29XYwiAESV5Pbw4TbsiVZ4s03/pRscIoW6PNGaOdMXXZELz\nLij8atr0n5SSiqnTeGDTg1wzbRDrbrma/Od8rK6pAXTEStOLmDGrxkHXVGVF3L3c+r5GTcmOhhDD\ni31IZJsRIhEfLSMRKTsyn9uFz9016tYWf6ldgRpNb+GMILgMgSkl0ZiJuwtPIjQdoK36pjh2+i+D\npYK0RtQAeNOl/4RKC4ab2k7/RUPKeb1wWNqDkxCCsrIyrr72OnxjPkN8voWVS58mhouysjItqDS9\nh4z1eOoPclxURWMmwahJvsfVIrVmn12r9F/y37JJ/xnCEmaye6NI9joz1lppNL2EQDVWBEJRmkIx\nSgu0qOpVpAnC0/b97EhVsC69KJJmoqbKW5B+G74SVVNlxlp/rsad6rJwaMa7+P1+ZDiAeHYlPP8s\non6bjlBpeh8z25OUriWn96IxKWkKRYmaLUPUkZi6zW17KlhImV2hukAQc9RldTdaVGlyBedsTMMQ\n5HldhKIxTF1L1ctk6/4sVK1UppoqKZWocuenTdkBSlRB6/MDQZl6AhQOb31Fbh+MGqt++ewFhPbp\n0PQ2MtbjqT/IcVFlp8xqAxHqg8nDOkNRE5chMIx05p+yzZoqYSQOLlpUaQYCIoN9Qr7HhUR9p2x0\nkXEPYw8ibU/3XyRD91/SMOUM+x27K7Ctocp2pKpoWOv3M1ww8ljlsL51i0obajS9Sdbp9K4lp0WV\nU4c0h5PD1DFT4nW3XL5t6NmWL1QmF/WuxusydAegJqeIm+Nan0yP9Rl1RoR10KqHaVf7d1uF6lKZ\nf3oKMu/csh2qnGWkClDPt99J8NmLENOiStOLmKb6brQ1R7MbyGlRZXcnZcJjRbLSWSq0pWSc0anu\njFQNKvQyXHdSaXKIhDlu4jaR4lelI1U9jF3b1J7uv1ArherZRqpCbcz/a7IiVcUj2l6XywsHnAq7\n3oe6rW3fX6PpLnrJTgFyXFS1hZ0udRZEKv+qLNJ/jr/rzJxmoGNYtiQ2OlLVw8RH1GRxEBCGElUy\nCrFIy79LU3X/eQuziFS1IqqkhKZdavhyOhPRVFxeGH2auv7x8xANt/0YjaY7aM9JSheT+6KqlZ27\ny9phOHcb0vpfWzqppyJVGk2uke4rZQjVuGHTFIq2iFYFwtEWaXhNF9GuM2sBHmv+n93ll7wxdbun\ngIx7wjy7pqo1UWWqSFXh4OwOTi4PDP8K5A+B//4dos1tP0aj6Q7s75NO/7XErpuyZ/U5SSeG4t1/\nbTmqOyNVOlSlGQAIAJm+DMcQIslJPRwzaY4kC6iGYJT6YEQ7rncH7TEqtNN/kHBOd2IXqvuKMooh\n6bW6/yxRlTbda8YSI2qyEXtCqEL10acpUWV7ZWk0PU17Ir9dTM6LqtJ8Dz5LWKV+720x5BRIdndT\nNpYKqY/VaAYSSTVVafYEkWh68ZQqtjRdQLuMCh2Rqkga4WJbKmRI//n9fuYtXqV+CdUrV/SKCvx+\nf8p2TAjshoKh2a/N7YMDvw7Ne2D727oLUNM7mLFeEVTQB0SVEAKXIeJdfWnvg7Omyn5cW9tVlx7t\ndK4ZYCROPFpPgYdjye369l1ipqQxFG1hc6LpBFJmX1RrGImZful8pqSpxFaaQnUpJbW1taxat1H9\nHqyjoqKCmpoaamtrkyNW0oSm3Vakqh2iavTpgAHvPwERnQLU9AK9ZKcAOe6obmOn8jJ1JDm9d2wH\n87YsFTwug0KfmwJP77zxGk0ukS4DbkqJacp4RNj++pmmJBw18bh0mLfLMGPgasfu2E7/pUuxmTGI\nBsHTMlIlhKC6uhoXMRrD9/DA+jXUPBqivLy8pQt6LAzB2owjatLi8ihPq4O/De8+CqdPU0XxOiWg\n6Ul0pKp17K9jNqUc2UaqAIp8bl1PpRlwpPuOOCNVpfkeSvPVuJSo9aVzntCEYyamlOTpE5KuQ5rt\n61SyI1Xp0n/2MOUM6T8hBCtXrqIhJCnxqb+nHSsT2A1IKMrCo8qJywtfOQ8atsFnL3QoBZh6Aq0t\nPjTtwoxmnibQzfQNUWV9150RqQJvYofu/L6ZaTx4NBqNNUTc+bvjutPx320INf4Jlerb2xSOu60L\nx2N9acx3NR1AyvaLKjtSFUkzFNkuXvcVZXg6ydTp02kMJ0RVRUVFS+HSuF1dFrbhpp6KOw8OOkOt\n8d+PZOhQzIzf709aT8aaL40mHbGoutSiKjN2Ks/+zpcVeCjOSwwede4KbFPottJ/Go0mgdshquw6\nRoBQNEY4ZhK0itPdLrXLcBlCD8ztKuJu6u2I/DnTfy1ElRWp8rQUVbZAqam5jfzSoZx7xmlMKb+J\nmpqalsKq0XZTb6+o8qkxOod/Dz54UomqLDsB7Zov53oy1nxpNOkwe1dU9ZGaKnWZqV7K5zZoCiXf\nR2f1NJr0JNJ/yUIqfp3ENIOwFaGyL+3vlVs3eHQd7RpRY2FHoSLNtGjhaSVSJYSgrKyMm8rL2W/M\nO4hwI6tXrUSibk8SyvaImvam/4RQ0apDzoK3HoStL8GY05XQavOhquYLoKamhpqaGoB4zVcqas6r\n3tlrHMQ9qnSkqk0agkqBpn6HPC6DoUU+wJn+0180jSYd6YYqQyIFaH91XI50oX2Z53HhcRkU5fWJ\n87G+QUfcn501VanRm0jr6T+/369qqHxFEG5EoGqqWqTX7GHK2cz9S8VTAAd8HbxFsOVnqq4
qnadW\nGpzCyhDqp3rFEpZWzWb21EnIYB3EIjotqEmPGVXfpV468etTosomnVxKLWbXkSqNJj3xSFXK7YML\nvJQVeOInJOmiUS5DMLjQm1SDpekkHTEqdFvzRFuLVHnTiyqwTjq9JSpVKM30J6FNO8DwQH5Z9uuK\nr8+rIlNnzIH/Pg+v/1o9VyTY5kOllEyvuInB+YIRhepn3oybCNTu4u6Nd1E5awayaRezp07itrU6\nLahJwYz1WpQK+oioSv2+txaFkjpSpdGkJfUbkfoVMQyBz504sOdbzSDOu+mRTt1AR9J/hgHuAitS\n5fATs40/oVVRBUBeibqvNFv+zXR4VHW0Nd2TD0f9EMZ8C15Yr2wegnWJyFwqkWZk4w4qp07kgc3r\nuGHitXy+s5bLxk1i+e2bqJf5XDzuRhau2UjZ8H35+aa7mFU+IX3nombgYkZ7LUoFfUVUpRwO0p0k\nJzoEdeefRtMa2Z7Te90Ggwq8DC70JnX9aboYaaqdVnt2XMJQkaDUQnVpJgxB2xRVpUropLM8kM4R\nNR08TLgt1/dvzVR+V3+tSri0N+9FhhrVcwfroGk3snkvQhgUlA7mp+Ouo2rVnQhvEctnT+buqf/D\n5YNeZc1ZghNGGrgE1AYlC6oqEe3sLtT0Y6Ts9UhVnyiMcO5rhhX52p7rp3f9Gk1G2hPNtWdvul0G\nkZjZZ3zdhBCDgV8Do4FPgIuklHsz3LcE+DfwRynl5J5aY5x2jaixsUbVRAIkyWQplc0CZKypimOn\n9ZprW6b4bDf1kn067kztcqtOwCGHwLdvgacXqRE2R13IimWLaairo6rKjxAGUriYOW8xhWVD8c9d\niNzzIeLJufDKLxChen5SANJbTGjLO7w8Qb0u/zMhbq30s7DKj4gGIX9wr0YoNDlAL3f+QZ+JVCXI\ntFN3HiD6yH5fo+kVOlJ94nGJvhYBvgX4q5TyUOCv1u+ZWAA82yOrSkesA0aF9lDlaHOaSFUzIBLF\n7JnIH6Qug2m0Zkfm/qVB+qzBzWPHwcHfgcdnIB+5kca6Wpas3cjMeYuRhcOouHURd92xjgObXkWu\nORqx7mT45zo47Bw4fz1y0otM3XEh+yzbzSPNJyFHn47/2z7+p+4+lvpnIWMRNW/QTJPK1AwczN7t\n/IM+IqqyJZ6i6GN7f42mJ5Gy/Wm8Ip+bIYW+bllPN3E+cI91/R7ggnR3EkKcBIwA/tJD60pGSnV2\n7fK0fd8kLFEVaiRZJls1VZ6CtsVQniV4muta/i0Wgea97RtRk4Lf76di2nSk2wdmBHnRL3kyeCzh\nd5/EP/L/+MfNJ3DUJ5u550dFnPbFBmpvKeXqwmcQRSPgB7dB+etw4SY49mLE4DEMLSviuuuu57yq\nPyB++hDyh3dxwqgCKkoeR3z6gnofg7UdWqumn5ADkao+kv5r3yFAR6o0mpbEv0aSdqsqIQR9bNTf\nCCnlNuv6lyjhlIQQwgBWAZcDZ/Xg2hLEwurS5W3f44Q1VDlYm1KobipLhQwjapLIsyJVzWmESGA3\nYKo5fh3AaeIpMFk9fxa33jKNxWueZ2nFT5l5SICxnncZNlodgobkC8Tor6vxNiddperFzBgE9sRr\nvuZMnYxEIsIBiIYRR/2Q/P2/hvjdVfC7q+Env4Z9jlWisq0onaZ/YkZVY0UvBlY6JaqEECuA7wNh\n4EPgailll58qZP32CEDqmiqNZiAghHgK2CfNn+Y4f5FSSiFEuqznJOBxKeXWNus0hZgATAA44IAD\nOrbgdNiiymhnpEoI8BVD/daU9J+EkC0q2tgP2nVU6aI7HXVTjy8v4TW1pqaGO9fdxqA8wc3l1zNz\n1TqAuFO6TXn5YVRfMQERaVL1XLGw9TqL1PtjuNUJthlVQjCwB+EthCsegc1nwUOT4YqH1UHVU6A7\nlgYivVykDp1P/z0JHC2lPBZ4H5jV+SW1pL3fjU6UAGg0/R6J7BcnHlLKs6SUR6f5eQjYLoQYCWBd\n7kiziVOByUKIT4CVwBVCiKUZnmuDlHKslHLssGEdExppiQRVMXe7U2xCdfelpv+kaaX/sohUxQvV\n09RUxef+dcD4016hQ1gFo9AYliyZfysE9jCz4kZqamooLy/HNE3Ky29i8x01VE6fhAw3KUFluJWo\n8xWDJ08Vvhsu9X4VDVfdizKm6sp+/DPV9fjMYnVgDdV3eN2aPoxt/NmLdOrZpZR/kVJaSUxeAEZ1\nfkktaX/6r+8fMDSa7kLKAXES/zBwpXX9SuCh1DtIKS+TUh4gpRwNTAd+IaVsraC9a2lWarPUAAAg\nAElEQVSuVQcB28izPQihUmSpPlVY3X++ItqMVOU5uv9SadqlLjsYqYLEnEGbhjDcPG8RyBj7lOUx\nZ8oEqhfMQjTtpHr+LVRMGk9B6WBE4XAl+AoGZ/bIsl9//mD1+otHIk+5Dt55BHZ/AOGAKl7XDBxM\nU30WejlS1ZXPfg2qhTkt3RY+d6Ld1DWaNhkg3tNLgd8IIcYB/wUuAhBCjAWuk1Je25uLwzSVG7on\nL6uZeC0QhopURQLJHW92+q9scNtn7J4CMLwQSieqOjj3L76MxCBke25fRUUFK2pqCOOhevE8MKMI\nq5ZMuA2qVtyGcFvNEK4sD01uL3iLWLXET6B+N7futw/iV5chr3yEWyur8JSOwO+v6tBr0PQxopZb\nf3vrE7uYNj+5rdUtWGF2hBBzgChwX6btSCk3ABsAxo4d2+79uhBQ6G19ufZGdaRKo2mJM+XX378h\nUsrdwJlpbt8CtBBUUsqfAz/v9oXZ2F1KtkFmu3FYJoQboXCIui5NJbSyKVQXQnUABuuVMHOmIBt3\ngnAr88+OrM4a3GwLKmcqsKysDFEwON0r6hDSW8iO2gD3br4H7/jLmFn6GM+tncjdm17iymsn6qHL\nA4VoUEWp3DkuqqSUrXbFCCGuAs4DzpTdOIBpeHH2IXItqjSazOg5aTlAZ1u/hUg4pged9UNW+i+b\nQnVhqHqlYL2VQrRElWlCYCcUDOr4iBqUpYJT0NjCqqsFjhCCpatvxy1MVm/YwPAzffzkmHeYMv4y\nZvjnImKRXj/QaroZKVWXaA50fXaqpkoIcS4wE/iBlDLQNUvqPDr9p9FkRo1y0l+SXsWMKmGUbZqr\nBSLhmB5qSNxsp/+yKVRHqNqldLYMTbuhsHPGn9Dyc9ZdnzthGCxcsRaXgCXPhwjFYMYJTSo6G8mZ\nQ1OXknpyNKBPluz6uV5O/UHnu/9uB4qBJ4UQrwkh7uyCNXUaHanSaDIzkPe9OYMZ7fj4F1CpOo8j\n/WcTC4MZzj5SlVemCtVTRVXcTb0Ta+xBpJRUTJ9JY1jSGIaqZ0KID/+K/OhvKi2UaYhzjpNJOPn9\nfioqKuK/2zVsdnSwtW30S9rr92bGkk9GupDOdv8dIqXcX0p5vPVzXVctrDP0lflkGk1Pos81cggz\n1qnUGuCIVDnSf3YqMNuaqvzBaqCxdIgOGbNEVS
eGKfcgzqL4a66/ic+/2Ebh167g7R0xdj44HRkN\n57TFQnuFU2VlZdxY1f67/fqfeOIJpkyZklZstfZcfR7bgiNba5IPn4aVh8Pnr3T5UnL/G6PRaLoU\n5VOl6VVkF5gU2jVV4abEbXbUylvYtiASQs3/C9YmR3JsJ/OCoX1iQHFyUfwaRF4pVVVV/KvgTIa7\nGhDvPKT8wJzvU46QjXCaWlGONGNMr7iJO26voaFuL6tXLqeiXHl9GYahuixvupFTTjmFtWvXthBb\ntbW1VFbOY2pFesHVp8WWlEpUtadu7rVfKr+zEUd3+XL6xJiabHEZgpjZhz4MGk1voL8ivYsZUweC\nTkeqitVlqDFhPhZyiqospHPBYDAjKhVizQKUgd0IGYViZafQF7rnkoriPXmISB5Xzb0Dfn8tPL8G\njvgBhAO9WsgspURICUiQJqYZo7F2Fz+7Yx35IsTixYuZM3sW923cwMQJ41ldVUmJCLBp0+38evPt\nANx843iqqm5BBHazav4t3L/p9vg/c/WCW0AYlBrNbNq0ll9vXouUMGfKBOZX3Yy/spJfb95EoQiy\nYMECbp07j3vuuosTThzLzXt3sGz5MoThRgqD6dOnU1w2uG/YUcQi6vPvynI26fa34d3HYOw13dLA\n0K9E1ZDC3i9S02j6Ajl+jOzf2FGhzkaqbFEVaVJ1UMKVHKnKhnxr/l/TLijdD7/fT2n9+1SUAIXD\n4tGMsrKyeAopV0kSfr5iRDQE37oZfvED2HI3nDJRRaw8HTBbbS+madW3RcCMsnzZEurr6lhQVYlA\nIJH4K/3sU1rMlEnj2bDxLn628S5MCZMnXMO8m29E1P6Xqut/zJt/uptCL3hdgqqLT0K8/gAy3Mwz\nTz5G5be9FLgFBR7BS0vO46unn0XVOUMpeNNLzJTEJMw9/yDEu49RNe57jDBq2XjfRjZv2oBAMGni\neAwkmzffSZ4RparKT2Wlnwc2b+Laa69FNmwHlxfh8liNFV6k4VaR7lzZiUSD8bW1yc734Z4fqCjs\nqZO7ZTn9SlTl+tmURpML6EBVL2PbKXS2CNwWVWErUmWaiRSXnRpsC9svKrArPgT51T/9hopLC5EF\nw5IMPPtCxCqOy6OE5dDD4MgLkC/eiTjmx/ExN93WARtusiKHicJ/KQxq6xpYf8cGItLN0oV+Nvgn\n0vDCo1z8nUM58pBiJkwopNgrKPCAx/VbuPO3gGo1+MMlDnPYJ2bGb/+6W3LKicXklw5hZ20TZvMH\nRF78BAPJLac5ojbPLI4/5oZiuOG64sTaCp+GIQdz3uQTePj5e7jsG3dT6BVMHX8p0xavZPnSxTTU\n7U0Sg5WVfkpLS5g24xakOw8hDPV+I6z31ejZtHE0qARVNs/5l1vVv82VD0PZ/t2ynNxPmGs0mi6n\nP8z+67PYReGdTf+5fWrETageJZUtjypIFLG3hW3uGdgT95H6yQ+UNeGp370gyRG9zwgqG18xCIM7\n/11ALBJCvnAHmDFksC6peLtLiIaVYWqwXgmMvBIVBSwchmjczqIfHsLfph7Dzb57ESsPYWLRX6k+\nJ58jhxm8/UUjz/03xt2vhVnxjzAPNY/F/P5aftZ0Bmf+oonqhvOQ1/2dhfU/ZNTqBubWXsQSORF/\n02Xk3/wOYuKzDJv1OjXRn3D+0wcxalUdt9ZdgpzxEbNrL2G/1fUsrT8f8+o/c1fj2Vz9UDOz/xrk\n1v8L8sKeUhCCk4c2sejMPO6/sICN389nesmjUH0UV8QeoOzte7h7wfXIYB03z1tI9fqN7KgNsHz5\nMipvmYoM1kLTLmTTDiqnT2Ll/JmqozRYr7yjurM+K2J1dnqyMNGNNMPHf4NjL4Khh3bbkrSo0mg0\nmp7EjCpB1VmRIgT4SqzuPWvuWbymqrj1x9rYkarmvWCaCCG46H+/BcCXjepg2CcFFYAQSE8eX9TF\nWP+vZuSr9yHrP6dy9kzW3aaKtztdkB2LqPcusBtkDOnJV2Kq7nN4eiFy1eFw1+mI/5vPscNi/OHd\nCPOeDnLOL5swp7zNjE++xcnL3+aV/a+h/KGd1B0zjp+u+T9m3/si2wsO52vnX8uUypWIgiHM8S/m\nmnHXUlZWyqzpU1k8X0WPMNwIM8biRYv49nfO4qprr2PBoiUIGWPR/ErGjRuPu3gY/jU/Y+4dv2ff\nb1/Doj9/SeTEcZy96mWmvnYIUz89m8LF9RxzRyNHrGvkgcA34NiL2eewk6g4NY9rCp9BrD+FyoJf\n8ty0Y1lyybGEG3axqGYj0+YtRfqKmDlvMavWbWR3XRNmpFn5gwX2QON2ZGCPEjWxqPrpKsKNVvQx\ni5Tuh0+rqNahZ3fd86dB9EaV/9ixY+WWLVt6/Hk1moFMMBKjrlmZ5OV5XJTme3r0+YUQL0spx/bo\nk3YDnd5/Ne1WgijNqJZ2EayDjWdB2QFwyX1qm8+uhGeXw02vweAxbW9j7ydQcxx8ezacPhUpDP45\n+xSOd73P8BUNNEXou5EqACmRjTtYXHkLk70P8swnUSY+GmTc+AksXH1H515TsF6l+4QAdx5Vy1Yx\nrPE9rj+qAfHffyANN28E9+Mj7+FcMH09025dwMY7bkOgHjL5+okMKimkvr6Bqip/UnqtpLSU6dNn\nIoWRqGcSBhKh5iW2YhqbmqaVpolAsmj+PJrq9rBo0SKEGUWaMebNm8err77CK6+8yjXXjmfB0lXM\nvPkWbrt9HddPvonV1Wsg3MTJB5Vy9HAXJ+/n4rozDkQ0bFNvgXTz5/ea+b9PYjz0boTLrp6Au2QE\ndXV1VK9ejYiFkdEgc2ffzKDSYqZNnaYW5fIoMeQpUBHXjhAOqO9AflnbkarmWrjzNOuN39Kh58x6\n/yWl7PGfk046SWo0mp6lORyVX9Y1yy/rmmVtINzjzw9skb2wv+nqn07vv+q/lDKwt3PbkFLKYL2U\nG76jfkJNUoYDUj46XUr/ICkbd2W3jcBedf9Hp0szHJBTyyfLRy/Nlztu3VeaoSZZXl4uAVleXi5N\n0+z8mnuDUJM06z6Xc073SllZIs8a45Jm3efq/eoI4WYpG3dKWfeFlM21UkYj0nz1fvmfOYdKWVki\n98zdR5rPrZFzpkyQhkBOv+l6Obt8vBxZJOTcKeOlWfeFnDtlvBxRKOTN5dfLWCQkZTQspfX+duf7\nnLptMxqRCytnyxnlk6TZsEPKui+kWfe5nDtlvFxROU2ajbvkzPLrZaEH6TawPgs3SXP3R1L+a6M0\nHy6XH91UJGVliZSVJdL0l8k3Zx8lb/2mV66b9v+kGQ3HP0NTyydLM9QkzeY69fms/1I9X9Me9Z62\nh+Y69f437szu/o/PlLKyVMrPtrTveRxku//qV4XqGo0mO/pi0KFfIFU7fac7/yDhiF77iVUYLZTn\nVF5p9vVahhsKh0HTdoQZZXBZCccXDWfomCMRhit5CHIf/dBITz5zb67g3jciTDzJy7Kz86isrKRq\n/kKEO
699X4ZYBNm8F2G4VYTks38hn1mC+PSfHDT0AH73xde4fPVf8C6bQr5HMKd8PFVVt7JidQ2X\nj7ueqlVrEYaLqtV3US8LyCsrw0hp6+/O97nF2CCXmzn+RYnolhlDSEnVitsgEuDW2bO4d/MGpt4w\nnqoqP/Mq/dx+51o8xFi2qoap06ZRs7aRrww1uPAIN9/7xnF8fd8YC87IA/4MC4ZyeSDGV6f/Lz+Z\nM5Oqxcuora1TkU9AhuqpnDXDKnyfpTozXb7W0+PhgIoQegvaTnOHm+CVe+HFu5SFwqiTuuR9bJVs\nlFdX/+hIlUbT8wQjiUhVXbOOVHX0p1P7r2hYnWF3NEriJByQ8rfXSLn8YHXmHqyX8t4fS1lzgoqg\nZEOoSco7T5dy8zlSBvZIGdgjzaWjpfzdOCmjESll90ZOuhvTNGV5ebn0GKgo0Wu/krKyRC4+0yfn\nThkvYykRvVZfa7BBrqicJm+1ok3y1z+1IlMj5SOVF0gZDkqzabccWSTkyCIhB+WhojIZ3se+8L5W\nVlaqyFQ0ImU4IM2mPXLOlAlyxbypcs6UCbLYa0WgopGkqGYssFf+6Ai3XHCGT75zQ2E8khWYN1Su\nOdcnq6deIs1YLP4YFSnbriKIdV+on4bt0myulTISlDISUp/35lop67e1HYmNRaV84S4p156knnvj\nWeqz3gmy3X/pSJVGMwDpmzGHfkBXeVQBIFRUKmh1/0mZqDHJdryMMKBwOOz5WK0tEkA074HifePb\n6KsRKki4rU+6sZyqRfMQsQjy2EuYIX/DhC2v4791FlWLlyPyy5Ay2ZNL2tEb6301w03sqWviqd9s\npqL4MQZ5o/wleBwXrHqOm24Yyf8EdlHpr6IxLAlEICah4pZ58ahMTw2X7kqS3geXG+HJZ8HqOxGx\nCMsWzmXKpPFUVc1CNO2keuEcikQz+SX5TJ81j9+/E+X370SZ93SIVVN/wpSLvkXe1n9xI7/FEI/z\n3k0lfHOnqaJYlZVUrVxPfe0eVi1fgpAmMhpORLHsWixhKPsEX0n6BUsJb/8e/rkePt8CpfvDpb+C\nQ7/b+W7bLNHdfxqNRtNTdJVHFVjpv1KIhVRKRJoQ3KtSgtnKZmFA0XBo2qE62Wo/V7eX7NsnRtRk\ng9/vV8LGVwJIxGkVuPIKufWEPdx2x0YqZ89EBvZQUTHFMdKlkoqKKciw6mCTkQBzK+dzfsnb/Gt8\nEc2N9Xx1/S5+sPI5pk2ewJKqOcyuXMTCNRu55vpyIjGT8vLypPl8fZW0YtDt5Wb/MqpWrkPkDwJv\nIUII5i9YRKh+N7/afDuzp0zEDOxhRvkk5qy9n+kPvAnnr0dM/4Ar/tDM1nrJyfu5uKzwOag+ih+G\nH0S+eBfLbpmE9BRSMWcBC9ZsZFttEDOvVDV2FI+AgsHIdAKp4Uv41WXwu2tUGvx/V0HFW3D493pM\nUEE/M//UaDTZ0RfOkvslMqaETFcIFltUATTvAfdIaK6DEce0L1JVNEJ5XUWaoe5Tdfug0Z1fXw5h\nR1rwFoKUiHOXctBDN/B4xVjOrd7Ipk2biJows/x6lizxM2vWHO7feAclopmqqio2zZ/MpeG/cLRw\nIU+5juMuXI5EMCRfsGDBAkReGXllw5M6JftDPVpbCJdHdfJZ3XcG4Crdh8vHXc/ChVWIWISl8+fg\nExFKS30Q2EVlpZ/HPojyyPsRIjFYdOMl3HTGvhz74VNUn5MH/JFPp/2e0e9GeHDG2ezKjzDn5mks\nXr1e1WHFI4ql+G+ZCtvegDd+BW/8VtVhfXchfG1SjwopJ1pUaTQDBG34mQOYsa7b2SeJqlpVcN68\n10r/tTNSBersvm6ruj4oCzuGvoi3SBlGHnI2nDaVU59fzV8uL+TqhwLsDMDS+XMRkWaWzp+DR0RZ\nd+cGCt78Bbec5mNnfhnmpZuYecejeFwCKaE+JKm4dRHV1WuSU2UQF1b9VVBlotI/P/l9kJKqlevA\njDFzxnQ23LmRyddPZOF8P5Xz5rFswya2mxNYtORZZOMOJpxzFN8/zM0NX/Xicb0IvAilsGPeoww7\n6nQ+fuslbjS/ZJT0wtI16kldXjjxCiWmhhzcey8eLao0Go2m5+hSUSWsVB9KTIWbwAyr27KNVBkG\nFO2jrofqoWGbciIvGtY1a8w1hFDmnIFdyFNv4Od/foULhjzNv8YXsfDZEEv9s7hlxSbEJ8+x4Jtw\nS34xRV7BHVvCTPzNG8ybM4v7N2/kmmsnsHDVOiqmTqOmpgZIL6AGmqCySXrdQsQjWoVlQ7n6unIW\nrlbvVdWK2wlJD2WlhRAOULloBQ+/F+XXb6ko1owbx+Mf9z0evfd2PFtf4IDdD7O9SVK632EcdOq5\nKso6/EgYNbbzvm9dhBZVGs0AZGDu6nMAGQOjg2aHqQiholMAjduheKS63p5CdYBhX1GX29+G3f+B\nsgO7qJA+R3G5kXmlVM4s584Nj/DphGuYd9JelnqfAv5AZP7DeESMoPRw/5sRfvlGhLd3mmybcwvF\nJaVcNm4SC1etRRjGgEjxdSUtonluL0urbwegomIKd63bSMXk61hUNZfKSj/r7tpEk/SxYvWTuFyJ\nkxHTfCVn3+/+UYmo0Wg0uY4ZU91JXSlYbCHVsB2adqnrBUNol2wuGqY6AL98E/Z8aImq3qlH6SmE\nJx9P6XCum3At86oWIC66F3njKzwQ+AavcDS/aPom+y3fzX8PuZK/vfkpkyZey6p1G9lWF2HZaiWo\nIJHi69I5gv2cdNE81aU5iIk3lLNo9XpE0T5ULV3NpInXMrLUy63Tbkh6TC4X//fj0xGNRpOJHD3J\n69/E1IigLhVVeaXKLLFpOzR+qW4rHtm+SJUwYPgR8O5jKgV4zMVd052Y48z1L0RGw4hw4/9v795j\n5KzqMI5/n5ntti5dKMitKookYMRLxGwM/iGKoBJIrImJgQSFpIUURCNeYqWIbIsERFCMCNYr4A28\nROstEQhIJFasAQHxAoKXAtKV2kIpW/by84/zbrtddtsZ9p13zuw8n2TSmdmXmYfZ2d/+9sx5z4Hh\nLWheHyd/8usogt9ccTnnLF/M4KpBNH8hqy6/hqeij0WLFlGbcpJBriMmnWaXUSwJzV/I4GevZsVH\nPsi3vvZlPvmhMxhcfRErzh/kM1deCeS5L6WbKrMukVnt6T4TyynUS9xzsV5P80qeejxdAPoPbu7s\nwlo9zUt5+Nfp9hHvKDdjxtTTC/V903w0Ao2nszM/ct4gUe9Nc4FI4345/gKfa6a+vrV6nb5FB3DK\n0vczePFqFONcsmolvRqhd5/+LL8fbqrMupDPBGyD8ZHdb7/xfEwsibB1I2x9NG0U+4ImJ+yqBke9\nF+77Iex3WNqgeY5//LcLCeYvfO7dzznMPzPt8Jx5WGMjrFp9EYrxtPCtamlj5kzWVXNTZdYl/Cuh\nTcZG0xpJY6PlTwBXPS2I+Ohd8NRBsHBxeq5mH6Nvv7RQ4taNXT
NKZZ1jl4a2Pg/17Q/btxQjjMCz\nW9OyCvP60h8WbWyA3VSZdYnJhcl/dFdkfAyeHkqLI46P7lgksTSqEQsPRFuHYOsQ9B9MNDsOuWNU\nSqmhmstn/tncUKulpTHmj6czake2wej2tLRIrZ6+1qY/DvIYLzOzlnMf1QYT86hGnkn/9pS0nELh\noosvZu3t96T1qR75PdF/EOedf35zZ6NNNFGjz6SzEz1SZZ2iVkvv1wX7pEVsX7Bvun/bpp0nhlQd\nqS3PamaV8+hUG0xsoAzFZrDlNSwRwabNT7Lq+tt23PfTP2zgS1dfw+bNmxs/5bxWT9kmGr+amyrr\nUPMWpCVFJNj2RHpPV7z0gpsqsy7RTRNtJe0n6SZJDxT/7jvDcS+V9CtJf5Z0v6RDSw0SRVPV21es\nH1UeSVz+uSs56eSlO+47d81tLF9+dvNnqtV60i+fiT3yzDpVrZ52FYhI2zdtfTxNaK/q6St7JjPL\nRhf0VyuAWyLicOCW4vZ0rgMui4hXAm8ANpaaYny0KPL7tKRZkcTgRRdzzDee5phvPM0zo3DJpZc2\n30BPZKv3lp7RrHI9vbDX/mnLpYido7AVcFNlZnPREuDa4vq1wLumHiDpSKAnIm4CiIitEbGt1BTj\n4y1dSDMi+MTKC3hg0zjrNqRRsY99fEXzq033Lky/gCb2EjTrdPV5aamMBXtDjKefxQq4qTLrQl2w\nTtVBEfFYcf0/wEHTHHMEsFnSjyTdJekyafoOSNKZktZLWj80NNR4ihhr2fo5EcG5557L5794NWcs\nW8b2pzaxbNkyPveFq5rfxqNWT7+AMlnrx6w0O07EGK7k6fzhuZl1JEk3AwdP86WVk29EREiarsPo\nAd4EHAX8C7gBOB342tQDI2INsAZgYGCg8W6lZ0HLzqZL+6UtYvk5H2RwcAUaHWZwcJAni+1UumkO\nndmMJk68GN4CY9t3niHYIm6qzKwjRcTxM31N0uOSFkfEY5IWM/1cqQ3A3RHxUPHf/Bg4mmmaqudt\nwd6lPdR0dqw2PbwZRoZRre7tVMwmq9XSKOzodhgZht6Rli4b4rFesy7UBb9z1wKnFddPA34yzTG/\nBxZJOqC4/Vbg/gqylUoSzC+at1qPGyqzqeb379y+aXR7S5/KTZWZzUWXAG+T9ABwfHEbSQOSvgoQ\nEWPAR4FbJN1LWh/1K23KOzu1Oux1gCeam82kVkvzq8aebenT+OM/sy4018cyIuIJ4Lhp7l8PLJt0\n+ybgtRVGax2vL2W2ez296SPAFvJIlVkXmevNlJnZjGo9xfIKY3s+9vk+Rcse2czyU3RVnndjZl1n\nYnmFiT05W/EULXtkMzMzs1xMLK8wOgxPPwFj5TdX/hDerIsIEVS7waiZWRZqtbR5+LPb0inQKn9c\nySNVZl1kwbz0I+8P/8ysK83vT//2tmYHAY9UmXWR/gXz2Ku3h1rNbZWZdaHevrTTQYu2ZPJIlVmX\ncUNlZl2thXtcuqkyMzMzK8GsmipJqyXdI+luSb+S9KKygpmZmZl1ktmOVF0WEa+NiNcBPwMuKCGT\nmZmZWceZVVMVEU9OurkX+FxtMzMz606zPvtP0qeB9wFbgGNnncjMzMysA+1xpErSzZLum+ayBCAi\nVkbEIcC3gXN28zhnSlovaf3Q0FB5/wdmZmZmGdjjSFVEHN/gY30b+AXwqRkeZw2wBmBgYMAfE5qZ\nmdmcMtuz/w6fdHMJ8JfZxTEzMzPrTLOdU3WJpFcA48A/geWzj2RmZmbWeWbVVEXEu8sKYmZmZtbJ\nFFH99CZJQ6SRrXbYH/hvm557d3LNBflmc67mtTPbyyLigDY9d2lcv6aVay7IN1uuuSDfbNnXr7Y0\nVe0kaX1EDLQ7x1S55oJ8szlX83LOZnuW6/cv11yQb7Zcc0G+2XLNNZn3/jMzMzMrgZsqMzMzsxJ0\nY1O1pt0BZpBrLsg3m3M1L+dstme5fv9yzQX5Zss1F+SbLddcO3TdnCozMzOzVujGkSozMzOz0rmp\nMjMzMyvBnGyqJJ0g6a+SHpS0Ypqvz5d0Q/H130k6NKNsH5Z0v6R7JN0i6WU55Jp03LslhaTKTmtt\nJJuk9xSv258kfSeHXJJeKulWSXcV388TK8r1dUkbJd03w9cl6QtF7nskvb6KXNa4XGtYrvWrkWyT\njqu0huVavxrJ1o4a1vH1KyLm1AWoA38HDgN6gT8CR0455mzgmuL6ycANGWU7Fugrrp9VRbZGchXH\n9QO3A+uAgYxes8OBu4B9i9sHZpJrDXBWcf1I4B8VvWbHAK8H7pvh6ycCvwQEHA38ropcvpT63qq8\nhuVavxrNVhxXaQ3LtX41ka3yGtbp9WsujlS9AXgwIh6KiGeB75E2e55sCXBtcf0HwHGSlEO2iLg1\nIrYVN9cBL8khV2E1cCkwXEGmZrKdAVwVEf8DiIiNmeQKYO/i+j7AoxXkIiJuBzbt5pAlwHWRrAMW\nSVpcRTZrSK41LNf61VC2QtU1LNf61Wi2ymtYp9evudhUvRj496TbG4r7pj0mIkaBLcALM8k22VJS\nR95qe8xVDLEeEhE/ryDPZI28ZkcAR0i6Q9I6SSdkkutC4FRJG4BfAB+oIFcjmn0fWrVyrWG51i/I\nt4blWr8azXYh+dWwrOvXrDZUttaRdCowALw5gyw14Arg9DZHmUkPaQj9LaS/jG+X9JqI2NzWVHAK\n8M2IuFzSG4HrJb06IsbbnMuspXKqX5B9Dcu1foFrWNPm4kjVI8Ahk26/pLhv2mMk9ZCGNZ/IJBuS\njgdWAu+MiO0Z5OoHXg3cJukfpM+x11Y00bOR12wDsDYiRiLiYeBvpCLV7lxLgWYSbIgAAAFbSURB\nVBsBIuK3wALShqDt1tD70Nom1xqWa/1qJFu7aliu9avRbDnWsLzrV7sndZV9IXX9DwEvZ+fku1dN\nOeb97DrJ88aMsh1Fmjx4eE6v2ZTjb6O6ieqNvGYnANcW1/cnDQ2/MINcvwROL66/kjQfQRW9bocy\n80TPk9h1ouedVb3XfCntvVV5Dcu1fjWabcrxldSwXOtXE9naUsM6uX61PUCLviEnkrr9vwMri/tW\nkf5ygtRtfx94ELgTOCyjbDcDjwN3F5e1OeSacmwlBamJ10ykof37gXuBkzPJdSRwR1Gs7gbeXlGu\n7wKPASOkv4KXAsuB5ZNer6uK3PdW+b30pbT3VltqWK71q5FsU46trIblWr8azFZ5Dev0+uVtaszM\nzMxKMBfnVJmZmZlVzk2VmZmZWQncVJmZmZmVwE2VmZmZWQncVJmZmZmVwE2VmZmZWQncVJmZmZmV\n4P9WIgz916sX9AAAAABJRU5ErkJggg==\n", 158 | "text/plain": [ 159 | "" 160 | ] 161 | }, 162 | "metadata": {}, 163 | "output_type": "display_data" 164 | } 165 | ], 166 | 
"source": [ 167 | "Xs = np.linspace(-0.1, 1.1, 300).reshape(-1, 1)\n", 168 | "\n", 169 | "\n", 170 | "def plot(ax, model, name, c):\n", 171 | " Fs, ms, vs = model.predict_all_layers_full_cov(Xs, 100)\n", 172 | " qs = [np.percentile(Fs[-1][:, :, -1], q, axis=0) for q in [10., 50., 90.]]\n", 173 | "\n", 174 | " ax.plot(Xs, qs[0], alpha=0.1, color=c)\n", 175 | " ax.plot(Xs, qs[1], alpha=1, label=name, color=c)\n", 176 | " ax.plot(Xs, qs[2], alpha=0.1, color=c)\n", 177 | " ax.scatter(X, Y, marker='x', color='k')\n", 178 | " ax.legend()\n", 179 | "\n", 180 | "fig, axs = plt.subplots(1, 2, figsize=(10, 5))\n", 181 | "plot(axs[0], model_adam, 'adam', 'C0')\n", 182 | "plot(axs[1], model_nat_grads, 'nat grads with adam', 'C1')\n", 183 | "plt.show()\n" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": { 190 | "collapsed": true 191 | }, 192 | "outputs": [], 193 | "source": [] 194 | } 195 | ], 196 | "metadata": { 197 | "kernelspec": { 198 | "display_name": "Python 3", 199 | "language": "python", 200 | "name": "python3" 201 | }, 202 | "language_info": { 203 | "codemirror_mode": { 204 | "name": "ipython", 205 | "version": 3 206 | }, 207 | "file_extension": ".py", 208 | "mimetype": "text/x-python", 209 | "name": "python", 210 | "nbconvert_exporter": "python", 211 | "pygments_lexer": "ipython3", 212 | "version": "3.5.3" 213 | } 214 | }, 215 | "nbformat": 4, 216 | "nbformat_minor": 2 217 | } 218 | -------------------------------------------------------------------------------- /doubly_stochastic_dgp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/UCL-SML/Doubly-Stochastic-DGP/31527b596e52c453c9810f9e936e65f55da8a365/doubly_stochastic_dgp/__init__.py -------------------------------------------------------------------------------- /doubly_stochastic_dgp/dgp.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Hugh Salimbeni 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import tensorflow as tf 16 | import numpy as np 17 | 18 | from gpflow.params import DataHolder, Minibatch 19 | from gpflow import autoflow, params_as_tensors, ParamList 20 | from gpflow.models.model import Model 21 | from gpflow.mean_functions import Identity, Linear 22 | from gpflow.mean_functions import Zero 23 | from gpflow.quadrature import mvhermgauss 24 | from gpflow.likelihoods import Gaussian 25 | from gpflow import settings 26 | float_type = settings.float_type 27 | 28 | from doubly_stochastic_dgp.utils import reparameterize 29 | 30 | from doubly_stochastic_dgp.utils import BroadcastingLikelihood 31 | from doubly_stochastic_dgp.layer_initializations import init_layers_linear 32 | from doubly_stochastic_dgp.layers import GPR_Layer, SGPMC_Layer, GPMC_Layer, SVGP_Layer 33 | 34 | 35 | class DGP_Base(Model): 36 | """ 37 | The base class for Deep Gaussian process models. 
38 | 39 | Implements a Monte-Carlo variational bound and convenience functions. 40 | 41 | """ 42 | def __init__(self, X, Y, likelihood, layers, 43 | minibatch_size=None, 44 | num_samples=1, num_data=None, 45 | **kwargs): 46 | Model.__init__(self, **kwargs) 47 | self.num_samples = num_samples 48 | 49 | self.num_data = num_data or X.shape[0] 50 | if minibatch_size: 51 | self.X = Minibatch(X, minibatch_size, seed=0) 52 | self.Y = Minibatch(Y, minibatch_size, seed=0) 53 | else: 54 | self.X = DataHolder(X) 55 | self.Y = DataHolder(Y) 56 | 57 | self.likelihood = BroadcastingLikelihood(likelihood) 58 | 59 | self.layers = ParamList(layers) 60 | 61 | @params_as_tensors 62 | def propagate(self, X, full_cov=False, S=1, zs=None): 63 | sX = tf.tile(tf.expand_dims(X, 0), [S, 1, 1]) 64 | 65 | Fs, Fmeans, Fvars = [], [], [] 66 | 67 | F = sX 68 | zs = zs or [None, ] * len(self.layers) 69 | for layer, z in zip(self.layers, zs): 70 | F, Fmean, Fvar = layer.sample_from_conditional(F, z=z, full_cov=full_cov) 71 | 72 | Fs.append(F) 73 | Fmeans.append(Fmean) 74 | Fvars.append(Fvar) 75 | 76 | return Fs, Fmeans, Fvars 77 | 78 | @params_as_tensors 79 | def _build_predict(self, X, full_cov=False, S=1): 80 | Fs, Fmeans, Fvars = self.propagate(X, full_cov=full_cov, S=S) 81 | return Fmeans[-1], Fvars[-1] 82 | 83 | def E_log_p_Y(self, X, Y): 84 | """ 85 | Calculate the expectation of the data log likelihood under the variational distribution 86 | with MC samples 87 | """ 88 | Fmean, Fvar = self._build_predict(X, full_cov=False, S=self.num_samples) 89 | var_exp = self.likelihood.variational_expectations(Fmean, Fvar, Y) # S, N, D 90 | return tf.reduce_mean(var_exp, 0) # N, D 91 | 92 | @params_as_tensors 93 | def _build_likelihood(self): 94 | L = tf.reduce_sum(self.E_log_p_Y(self.X, self.Y)) 95 | KL = tf.reduce_sum([layer.KL() for layer in self.layers]) 96 | scale = tf.cast(self.num_data, float_type) 97 | scale /= tf.cast(tf.shape(self.X)[0], float_type) # minibatch size 98 | return L * scale - KL 99 | 100 | @autoflow((float_type, [None, None]), (tf.int32, [])) 101 | def predict_f(self, Xnew, num_samples): 102 | return self._build_predict(Xnew, full_cov=False, S=num_samples) 103 | 104 | @autoflow((float_type, [None, None]), (tf.int32, [])) 105 | def predict_f_full_cov(self, Xnew, num_samples): 106 | return self._build_predict(Xnew, full_cov=True, S=num_samples) 107 | 108 | @autoflow((float_type, [None, None]), (tf.int32, [])) 109 | def predict_all_layers(self, Xnew, num_samples): 110 | return self.propagate(Xnew, full_cov=False, S=num_samples) 111 | 112 | @autoflow((float_type, [None, None]), (tf.int32, [])) 113 | def predict_all_layers_full_cov(self, Xnew, num_samples): 114 | return self.propagate(Xnew, full_cov=True, S=num_samples) 115 | 116 | @autoflow((float_type, [None, None]), (tf.int32, [])) 117 | def predict_y(self, Xnew, num_samples): 118 | Fmean, Fvar = self._build_predict(Xnew, full_cov=False, S=num_samples) 119 | return self.likelihood.predict_mean_and_var(Fmean, Fvar) 120 | 121 | @autoflow((float_type, [None, None]), (float_type, [None, None]), (tf.int32, [])) 122 | def predict_density(self, Xnew, Ynew, num_samples): 123 | Fmean, Fvar = self._build_predict(Xnew, full_cov=False, S=num_samples) 124 | l = self.likelihood.predict_density(Fmean, Fvar, Ynew) 125 | log_num_samples = tf.log(tf.cast(num_samples, float_type)) 126 | return tf.reduce_logsumexp(l - log_num_samples, axis=0) 127 | 128 | 129 | class DGP_Quad(DGP_Base): 130 | """ 131 | A DGP with quadrature instead of MC sampling. 
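The expected log-likelihood is evaluated on a Gauss-Hermite grid (H points per inner dimension, built with mvhermgauss) instead of with Monte-Carlo samples.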
This scales exponentially in the sum of the inner layer dims 132 | 133 | The key ref is: 134 | [in progress] 135 | 136 | """ 137 | def __init__(self, *args, H=100, **kwargs): 138 | DGP_Base.__init__(self, *args, **kwargs) 139 | 140 | # set up the quadrature points 141 | self.H = H 142 | self.D_quad = sum([layer.q_mu.shape[1] for layer in self.layers[:-1]]) 143 | gh_x, gh_w = mvhermgauss(H, self.D_quad) 144 | gh_x *= 2. ** 0.5 # H**quad_dims, quad_dims 145 | self.gh_w = gh_w * np.pi ** (-0.5 * self.D_quad) # H**quad_dims 146 | 147 | # split z into each layer, to work with the loop over layers 148 | # the shape is S, 1, D as this will broadcast correctly with S,N,D (never used with full cov) 149 | s, e = 0, 0 150 | self.gh_x = [] 151 | for layer in self.layers[:-1]: 152 | e += layer.q_mu.shape[1] 153 | self.gh_x.append(gh_x[:, None, s:e]) 154 | s += layer.q_mu.shape[1] 155 | 156 | # finish with zeros (we don't need to do quadrature over the final layer and this will never get used 157 | self.gh_x.append(tf.zeros((1, 1, 1), dtype=settings.float_type)) 158 | 159 | def E_log_p_Y(self, X, Y): 160 | """ 161 | Calculate the expectation of the data log likelihood under the variational distribution 162 | with quadrature 163 | """ 164 | _, Fmeans, Fvars = self.propagate(X, zs=self.gh_x, full_cov=False, S=self.H**self.D_quad) 165 | var_exp = self.likelihood.variational_expectations(Fmeans[-1], Fvars[-1], Y) # S, N, D 166 | return tf.reduce_sum(var_exp * self.gh_w[:, None, None], 0) # N, D 167 | 168 | 169 | class DGP(DGP_Base): 170 | """ 171 | This is the Doubly-Stochastic Deep GP, with linear/identity mean functions at each layer. 172 | 173 | The key reference is 174 | 175 | :: 176 | @inproceedings{salimbeni2017doubly, 177 | title={Doubly Stochastic Variational Inference for Deep Gaussian Processes}, 178 | author={Salimbeni, Hugh and Deisenroth, Marc}, 179 | booktitle={NIPS}, 180 | year={2017} 181 | } 182 | 183 | """ 184 | def __init__(self, X, Y, Z, kernels, likelihood, 185 | num_outputs=None, 186 | mean_function=Zero(), # the final layer mean function, 187 | white=False, **kwargs): 188 | layers = init_layers_linear(X, Y, Z, kernels, 189 | num_outputs=num_outputs, 190 | mean_function=mean_function, 191 | white=white) 192 | DGP_Base.__init__(self, X, Y, likelihood, layers, **kwargs) 193 | 194 | -------------------------------------------------------------------------------- /doubly_stochastic_dgp/layer_initializations.py: -------------------------------------------------------------------------------- 1 | 2 | import tensorflow as tf 3 | import numpy as np 4 | 5 | from gpflow.params import DataHolder, Minibatch 6 | from gpflow import autoflow, params_as_tensors, ParamList 7 | from gpflow.models.model import Model 8 | from gpflow.mean_functions import Identity, Linear 9 | from gpflow.mean_functions import Zero 10 | from gpflow.quadrature import mvhermgauss 11 | from gpflow import settings 12 | float_type = settings.float_type 13 | 14 | from doubly_stochastic_dgp.layers import SVGP_Layer 15 | 16 | def init_layers_linear(X, Y, Z, kernels, 17 | num_outputs=None, 18 | mean_function=Zero(), 19 | Layer=SVGP_Layer, 20 | white=False): 21 | num_outputs = num_outputs or Y.shape[1] 22 | 23 | layers = [] 24 | 25 | X_running, Z_running = X.copy(), Z.copy() 26 | for kern_in, kern_out in zip(kernels[:-1], kernels[1:]): 27 | dim_in = kern_in.input_dim 28 | dim_out = kern_out.input_dim 29 | print(dim_in, dim_out) 30 | if dim_in == dim_out: 31 | mf = Identity() 32 | 33 | else: 34 | if dim_in > dim_out: # stepping down, 
use the pca projection 35 | _, _, V = np.linalg.svd(X_running, full_matrices=False) 36 | W = V[:dim_out, :].T 37 | 38 | else: # stepping up, use identity + padding 39 | W = np.concatenate([np.eye(dim_in), np.zeros((dim_in, dim_out - dim_in))], 1) 40 | 41 | mf = Linear(W) 42 | mf.set_trainable(False) 43 | 44 | layers.append(Layer(kern_in, Z_running, dim_out, mf, white=white)) 45 | 46 | if dim_in != dim_out: 47 | Z_running = Z_running.dot(W) 48 | X_running = X_running.dot(W) 49 | 50 | # final layer 51 | layers.append(Layer(kernels[-1], Z_running, num_outputs, mean_function, white=white)) 52 | return layers 53 | 54 | 55 | def init_layers_input_prop(X, Y, Z, kernels, 56 | num_outputs=None, 57 | mean_function=Zero(), 58 | Layer=SVGP_Layer, 59 | white=False): 60 | num_outputs = num_outputs or Y.shape[1] 61 | D = X.shape[1] 62 | M = Z.shape[0] 63 | 64 | layers = [] 65 | 66 | for kern_in, kern_out in zip(kernels[:-1], kernels[1:]): 67 | dim_in = kern_in.input_dim 68 | dim_out = kern_out.input_dim - D 69 | std_in = kern_in.variance.read_value()**0.5 70 | pad = np.random.randn(M, dim_in - D) * 2. * std_in 71 | Z_padded = np.concatenate([Z, pad], 1) 72 | layers.append(Layer(kern_in, Z_padded, dim_out, Zero(), white=white, input_prop_dim=D)) 73 | 74 | dim_in = kernels[-1].input_dim 75 | std_in = kernels[-2].variance.read_value()**0.5 if dim_in > D else 1. 76 | pad = np.random.randn(M, dim_in - D) * 2. * std_in 77 | Z_padded = np.concatenate([Z, pad], 1) 78 | layers.append(Layer(kernels[-1], Z_padded, num_outputs, mean_function, white=white)) 79 | return layers 80 | -------------------------------------------------------------------------------- /doubly_stochastic_dgp/layers.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Hugh Salimbeni 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import tensorflow as tf 16 | import numpy as np 17 | 18 | from gpflow.params import Parameter, Parameterized 19 | from gpflow.conditionals import conditional 20 | from gpflow.features import InducingPoints 21 | from gpflow.kullback_leiblers import gauss_kl 22 | from gpflow.priors import Gaussian as Gaussian_prior 23 | from gpflow import transforms 24 | from gpflow import settings 25 | from gpflow.models.gplvm import BayesianGPLVM 26 | from gpflow.expectations import expectation 27 | from gpflow.probability_distributions import DiagonalGaussian 28 | from gpflow import params_as_tensors 29 | from gpflow.logdensities import multivariate_normal 30 | 31 | 32 | 33 | from doubly_stochastic_dgp.utils import reparameterize 34 | 35 | 36 | class Layer(Parameterized): 37 | def __init__(self, input_prop_dim=None, **kwargs): 38 | """ 39 | A base class for GP layers. Basic functionality for multisample conditional, and input propagation 40 | :param input_prop_dim: the first dimensions of X to propagate. 
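These dimensions are concatenated back onto the samples, means and variances in sample_from_conditional.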
If None (or zero) then no input prop 41 | :param kwargs: 42 | """ 43 | Parameterized.__init__(self, **kwargs) 44 | self.input_prop_dim = input_prop_dim 45 | 46 | def conditional_ND(self, X, full_cov=False): 47 | raise NotImplementedError 48 | 49 | def KL(self): 50 | return tf.cast(0., dtype=settings.float_type) 51 | 52 | def conditional_SND(self, X, full_cov=False): 53 | """ 54 | A multisample conditional, where X is shape (S,N,D_out), independent over samples S 55 | 56 | if full_cov is True 57 | mean is (S,N,D_out), var is (S,N,N,D_out) 58 | 59 | if full_cov is False 60 | mean and var are both (S,N,D_out) 61 | 62 | :param X: The input locations (S,N,D_in) 63 | :param full_cov: Whether to calculate full covariance or just diagonal 64 | :return: mean (S,N,D_out), var (S,N,D_out or S,N,N,D_out) 65 | """ 66 | if full_cov is True: 67 | f = lambda a: self.conditional_ND(a, full_cov=full_cov) 68 | mean, var = tf.map_fn(f, X, dtype=(tf.float64, tf.float64)) 69 | return tf.stack(mean), tf.stack(var) 70 | else: 71 | S, N, D = tf.shape(X)[0], tf.shape(X)[1], tf.shape(X)[2] 72 | X_flat = tf.reshape(X, [S * N, D]) 73 | mean, var = self.conditional_ND(X_flat) 74 | return [tf.reshape(m, [S, N, self.num_outputs]) for m in [mean, var]] 75 | 76 | def sample_from_conditional(self, X, z=None, full_cov=False): 77 | """ 78 | Calculates self.conditional and also draws a sample, adding input propagation if necessary 79 | 80 | If z=None then the tensorflow random_normal function is used to generate the 81 | N(0, 1) samples, otherwise z are used for the whitened sample points 82 | 83 | :param X: Input locations (S,N,D_in) 84 | :param full_cov: Whether to compute correlations between outputs 85 | :param z: None, or the sampled points in whitened representation 86 | :return: mean (S,N,D), var (S,N,N,D or S,N,D), samples (S,N,D) 87 | """ 88 | mean, var = self.conditional_SND(X, full_cov=full_cov) 89 | 90 | # set shapes 91 | S = tf.shape(X)[0] 92 | N = tf.shape(X)[1] 93 | D = self.num_outputs 94 | 95 | mean = tf.reshape(mean, (S, N, D)) 96 | if full_cov: 97 | var = tf.reshape(var, (S, N, N, D)) 98 | else: 99 | var = tf.reshape(var, (S, N, D)) 100 | 101 | if z is None: 102 | z = tf.random_normal(tf.shape(mean), dtype=settings.float_type) 103 | samples = reparameterize(mean, var, z, full_cov=full_cov) 104 | 105 | if self.input_prop_dim: 106 | shape = [tf.shape(X)[0], tf.shape(X)[1], self.input_prop_dim] 107 | X_prop = tf.reshape(X[:, :, :self.input_prop_dim], shape) 108 | 109 | samples = tf.concat([X_prop, samples], 2) 110 | mean = tf.concat([X_prop, mean], 2) 111 | 112 | if full_cov: 113 | shape = (tf.shape(X)[0], tf.shape(X)[1], tf.shape(X)[1], tf.shape(var)[3]) 114 | zeros = tf.zeros(shape, dtype=settings.float_type) 115 | var = tf.concat([zeros, var], 3) 116 | else: 117 | var = tf.concat([tf.zeros_like(X_prop), var], 2) 118 | 119 | return samples, mean, var 120 | 121 | 122 | class SVGP_Layer(Layer): 123 | def __init__(self, kern, Z, num_outputs, mean_function, 124 | white=False, input_prop_dim=None, **kwargs): 125 | """ 126 | A sparse variational GP layer in whitened representation. This layer holds the kernel, 127 | variational parameters, inducing points and mean function. 128 | 129 | The underlying model at inputs X is 130 | f = Lv + mean_function(X), where v \sim N(0, I) and LL^T = kern.K(X) 131 | 132 | The variational distribution over the inducing points is 133 | q(v) = N(q_mu, q_sqrt q_sqrt^T) 134 | 135 | The layer holds D_out independent GPs with the same kernel and inducing points. 
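For reference, conditional_ND below implements the standard sparse-GP marginals
(with S = q_sqrt q_sqrt^T, and A = Lu^{-1} Kuf in the whitened case or
A = Kuu^{-1} Kuf otherwise):

    mean = A^T q_mu + mean_function(X)
    var  = Kff + A^T (S - I) A        (whitened)
    var  = Kff + A^T (S - Kuu) A      (not whitened)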
136 | 137 | :param kern: The kernel for the layer (input_dim = D_in) 138 | :param Z: Inducing points (M, D_in) 139 | :param num_outputs: The number of GP outputs (q_mu is shape (M, num_outputs)) 140 | :param mean_function: The mean function 141 | :return: 142 | """ 143 | Layer.__init__(self, input_prop_dim, **kwargs) 144 | self.num_inducing = Z.shape[0] 145 | 146 | q_mu = np.zeros((self.num_inducing, num_outputs)) 147 | self.q_mu = Parameter(q_mu) 148 | 149 | q_sqrt = np.tile(np.eye(self.num_inducing)[None, :, :], [num_outputs, 1, 1]) 150 | transform = transforms.LowerTriangular(self.num_inducing, num_matrices=num_outputs) 151 | self.q_sqrt = Parameter(q_sqrt, transform=transform) 152 | 153 | self.feature = InducingPoints(Z) 154 | self.kern = kern 155 | self.mean_function = mean_function 156 | 157 | self.num_outputs = num_outputs 158 | self.white = white 159 | 160 | if not self.white: # initialize to prior 161 | Ku = self.kern.compute_K_symm(Z) 162 | Lu = np.linalg.cholesky(Ku + np.eye(Z.shape[0])*settings.jitter) 163 | self.q_sqrt = np.tile(Lu[None, :, :], [num_outputs, 1, 1]) 164 | 165 | self.needs_build_cholesky = True 166 | 167 | @params_as_tensors 168 | def build_cholesky_if_needed(self): 169 | # make sure we only compute this once 170 | if self.needs_build_cholesky: 171 | self.Ku = self.feature.Kuu(self.kern, jitter=settings.jitter) 172 | self.Lu = tf.cholesky(self.Ku) 173 | self.Ku_tiled = tf.tile(self.Ku[None, :, :], [self.num_outputs, 1, 1]) 174 | self.Lu_tiled = tf.tile(self.Lu[None, :, :], [self.num_outputs, 1, 1]) 175 | self.needs_build_cholesky = False 176 | 177 | 178 | def conditional_ND(self, X, full_cov=False): 179 | self.build_cholesky_if_needed() 180 | 181 | # mmean, vvar = conditional(X, self.feature.Z, self.kern, 182 | # self.q_mu, q_sqrt=self.q_sqrt, 183 | # full_cov=full_cov, white=self.white) 184 | Kuf = self.feature.Kuf(self.kern, X) 185 | 186 | A = tf.matrix_triangular_solve(self.Lu, Kuf, lower=True) 187 | if not self.white: 188 | A = tf.matrix_triangular_solve(tf.transpose(self.Lu), A, lower=False) 189 | 190 | mean = tf.matmul(A, self.q_mu, transpose_a=True) 191 | 192 | A_tiled = tf.tile(A[None, :, :], [self.num_outputs, 1, 1]) 193 | I = tf.eye(self.num_inducing, dtype=settings.float_type)[None, :, :] 194 | 195 | if self.white: 196 | SK = -I 197 | else: 198 | SK = -self.Ku_tiled 199 | 200 | if self.q_sqrt is not None: 201 | SK += tf.matmul(self.q_sqrt, self.q_sqrt, transpose_b=True) 202 | 203 | 204 | B = tf.matmul(SK, A_tiled) 205 | 206 | if full_cov: 207 | # (num_latent, num_X, num_X) 208 | delta_cov = tf.matmul(A_tiled, B, transpose_a=True) 209 | Kff = self.kern.K(X) 210 | else: 211 | # (num_latent, num_X) 212 | delta_cov = tf.reduce_sum(A_tiled * B, 1) 213 | Kff = self.kern.Kdiag(X) 214 | 215 | # either (1, num_X) + (num_latent, num_X) or (1, num_X, num_X) + (num_latent, num_X, num_X) 216 | var = tf.expand_dims(Kff, 0) + delta_cov 217 | var = tf.transpose(var) 218 | 219 | return mean + self.mean_function(X), var 220 | 221 | def KL(self): 222 | """ 223 | The KL divergence from the variational distribution to the prior 224 | 225 | :return: KL divergence from N(q_mu, q_sqrt) to N(0, I), independently for each GP 226 | """ 227 | # if self.white: 228 | # return gauss_kl(self.q_mu, self.q_sqrt) 229 | # else: 230 | # return gauss_kl(self.q_mu, self.q_sqrt, self.Ku) 231 | 232 | self.build_cholesky_if_needed() 233 | 234 | KL = -0.5 * self.num_outputs * self.num_inducing 235 | KL -= 0.5 * tf.reduce_sum(tf.log(tf.matrix_diag_part(self.q_sqrt) ** 2)) 236 | 237 | if not 
self.white: 238 | KL += tf.reduce_sum(tf.log(tf.matrix_diag_part(self.Lu))) * self.num_outputs 239 | KL += 0.5 * tf.reduce_sum(tf.square(tf.matrix_triangular_solve(self.Lu_tiled, self.q_sqrt, lower=True))) 240 | Kinv_m = tf.cholesky_solve(self.Lu, self.q_mu) 241 | KL += 0.5 * tf.reduce_sum(self.q_mu * Kinv_m) 242 | else: 243 | KL += 0.5 * tf.reduce_sum(tf.square(self.q_sqrt)) 244 | KL += 0.5 * tf.reduce_sum(self.q_mu**2) 245 | 246 | return KL 247 | 248 | 249 | class SGPMC_Layer(SVGP_Layer): 250 | def __init__(self, *args, **kwargs): 251 | """ 252 | A sparse layer for sampling over the inducing point values 253 | """ 254 | SVGP_Layer.__init__(self, *args, **kwargs) 255 | self.q_mu.prior = Gaussian_prior(0., 1.) 256 | del self.q_sqrt 257 | self.q_sqrt = None 258 | 259 | def KL(self): 260 | return tf.cast(0., dtype=settings.float_type) 261 | 262 | 263 | class GPMC_Layer(Layer): 264 | def __init__(self, kern, X, num_outputs, mean_function, input_prop_dim=None, **kwargs): 265 | """ 266 | A dense layer with fixed inputs. NB X does not change here, and must be the inputs. Minibatches not possible 267 | """ 268 | Layer.__init__(self, input_prop_dim, **kwargs) 269 | self.num_data = X.shape[0] 270 | q_mu = np.zeros((self.num_data, num_outputs)) 271 | self.q_mu = Parameter(q_mu) 272 | self.q_mu.prior = Gaussian_prior(0., 1.) 273 | self.kern = kern 274 | self.mean_function = mean_function 275 | 276 | self.num_outputs = num_outputs 277 | 278 | Ku = self.kern.compute_K_symm(X) + np.eye(self.num_data) * settings.jitter 279 | self.Lu = tf.constant(np.linalg.cholesky(Ku)) 280 | self.X = tf.constant(X) 281 | 282 | def build_latents(self): 283 | f = tf.matmul(self.Lu, self.q_mu) 284 | f += self.mean_function(self.X) 285 | if self.input_prop_dim: 286 | f = tf.concat([self.X[:, :self.input_prop_dim], f], 1) 287 | return f 288 | 289 | def conditional_ND(self, Xnew, full_cov=False): 290 | mu, var = conditional(Xnew, self.X, self.kern, self.q_mu, 291 | full_cov=full_cov, 292 | q_sqrt=None, white=True) 293 | return mu + self.mean_function(Xnew), var 294 | 295 | 296 | class Collapsed_Layer(Layer): 297 | """ 298 | Extra functions for a collapsed layer 299 | """ 300 | def set_data(self, X_mean, X_var, Y, lik_variance): 301 | self._X_mean = X_mean 302 | self._X_var = X_var 303 | self._Y = Y 304 | self._lik_variance = lik_variance 305 | 306 | def build_likelihood(self): 307 | raise NotImplementedError 308 | 309 | 310 | class GPR_Layer(Collapsed_Layer): 311 | def __init__(self, kern, mean_function, num_outputs, **kwargs): 312 | """ 313 | A dense GP layer with a Gaussian likelihood, where the GP is integrated out 314 | """ 315 | Collapsed_Layer.__init__(self, **kwargs) 316 | self.kern = kern 317 | self.mean_function = mean_function 318 | self.num_outputs = num_outputs 319 | 320 | def conditional_ND(self, Xnew, full_cov=False): 321 | ## modified from GPR 322 | Kx = self.kern.K(self._X_mean, Xnew) 323 | K = self.kern.K(self._X_mean) + tf.eye(tf.shape(self._X_mean)[0], dtype=settings.float_type) * self._lik_variance 324 | L = tf.cholesky(K) 325 | A = tf.matrix_triangular_solve(L, Kx, lower=True) 326 | V = tf.matrix_triangular_solve(L, self._Y - self.mean_function(self._X_mean)) 327 | fmean = tf.matmul(A, V, transpose_a=True) + self.mean_function(Xnew) 328 | if full_cov: 329 | fvar = self.kern.K(Xnew) - tf.matmul(A, A, transpose_a=True) 330 | shape = tf.stack([1, 1, tf.shape(self._Y)[1]]) 331 | fvar = tf.tile(tf.expand_dims(fvar, 2), shape) 332 | else: 333 | fvar = self.kern.Kdiag(Xnew) - tf.reduce_sum(tf.square(A), 0) 334 | 
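# the same predictive variance applies to every output, so tile it across the columns of Y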
fvar = tf.tile(tf.reshape(fvar, (-1, 1)), [1, tf.shape(self._Y)[1]]) 335 | return fmean, fvar 336 | 337 | def build_likelihood(self): 338 | ## modified from GPR 339 | K = self.kern.K(self._X_mean) + tf.eye(tf.shape(self._X_mean)[0], dtype=settings.float_type) * self._lik_variance 340 | L = tf.cholesky(K) 341 | m = self.mean_function(self._X_mean) 342 | return tf.reduce_sum(multivariate_normal(self._Y, m, L)) 343 | 344 | 345 | class SGPR_Layer(Collapsed_Layer): 346 | def __init__(self, kern, Z, num_outputs, mean_function, **kwargs): 347 | """ 348 | A sparse variational GP layer with a Gaussian likelihood, where the 349 | GP is integrated out 350 | 351 | :kern: The kernel for the layer (input_dim = D_in) 352 | :param Z: Inducing points (M, D_in) 353 | :param mean_function: The mean function 354 | :return: 355 | """ 356 | 357 | Collapsed_Layer.__init__(self, **kwargs) 358 | self.feature = InducingPoints(Z) 359 | self.kern = kern 360 | self.mean_function = mean_function 361 | self.num_outputs = num_outputs 362 | 363 | def conditional_ND(self, Xnew, full_cov=False): 364 | return gplvm_build_predict(self, Xnew, self._X_mean, self._X_var, self._Y, self._lik_variance, full_cov=full_cov) 365 | 366 | def build_likelihood(self): 367 | return gplvm_build_likelihood(self, self._X_mean, self._X_var, self._Y, self._lik_variance) 368 | 369 | 370 | ################## From gpflow (with KL removed) 371 | def gplvm_build_likelihood(self, X_mean, X_var, Y, variance): 372 | if X_var is None: 373 | # SGPR 374 | num_inducing = len(self.feature) 375 | num_data = tf.cast(tf.shape(Y)[0], settings.float_type) 376 | output_dim = tf.cast(tf.shape(Y)[1], settings.float_type) 377 | 378 | err = Y - self.mean_function(X_mean) 379 | Kdiag = self.kern.Kdiag(X_mean) 380 | Kuf = self.feature.Kuf(self.kern, X_mean) 381 | Kuu = self.feature.Kuu(self.kern, jitter=settings.numerics.jitter_level) 382 | L = tf.cholesky(Kuu) 383 | sigma = tf.sqrt(variance) 384 | 385 | # Compute intermediate matrices 386 | A = tf.matrix_triangular_solve(L, Kuf, lower=True) / sigma 387 | AAT = tf.matmul(A, A, transpose_b=True) 388 | B = AAT + tf.eye(num_inducing, dtype=settings.float_type) 389 | LB = tf.cholesky(B) 390 | Aerr = tf.matmul(A, err) 391 | c = tf.matrix_triangular_solve(LB, Aerr, lower=True) / sigma 392 | 393 | # compute log marginal bound 394 | bound = -0.5 * num_data * output_dim * np.log(2 * np.pi) 395 | bound += tf.negative(output_dim) * tf.reduce_sum(tf.log(tf.matrix_diag_part(LB))) 396 | bound -= 0.5 * num_data * output_dim * tf.log(variance) 397 | bound += -0.5 * tf.reduce_sum(tf.square(err)) / variance 398 | bound += 0.5 * tf.reduce_sum(tf.square(c)) 399 | bound += -0.5 * output_dim * tf.reduce_sum(Kdiag) / variance 400 | bound += 0.5 * output_dim * tf.reduce_sum(tf.matrix_diag_part(AAT)) 401 | 402 | return bound 403 | 404 | 405 | else: 406 | 407 | X_cov = tf.matrix_diag(X_var) 408 | pX = DiagonalGaussian(X_mean, X_var) 409 | num_inducing = len(self.feature) 410 | if hasattr(self.kern, 'X_input_dim'): 411 | psi0 = tf.reduce_sum(self.kern.eKdiag(X_mean, X_cov)) 412 | psi1 = self.kern.eKxz(self.feature.Z, X_mean, X_cov) 413 | psi2 = tf.reduce_sum(self.kern.eKzxKxz(self.feature.Z, X_mean, X_cov), 0) 414 | else: 415 | psi0 = tf.reduce_sum(expectation(pX, self.kern)) 416 | psi1 = expectation(pX, (self.kern, self.feature)) 417 | psi2 = tf.reduce_sum(expectation(pX, (self.kern, self.feature), (self.kern, self.feature)), axis=0) 418 | Kuu = self.feature.Kuu(self.kern, jitter=settings.numerics.jitter_level) 419 | L = tf.cholesky(Kuu) 420 | 
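# the rest follows GPflow's Bayesian GPLVM bound (Titsias & Lawrence, 2010); the KL[q(X) || p(X)] term is dropped here because the DGP layer KLs account for it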
sigma2 = variance 421 | sigma = tf.sqrt(sigma2) 422 | 423 | # Compute intermediate matrices 424 | A = tf.matrix_triangular_solve(L, tf.transpose(psi1), lower=True) / sigma 425 | tmp = tf.matrix_triangular_solve(L, psi2, lower=True) 426 | AAT = tf.matrix_triangular_solve(L, tf.transpose(tmp), lower=True) / sigma2 427 | B = AAT + tf.eye(num_inducing, dtype=settings.float_type) 428 | LB = tf.cholesky(B) 429 | log_det_B = 2. * tf.reduce_sum(tf.log(tf.matrix_diag_part(LB))) 430 | c = tf.matrix_triangular_solve(LB, tf.matmul(A, Y), lower=True) / sigma 431 | 432 | # KL[q(x) || p(x)] 433 | # dX_var = self.X_var if len(self.X_var.get_shape()) == 2 else tf.matrix_diag_part(self.X_var) 434 | # NQ = tf.cast(tf.size(self.X_mean), settings.float_type) 435 | D = tf.cast(tf.shape(Y)[1], settings.float_type) 436 | # KL = -0.5 * tf.reduce_sum(tf.log(dX_var)) \ 437 | # + 0.5 * tf.reduce_sum(tf.log(self.X_prior_var)) \ 438 | # - 0.5 * NQ \ 439 | # + 0.5 * tf.reduce_sum((tf.square(self.X_mean - self.X_prior_mean) + dX_var) / self.X_prior_var) 440 | 441 | # compute log marginal bound 442 | ND = tf.cast(tf.size(Y), settings.float_type) 443 | bound = -0.5 * ND * tf.log(2 * np.pi * sigma2) 444 | bound += -0.5 * D * log_det_B 445 | bound += -0.5 * tf.reduce_sum(tf.square(Y)) / sigma2 446 | bound += 0.5 * tf.reduce_sum(tf.square(c)) 447 | bound += -0.5 * D * (tf.reduce_sum(psi0) / sigma2 - 448 | tf.reduce_sum(tf.matrix_diag_part(AAT))) 449 | # bound -= KL # don't need this term 450 | return bound 451 | 452 | ############# Exactly from gpflow 453 | def gplvm_build_predict(self, Xnew, X_mean, X_var, Y, variance, full_cov=False): 454 | if X_var is None: 455 | # SGPR 456 | num_inducing = len(self.feature) 457 | err = Y - self.mean_function(X_mean) 458 | Kuf = self.feature.Kuf(self.kern, X_mean) 459 | Kuu = self.feature.Kuu(self.kern, jitter=settings.numerics.jitter_level) 460 | Kus = self.feature.Kuf(self.kern, Xnew) 461 | sigma = tf.sqrt(variance) 462 | L = tf.cholesky(Kuu) 463 | A = tf.matrix_triangular_solve(L, Kuf, lower=True) / sigma 464 | B = tf.matmul(A, A, transpose_b=True) + tf.eye(num_inducing, dtype=settings.float_type) 465 | LB = tf.cholesky(B) 466 | Aerr = tf.matmul(A, err) 467 | c = tf.matrix_triangular_solve(LB, Aerr, lower=True) / sigma 468 | tmp1 = tf.matrix_triangular_solve(L, Kus, lower=True) 469 | tmp2 = tf.matrix_triangular_solve(LB, tmp1, lower=True) 470 | mean = tf.matmul(tmp2, c, transpose_a=True) 471 | if full_cov: 472 | var = self.kern.K(Xnew) + tf.matmul(tmp2, tmp2, transpose_a=True) \ 473 | - tf.matmul(tmp1, tmp1, transpose_a=True) 474 | shape = tf.stack([1, 1, tf.shape(Y)[1]]) 475 | var = tf.tile(tf.expand_dims(var, 2), shape) 476 | else: 477 | var = self.kern.Kdiag(Xnew) + tf.reduce_sum(tf.square(tmp2), 0) \ 478 | - tf.reduce_sum(tf.square(tmp1), 0) 479 | shape = tf.stack([1, tf.shape(Y)[1]]) 480 | var = tf.tile(tf.expand_dims(var, 1), shape) 481 | return mean + self.mean_function(Xnew), var 482 | 483 | else: 484 | # gplvm 485 | pX = DiagonalGaussian(X_mean, X_var) 486 | num_inducing = len(self.feature) 487 | 488 | X_cov = tf.matrix_diag(X_var) 489 | 490 | if hasattr(self.kern, 'X_input_dim'): 491 | psi1 = self.kern.eKxz(self.feature.Z, X_mean, X_cov) 492 | psi2 = tf.reduce_sum(self.kern.eKzxKxz(self.feature.Z, X_mean, X_cov), 0) 493 | else: 494 | psi1 = expectation(pX, (self.kern, self.feature)) 495 | psi2 = tf.reduce_sum(expectation(pX, (self.kern, self.feature), (self.kern, self.feature)), axis=0) 496 | 497 | # psi1 = expectation(pX, (self.kern, self.feature)) 498 | # psi2 = 
tf.reduce_sum(expectation(pX, (self.kern, self.feature), (self.kern, self.feature)), axis=0) 499 | 500 | Kuu = self.feature.Kuu(self.kern, jitter=settings.numerics.jitter_level) 501 | Kus = self.feature.Kuf(self.kern, Xnew) 502 | sigma2 = variance 503 | sigma = tf.sqrt(sigma2) 504 | L = tf.cholesky(Kuu) 505 | 506 | A = tf.matrix_triangular_solve(L, tf.transpose(psi1), lower=True) / sigma 507 | tmp = tf.matrix_triangular_solve(L, psi2, lower=True) 508 | AAT = tf.matrix_triangular_solve(L, tf.transpose(tmp), lower=True) / sigma2 509 | B = AAT + tf.eye(num_inducing, dtype=settings.float_type) 510 | LB = tf.cholesky(B) 511 | c = tf.matrix_triangular_solve(LB, tf.matmul(A, Y), lower=True) / sigma 512 | tmp1 = tf.matrix_triangular_solve(L, Kus, lower=True) 513 | tmp2 = tf.matrix_triangular_solve(LB, tmp1, lower=True) 514 | mean = tf.matmul(tmp2, c, transpose_a=True) 515 | if full_cov: 516 | var = self.kern.K(Xnew) + tf.matmul(tmp2, tmp2, transpose_a=True) \ 517 | - tf.matmul(tmp1, tmp1, transpose_a=True) 518 | shape = tf.stack([1, 1, tf.shape(Y)[1]]) 519 | var = tf.tile(tf.expand_dims(var, 2), shape) 520 | else: 521 | var = self.kern.Kdiag(Xnew) + tf.reduce_sum(tf.square(tmp2), 0) \ 522 | - tf.reduce_sum(tf.square(tmp1), 0) 523 | shape = tf.stack([1, tf.shape(Y)[1]]) 524 | var = tf.tile(tf.expand_dims(var, 1), shape) 525 | return mean + self.mean_function(Xnew), var 526 | -------------------------------------------------------------------------------- /doubly_stochastic_dgp/model_zoo.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Hugh Salimbeni 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
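# Model variants assembled from DGP_Base: DGP_Collapsed integrates out a final Gaussian-likelihood layer analytically; DGP_Heinonen uses HMC over a dense inner layer (see the class docstrings below).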
14 | 15 | import tensorflow as tf 16 | 17 | from gpflow import params_as_tensors 18 | from gpflow.likelihoods import Gaussian 19 | from gpflow import settings 20 | 21 | from doubly_stochastic_dgp.dgp import DGP_Base 22 | from doubly_stochastic_dgp.layers import GPR_Layer, GPMC_Layer 23 | 24 | 25 | class DGP_Collapsed(DGP_Base): 26 | @params_as_tensors 27 | def inner_layers_propagate(self, X, full_cov=False, S=1, zs=None): 28 | sX = tf.tile(tf.expand_dims(X, 0), [S, 1, 1]) 29 | 30 | if len(self.layers)==1: 31 | return [sX], [sX], [tf.zeros_like(sX)] 32 | 33 | Fs, Fmeans, Fvars = [], [], [] 34 | 35 | F = sX 36 | zs = zs or [None, ] * len(self.layers) 37 | for layer, z in zip(self.layers[:-1], zs[:-1]): 38 | F, Fmean, Fvar = layer.sample_from_conditional(F, z=z, full_cov=full_cov) 39 | 40 | Fs.append(F) 41 | Fmeans.append(Fmean) 42 | Fvars.append(Fvar) 43 | 44 | return Fs, Fmeans, Fvars 45 | 46 | @params_as_tensors 47 | def propagate(self, X, full_cov=False, S=1, zs=None): 48 | _, ms, vs = self.inner_layers_propagate(self.X, full_cov=full_cov, zs=zs) 49 | self.layers[-1].set_data(ms[-1][0], vs[-1][0], self.Y, self.likelihood.likelihood.variance) 50 | return DGP_Base.propagate(self, X, full_cov=full_cov, S=S, zs=zs) 51 | 52 | @params_as_tensors 53 | def _build_likelihood(self): 54 | _, ms, vs = self.inner_layers_propagate(self.X, full_cov=False) 55 | self.layers[-1].set_data(ms[-1][0], vs[-1][0], self.Y, self.likelihood.likelihood.variance) 56 | KL = tf.cast(tf.reduce_sum([layer.KL() for layer in self.layers[:-1]]), dtype=settings.float_type) 57 | return self.layers[-1].build_likelihood() - KL 58 | 59 | 60 | class DGP_Heinonen(DGP_Collapsed): 61 | """ 62 | A dense 2 layer DGP, with HMC for inference over the inner layer 63 | 64 | This is only applicable for 2 layer case with a Gaussian likelihood and no minibatches 65 | 66 | This is based on the following paper: 67 | 68 | @inproceedings{heinonen2016non, 69 | title={Non-stationary gaussian process regression with hamiltonian monte carlo}, 70 | author={Heinonen, Markus and Mannerstr{\"o}m, Henrik and Rousu, Juho and Kaski, Samuel and L{\"a}hdesm{\"a}ki, Harri}, 71 | booktitle={Artificial Intelligence and Statistics}, 72 | year={2016} 73 | } 74 | 75 | """ 76 | def __init__(self, X, Y, likelihood, layers, **kwargs): 77 | assert len(layers) == 2 78 | assert isinstance(likelihood, Gaussian) 79 | assert isinstance(layers[0], GPMC_Layer) 80 | assert isinstance(layers[1], GPR_Layer) 81 | if 'minibatch_size' in kwargs: 82 | assert kwargs['minibatch_size'] is None 83 | DGP_Collapsed.__init__(self, X, Y, likelihood, layers, **kwargs) 84 | 85 | @params_as_tensors 86 | def inner_layers_propagate(self, X, full_cov=False, S=1, zs=None): 87 | f = self.layers[0].build_latents()[None, :, :] 88 | return [f], [f], [tf.zeros_like(f)] 89 | 90 | 91 | # TODO 92 | # class DGP_Damianou(Parameterized): 93 | # """ 94 | # The inference from 95 | # 96 | # @inproceedings{damianou2013deep, 97 | # title={Deep gaussian processes}, 98 | # author={Damianou, Andreas and Lawrence, Neil}, 99 | # booktitle={Artificial Intelligence and Statistics}, 100 | # year={2013} 101 | # } 102 | # 103 | # """ 104 | # 105 | -------------------------------------------------------------------------------- /doubly_stochastic_dgp/utils.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Hugh Salimbeni 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import tensorflow as tf 16 | 17 | from gpflow import settings 18 | from gpflow import params_as_tensors, Parameterized 19 | from gpflow.likelihoods import Gaussian 20 | 21 | 22 | def reparameterize(mean, var, z, full_cov=False): 23 | """ 24 | Implements the 'reparameterization trick' for the Gaussian, either full rank or diagonal 25 | 26 | If the z is a sample from N(0, 1), the output is a sample from N(mean, var) 27 | 28 | If full_cov=True then var must be of shape S,N,N,D and the full covariance is used. Otherwise 29 | var must be S,N,D and the operation is elementwise 30 | 31 | :param mean: mean of shape S,N,D 32 | :param var: covariance of shape S,N,D or S,N,N,D 33 | :param z: samples form unit Gaussian of shape S,N,D 34 | :param full_cov: bool to indicate whether var is of shape S,N,N,D or S,N,D 35 | :return sample from N(mean, var) of shape S,N,D 36 | """ 37 | if var is None: 38 | return mean 39 | 40 | if full_cov is False: 41 | return mean + z * (var + settings.jitter) ** 0.5 42 | 43 | else: 44 | S, N, D = tf.shape(mean)[0], tf.shape(mean)[1], tf.shape(mean)[2] # var is SNND 45 | mean = tf.transpose(mean, (0, 2, 1)) # SND -> SDN 46 | var = tf.transpose(var, (0, 3, 1, 2)) # SNND -> SDNN 47 | I = settings.jitter * tf.eye(N, dtype=settings.float_type)[None, None, :, :] # 11NN 48 | chol = tf.cholesky(var + I) # SDNN 49 | z_SDN1 = tf.transpose(z, [0, 2, 1])[:, :, :, None] # SND->SDN1 50 | f = mean + tf.matmul(chol, z_SDN1)[:, :, :, 0] # SDN(1) 51 | return tf.transpose(f, (0, 2, 1)) # SND 52 | 53 | 54 | class BroadcastingLikelihood(Parameterized): 55 | """ 56 | A wrapper for the likelihood to broadcast over the samples dimension. The Gaussian doesn't 57 | need this, but for the others we can apply reshaping and tiling. 
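Concretely, _broadcast flattens the (S, N, D) arguments to (S*N, D), tiles the (N, D) arguments S times to match, applies the wrapped likelihood once, and reshapes the result back to (S, N, -1).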
58 | 59 | With this wrapper all likelihood functions behave correctly with inputs of shape S,N,D, 60 | but with Y still of shape N,D 61 | """ 62 | def __init__(self, likelihood): 63 | Parameterized.__init__(self) 64 | self.likelihood = likelihood 65 | 66 | if isinstance(likelihood, Gaussian): 67 | self.needs_broadcasting = False 68 | else: 69 | self.needs_broadcasting = True 70 | 71 | def _broadcast(self, f, vars_SND, vars_ND): 72 | if self.needs_broadcasting is False: 73 | return f(vars_SND, [tf.expand_dims(v, 0) for v in vars_ND]) 74 | 75 | else: 76 | S, N, D = [tf.shape(vars_SND[0])[i] for i in range(3)] 77 | vars_tiled = [tf.tile(x[None, :, :], [S, 1, 1]) for x in vars_ND] 78 | 79 | flattened_SND = [tf.reshape(x, [S*N, D]) for x in vars_SND] 80 | flattened_tiled = [tf.reshape(x, [S*N, -1]) for x in vars_tiled] 81 | 82 | flattened_result = f(flattened_SND, flattened_tiled) 83 | if isinstance(flattened_result, tuple): 84 | return [tf.reshape(x, [S, N, -1]) for x in flattened_result] 85 | else: 86 | return tf.reshape(flattened_result, [S, N, -1]) 87 | 88 | @params_as_tensors 89 | def variational_expectations(self, Fmu, Fvar, Y): 90 | f = lambda vars_SND, vars_ND: self.likelihood.variational_expectations(vars_SND[0], 91 | vars_SND[1], 92 | vars_ND[0]) 93 | return self._broadcast(f,[Fmu, Fvar], [Y]) 94 | 95 | @params_as_tensors 96 | def logp(self, F, Y): 97 | f = lambda vars_SND, vars_ND: self.likelihood.logp(vars_SND[0], vars_ND[0]) 98 | return self._broadcast(f, [F], [Y]) 99 | 100 | @params_as_tensors 101 | def conditional_mean(self, F): 102 | f = lambda vars_SND, vars_ND: self.likelihood.conditional_mean(vars_SND[0]) 103 | return self._broadcast(f,[F], []) 104 | 105 | @params_as_tensors 106 | def conditional_variance(self, F): 107 | f = lambda vars_SND, vars_ND: self.likelihood.conditional_variance(vars_SND[0]) 108 | return self._broadcast(f,[F], []) 109 | 110 | @params_as_tensors 111 | def predict_mean_and_var(self, Fmu, Fvar): 112 | f = lambda vars_SND, vars_ND: self.likelihood.predict_mean_and_var(vars_SND[0], 113 | vars_SND[1]) 114 | return self._broadcast(f,[Fmu, Fvar], []) 115 | 116 | @params_as_tensors 117 | def predict_density(self, Fmu, Fvar, Y): 118 | f = lambda vars_SND, vars_ND: self.likelihood.predict_density(vars_SND[0], 119 | vars_SND[1], 120 | vars_ND[0]) 121 | return self._broadcast(f,[Fmu, Fvar], [Y]) 122 | -------------------------------------------------------------------------------- /media/DGP_presentation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/UCL-SML/Doubly-Stochastic-DGP/31527b596e52c453c9810f9e936e65f55da8a365/media/DGP_presentation.pdf -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup(name='Doubly-Stochastic-DGP', 4 | version='1.0', 5 | author="Hugh Salimbeni", 6 | author_email="hrs13@ic.ac.uk", 7 | license="Apache License 2.0", 8 | packages=["doubly_stochastic_dgp"], 9 | classifiers=['License :: OSI Approved :: Apache Software License'] 10 | ) 11 | 12 | -------------------------------------------------------------------------------- /tests/test_collapsed.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import numpy as np 3 | 4 | from numpy.testing import assert_allclose 5 | 6 | from gpflow.models.svgp import SVGP 7 | from gpflow.models import GPR 8 | from 
gpflow.kernels import Matern52, RBF 9 | from gpflow.likelihoods import Gaussian, Bernoulli, MultiClass 10 | from gpflow.training import ScipyOptimizer 11 | from gpflow.training import NatGradOptimizer 12 | from doubly_stochastic_dgp.dgp import DGP, DGP_Base, DGP_Quad 13 | from doubly_stochastic_dgp.model_zoo import DGP_Collapsed 14 | from doubly_stochastic_dgp.layers import SGPR_Layer 15 | from doubly_stochastic_dgp.layer_initializations import init_layers_linear 16 | 17 | np.random.seed(100) 18 | 19 | class TestVsSingleLayer(unittest.TestCase): 20 | def setUp(self): 21 | Ns, N, M, D_X, D_Y = 5, 4, 2, 3, 2 22 | self.lik_var = 0.1 23 | 24 | self.X = np.random.uniform(size=(N, D_X)) 25 | self.Y = np.random.uniform(size=(N, D_Y)) 26 | self.Z = np.random.uniform(size=(M, D_Y)) 27 | self.Xs = np.random.uniform(size=(Ns, D_X)) 28 | self.D_Y = D_Y 29 | 30 | def test_single_layer(self): 31 | kern = RBF(1, lengthscales=0.1) 32 | layers = init_layers_linear(self.X, self.Y, self.X, [kern]) 33 | 34 | lik = Gaussian() 35 | lik.variance = self.lik_var 36 | 37 | last_layer = SGPR_Layer(layers[-1].kern, 38 | layers[-1].feature.Z.read_value(), 39 | self.D_Y, 40 | layers[-1].mean_function) 41 | layers = layers[:-1] + [last_layer] 42 | 43 | m_dgp = DGP_Collapsed(self.X, self.Y, lik, layers) 44 | L_dgp = m_dgp.compute_log_likelihood() 45 | mean_dgp, var_dgp = m_dgp.predict_f_full_cov(self.Xs, 1) 46 | 47 | m_exact = GPR(self.X, self.Y, kern) 48 | m_exact.likelihood.variance = self.lik_var 49 | L_exact = m_exact.compute_log_likelihood() 50 | mean_exact, var_exact = m_exact.predict_f_full_cov(self.Xs) 51 | 52 | assert_allclose(L_dgp, L_exact, atol=1e-5, rtol=1e-5) 53 | assert_allclose(mean_dgp[0], mean_exact, atol=1e-5, rtol=1e-5) 54 | assert_allclose(var_dgp[0], var_exact, atol=1e-5, rtol=1e-5) 55 | 56 | 57 | class TestVsNatGrads(unittest.TestCase): 58 | def test_2layer_vs_nat_grad(self): 59 | Ns, N, M = 5, 1, 50 60 | D_X, D_Y = 1, 1 61 | 62 | lik_var = 0.1 63 | 64 | X = np.random.uniform(size=(N, D_X)) 65 | Y = np.random.uniform(size=(N, D_Y)) 66 | Z = np.random.uniform(size=(M, D_Y)) 67 | Xs = np.random.uniform(size=(Ns, D_X)) 68 | 69 | Z[:N, :] = X[:M, :] 70 | 71 | def kerns(): 72 | return [RBF(D_X, lengthscales=0.1), 73 | RBF(D_X, lengthscales=0.5)] 74 | layers_col = init_layers_linear(X, Y, Z, kerns()) 75 | layers_ng = init_layers_linear(X, Y, Z, kerns()) 76 | 77 | def lik(): 78 | l = Gaussian() 79 | l.variance = lik_var 80 | return l 81 | 82 | last_layer = SGPR_Layer(layers_col[-1].kern, 83 | layers_col[-1].feature.Z.read_value(), 84 | D_Y, 85 | layers_col[-1].mean_function) 86 | 87 | layers_col = layers_col[:-1] + [last_layer] 88 | m_col = DGP_Collapsed(X, Y, lik(), layers_col) 89 | m_ng = DGP_Quad(X, Y, lik(), layers_ng, H=200) 90 | 91 | q_mu1 = np.random.randn(M, D_X) 92 | q_sqrt1 = np.random.randn(M, M) 93 | q_sqrt1 = np.tril(q_sqrt1)[None, :, :] 94 | 95 | for m in m_col, m_ng: 96 | m.layers[0].q_mu = q_mu1 97 | m.layers[0].q_sqrt = q_sqrt1 98 | 99 | p = [[m_ng.layers[-1].q_mu, m_ng.layers[-1].q_sqrt]] 100 | NatGradOptimizer(gamma=1.).minimize(m_ng, var_list=p, maxiter=1) 101 | 102 | 103 | assert_allclose(m_col.compute_log_likelihood(), 104 | m_ng.compute_log_likelihood()) 105 | 106 | 107 | if __name__ == '__main__': 108 | unittest.main() 109 | -------------------------------------------------------------------------------- /tests/test_dgp.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import numpy as np 3 | 4 | from gpflow import settings 
as _settings 5 | from gpflow import session_manager as _session_manager 6 | 7 | custom_config = _settings.get_settings() 8 | custom_config.numerics.jitter_level = 1e-18 9 | 10 | with _settings.temp_settings(custom_config),\ 11 | _session_manager.get_session().as_default(): 12 | 13 | from numpy.testing import assert_allclose 14 | 15 | from gpflow.training import NatGradOptimizer 16 | 17 | from gpflow.models.svgp import SVGP 18 | from gpflow.kernels import Matern52, RBF 19 | from gpflow.likelihoods import Gaussian, Bernoulli, MultiClass 20 | from gpflow.training import ScipyOptimizer 21 | from gpflow.params import Parameter 22 | 23 | from doubly_stochastic_dgp.dgp import DGP, DGP_Base, DGP_Quad 24 | from doubly_stochastic_dgp.layer_initializations import init_layers_linear 25 | np.random.seed(0) 26 | 27 | class TestVsSingleLayer(unittest.TestCase): 28 | def setUp(self): 29 | Ns, N, D_X, D_Y = 20, 19, 2, 3 30 | np.random.seed(0) 31 | self.X = np.random.uniform(size=(N, D_X)) 32 | self.Xs = np.random.uniform(size=(Ns, D_X)) 33 | self.q_mu = np.random.randn(N, D_Y) 34 | self.q_sqrt = 0.001*np.eye(N)[None, :, :] * np.ones((D_Y, 1, 1))#np.tril(np.random.randn(D_Y, N, N))**2 35 | 36 | self.D_Y = D_Y 37 | 38 | def test_gaussian(self): 39 | lik = Gaussian() 40 | lik.variance = 0.01 41 | N, Ns, D_Y = self.X.shape[0], self.Xs.shape[0], self.D_Y 42 | Y = np.random.randn(N, D_Y) 43 | Ys = np.random.randn(Ns, D_Y) 44 | for L in [2]: 45 | for white in [True, False]: 46 | self.compare_to_single_layer(Y, Ys, lik, L, white) 47 | 48 | def test_bernoulli(self): 49 | lik = Bernoulli() 50 | N, Ns, D_Y = self.X.shape[0], self.Xs.shape[0], self.D_Y 51 | Y = np.random.choice([-1., 1.], N*D_Y).reshape(N, D_Y) 52 | Ys = np.random.choice([-1., 1.], Ns*D_Y).reshape(Ns, D_Y) 53 | for L in [1, 2]: 54 | self.compare_to_single_layer(Y, Ys, lik, L, True) 55 | 56 | def test_multiclass(self): 57 | K = 3 58 | lik = MultiClass(K) 59 | N, Ns, D_Y = self.X.shape[0], self.Xs.shape[0], self.D_Y 60 | Y = np.random.choice([0., 1., 2.], N * 1).reshape(N, 1) 61 | Ys = np.random.choice([0., 1., 2.], Ns * 1).reshape(Ns, 1) 62 | for L in [1, 2]: 63 | self.compare_to_single_layer(Y, Ys, lik, L, True, num_outputs=K) 64 | 65 | def compare_to_single_layer(self, Y, Ys, lik, L, white, num_outputs=None): 66 | kern = Matern52(self.X.shape[1], lengthscales=0.5) 67 | 68 | m_svgp = SVGP(self.X, Y, kern, lik, Z=self.X, whiten=white, num_latent=num_outputs) 69 | m_svgp.q_mu = self.q_mu 70 | m_svgp.q_sqrt = self.q_sqrt 71 | 72 | 73 | L_svgp = m_svgp.compute_log_likelihood() 74 | mean_svgp, var_svgp = m_svgp.predict_y(self.Xs) 75 | test_lik_svgp = m_svgp.predict_density(self.Xs, Ys) 76 | pred_m_svgp, pred_v_svgp = m_svgp.predict_f(self.Xs) 77 | pred_mfull_svgp, pred_vfull_svgp = m_svgp.predict_f_full_cov(self.Xs) 78 | 79 | kerns = [] 80 | for _ in range(L-1): 81 | class NoTransformMatern52(Matern52): 82 | def __init__(self, *args, variance=1., **kwargs): 83 | Matern52.__init__(self, *args, **kwargs) 84 | del self.variance 85 | self.variance = Parameter(variance) 86 | 87 | kerns.append(NoTransformMatern52(self.X.shape[1], variance=1e-24, lengthscales=0.5)) 88 | kerns.append(kern) 89 | 90 | m_dgp = DGP(self.X, Y, self.X, kerns, lik, white=white, num_samples=2, num_outputs=num_outputs) 91 | m_dgp.layers[-1].q_mu = self.q_mu 92 | m_dgp.layers[-1].q_sqrt = self.q_sqrt 93 | 94 | L_dgp = m_dgp.compute_log_likelihood() 95 | mean_dgp, var_dgp = m_dgp.predict_y(self.Xs, 1) 96 | test_lik_dgp = m_dgp.predict_density(self.Xs, Ys, 1) 97 | 98 | pred_m_dgp, pred_v_dgp 
= m_dgp.predict_f(self.Xs, 1) 99 | pred_mfull_dgp, pred_vfull_dgp = m_dgp.predict_f_full_cov(self.Xs, 1) 100 | 101 | if L == 1: # these should all be exactly the same 102 | atol = 1e-7 103 | rtol = 1e-7 104 | else: # jitter makes these not exactly equal 105 | atol = 1e-6 106 | rtol = 1e-6 107 | 108 | assert_allclose(L_svgp, L_dgp, rtol=rtol, atol=atol) 109 | 110 | assert_allclose(mean_svgp, mean_dgp[0], rtol=rtol, atol=atol) 111 | assert_allclose(var_svgp, var_dgp[0], rtol=rtol, atol=atol) 112 | assert_allclose(test_lik_svgp, test_lik_dgp, rtol=rtol, atol=atol) 113 | 114 | assert_allclose(pred_m_dgp[0], pred_m_svgp, rtol=rtol, atol=atol) 115 | assert_allclose(pred_v_dgp[0], pred_v_svgp, rtol=rtol, atol=atol) 116 | assert_allclose(pred_mfull_dgp[0], pred_mfull_svgp, rtol=rtol, atol=atol) 117 | assert_allclose(pred_vfull_dgp[0], pred_vfull_svgp, rtol=rtol, atol=atol) 118 | 119 | 120 | class TestQuad(unittest.TestCase): 121 | def test_quadrature(self): 122 | N = 2 123 | np.random.seed(0) 124 | X = np.random.uniform(size=(N, 1)) 125 | Y = np.sin(20*X) + np.random.randn(*X.shape) * 0.001 126 | 127 | kernels = lambda : [RBF(1, lengthscales=0.1), RBF(1, lengthscales=0.1)] 128 | layers = lambda : init_layers_linear(X, Y, X, kernels()) 129 | def lik(): 130 | l = Gaussian() 131 | l.variance = 0.01 132 | return l 133 | 134 | m_stochastic = DGP_Base(X, Y, lik(), layers(), num_samples=100) 135 | # it seems 300 is necessary, which suggests that quadrature isn't very easy 136 | m_quad = DGP_Quad(X, Y, lik(), layers(), H=300) 137 | 138 | # q_mu_0 = np.random.randn(N, 1) 139 | # q_sqrt_0 = np.random.randn(1, N, N)**2 140 | # 141 | # q_mu_1 = np.random.randn(N, 1) 142 | # q_sqrt_1 = np.random.randn(1, N, N)**2 143 | 144 | for model in m_quad, m_stochastic: 145 | model.set_trainable(False) 146 | for layer in model.layers: 147 | layer.q_mu.set_trainable(True) 148 | layer.q_sqrt.set_trainable(True) 149 | 150 | ScipyOptimizer().minimize(m_quad, maxiter=500) 151 | 152 | q_mu_0 = m_quad.layers[0].q_mu.read_value() 153 | q_sqrt_0 = m_quad.layers[0].q_sqrt.read_value() 154 | 155 | q_mu_1 = m_quad.layers[1].q_mu.read_value() 156 | q_sqrt_1 = m_quad.layers[1].q_sqrt.read_value() 157 | 158 | for model in m_stochastic, m_quad: 159 | model.layers[0].q_mu = q_mu_0 160 | model.layers[0].q_sqrt = q_sqrt_0 161 | 162 | model.layers[1].q_mu = q_mu_1 163 | model.layers[1].q_sqrt = q_sqrt_1 164 | 165 | Ls_quad = [m_quad.compute_log_likelihood() for _ in range(2)] 166 | Ls_stochastic = [m_stochastic.compute_log_likelihood() for _ in range(1000)] 167 | 168 | assert_allclose(Ls_quad[0], Ls_quad[1]) # quadrature should be determinsitic 169 | m = np.average(Ls_stochastic) 170 | std_err = np.std(Ls_stochastic)/(float(len(Ls_stochastic))**0.5) 171 | print('sampling average {}'.format(m)) 172 | print('sampling std eff {}'.format(std_err)) 173 | print('quad val {}'.format(Ls_quad[0])) 174 | assert np.abs(Ls_quad[0] - m) < std_err * 3 # 99.73% CI 175 | 176 | class TestStepUp(unittest.TestCase): 177 | def test(self): 178 | kern1 = RBF(1) 179 | kern2 = RBF(2) 180 | lik = Gaussian() 181 | X = np.zeros((1, 1)) 182 | model = DGP(X, X, X, [kern1, kern2], lik) 183 | model.compute_log_likelihood() 184 | 185 | 186 | if __name__ == '__main__': 187 | unittest.main() 188 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # # Copyright 2017 Hugh Salimbeni 2 | # # 3 | # # Licensed under the Apache License, Version 
2.0 (the "License"); 4 | # # you may not use this file except in compliance with the License. 5 | # # You may obtain a copy of the License at 6 | # # 7 | # # http://www.apache.org/licenses/LICENSE-2.0 8 | # # 9 | # # Unless required by applicable law or agreed to in writing, software 10 | # # distributed under the License is distributed on an "AS IS" BASIS, 11 | # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # # See the License for the specific language governing permissions and 13 | # # limitations under the License. 14 | # 15 | # import unittest 16 | # 17 | # import numpy as np 18 | # import tensorflow as tf 19 | # 20 | # from numpy.testing import assert_allclose 21 | # 22 | # from gpflow import autoflow, params_as_tensors 23 | # from gpflow.likelihoods import * 24 | # from gpflow.models import Model 25 | # from gpflow import settings 26 | # 27 | # from doubly_stochastic_dgp.utils import reparameterize 28 | # from doubly_stochastic_dgp.utils import BroadcastingLikelihood 29 | # 30 | # 31 | # class LikelihoodTester(Model): 32 | # def __init__(self, likelihood): 33 | # Model.__init__(self) 34 | # self.wrapped_likelihood = BroadcastingLikelihood(likelihood) 35 | # self.likelihood = likelihood 36 | # 37 | # def _build_likelihood(self): 38 | # return tf.cast(0., dtype=settings.float_type) 39 | # 40 | # @params_as_tensors 41 | # @autoflow((settings.float_type, [None, None, None]), (settings.float_type, [None, None])) 42 | # def logp1(self, F, Y): 43 | # return self.wrapped_likelihood.logp(F, Y) 44 | # 45 | # @params_as_tensors 46 | # @autoflow((settings.float_type, [None, None, None]), (settings.float_type, [None, None])) 47 | # def logp2(self, F, Y): 48 | # f = lambda a: self.likelihood.logp(a, Y) 49 | # return tf.stack(tf.map_fn(f, F, dtype=settings.float_type)) 50 | # 51 | # @params_as_tensors 52 | # @autoflow((settings.float_type, [None, None, None])) 53 | # def conditional_mean1(self, F): 54 | # return self.wrapped_likelihood.conditional_mean(F) 55 | # 56 | # @params_as_tensors 57 | # @autoflow((settings.float_type, [None, None, None])) 58 | # def conditional_mean2(self, F): 59 | # f = lambda a: tf.cast(self.likelihood.conditional_mean(a), dtype=settings.float_type) 60 | # return tf.stack(tf.map_fn(f, F, dtype=settings.float_type)) 61 | # 62 | # @params_as_tensors 63 | # @autoflow((settings.float_type, [None, None, None])) 64 | # def conditional_variance1(self, F): 65 | # return self.wrapped_likelihood.conditional_variance(F) 66 | # 67 | # @params_as_tensors 68 | # @autoflow((settings.float_type, [None, None, None])) 69 | # def conditional_variance2(self, F): 70 | # f = lambda a: tf.cast(self.likelihood.conditional_variance(a), dtype=settings.float_type) 71 | # return tf.stack(tf.map_fn(f, F, dtype=settings.float_type)) 72 | # 73 | # @params_as_tensors 74 | # @autoflow((settings.float_type, [None, None, None]), 75 | # (settings.float_type, [None, None, None])) 76 | # def predict_mean_and_var1(self, Fmu, Fvar): 77 | # return self.wrapped_likelihood.predict_mean_and_var(Fmu, Fvar) 78 | # 79 | # @params_as_tensors 80 | # @autoflow((settings.float_type, [None, None, None]), 81 | # (settings.float_type, [None, None, None])) 82 | # def predict_mean_and_var2(self, Fmu, Fvar): 83 | # f = lambda a: list(self.likelihood.predict_mean_and_var(a[0], a[1])) 84 | # m, v = tf.map_fn(f, [Fmu, Fvar], dtype=[settings.float_type, settings.float_type]) 85 | # return tf.stack(m), tf.stack(v) 86 | # 87 | # @params_as_tensors 88 | # @autoflow((settings.float_type, [None, None, 
 89 | #               (settings.float_type, [None, None, None]),
 90 | #               (settings.float_type, [None, None]))
 91 | #     def predict_density1(self, Fmu, Fvar, Y):
 92 | #         return self.wrapped_likelihood.predict_density(Fmu, Fvar, Y)
 93 | #
 94 | #     @params_as_tensors
 95 | #     @autoflow((settings.float_type, [None, None, None]),
 96 | #               (settings.float_type, [None, None, None]),
 97 | #               (settings.float_type, [None, None]))
 98 | #     def predict_density2(self, Fmu, Fvar, Y):
 99 | #         f = lambda a: self.likelihood.predict_density(a[0], a[1], Y)
100 | #         return tf.stack(tf.map_fn(f, [Fmu, Fvar], dtype=settings.float_type))
101 | #
102 | #     @params_as_tensors
103 | #     @autoflow((settings.float_type, [None, None, None]),
104 | #               (settings.float_type, [None, None, None]),
105 | #               (settings.float_type, [None, None]))
106 | #     def variational_expectations1(self, Fmu, Fvar, Y):
107 | #         return self.wrapped_likelihood.variational_expectations(Fmu, Fvar, Y)
108 | #
109 | #     @params_as_tensors
110 | #     @autoflow((settings.float_type, [None, None, None]),
111 | #               (settings.float_type, [None, None, None]),
112 | #               (settings.float_type, [None, None]))
113 | #     def variational_expectations2(self, Fmu, Fvar, Y):
114 | #         f = lambda a: self.likelihood.variational_expectations(a[0], a[1], Y)
115 | #         return tf.stack(tf.map_fn(f, [Fmu, Fvar], dtype=settings.float_type))
116 | #
117 | #
118 | # class TestLikelihoodWrapper(unittest.TestCase):
119 | #     def setUp(self):
120 | #         S, N, D = 5, 4, 3
121 | #         self.Fmu = np.random.randn(S, N, D)
122 | #         self.Fvar = np.random.randn(S, N, D)**2
123 | #         self.N, self.D = N, D
124 | #
125 | #     def run_tests(self, likelihood, Fmu, Fvar, Y):
126 | #         l = LikelihoodTester(likelihood)
127 | #         assert_allclose(l.logp1(Fmu, Y), l.logp2(Fmu, Y))
128 | #         assert_allclose(l.conditional_mean1(Fmu), l.conditional_mean2(Fmu))
129 | #         assert_allclose(l.conditional_variance1(Fmu), l.conditional_variance2(Fmu))
130 | #
131 | #         m1, v1 = l.predict_mean_and_var1(Fmu, Fvar)
132 | #         m2, v2 = l.predict_mean_and_var2(Fmu, Fvar)
133 | #         assert_allclose(m1, m2)
134 | #         assert_allclose(v1, v2)
135 | #
136 | #         assert_allclose(l.predict_density1(Fmu, Fvar, Y),
137 | #                         l.predict_density2(Fmu, Fvar, Y))
138 | #
139 | #         assert_allclose(l.variational_expectations1(Fmu, Fvar, Y),
140 | #                         l.variational_expectations2(Fmu, Fvar, Y))
141 | #
142 | #     def test_gaussian(self):
143 | #         self.run_tests(Gaussian(), self.Fmu, self.Fvar, np.random.randn(self.N, self.D))
144 | #
145 | #     def test_bernoulli(self):
146 | #         Y = np.random.choice([0., 1.], self.N * self.D).reshape(self.N, self.D)
147 | #         self.run_tests(Bernoulli(), self.Fmu, self.Fvar, Y)
148 | #
149 | #     def test_multiclass(self):
150 | #         K = self.Fmu.shape[2]
151 | #         Y = np.random.choice(np.arange(K).astype(float), self.Fmu.shape[1]).reshape(-1, 1)
152 | #         self.run_tests(MultiClass(K), self.Fmu, self.Fvar, Y)
153 | #
154 | #     def test_exponential(self):
155 | #         Y = np.random.randn(self.N, self.D)**2
156 | #         self.run_tests(Exponential(), self.Fmu, self.Fvar, Y)
157 | #
158 | #     def test_poisson(self):
159 | #         Y = np.floor(np.random.randn(self.N, self.D)**2).astype(float)
160 | #         self.run_tests(Poisson(), self.Fmu, self.Fvar, Y)
161 | #
162 | #     def test_studentT(self):
163 | #         Y = np.random.randn(self.N, self.D)
164 | #         self.run_tests(StudentT(), self.Fmu, self.Fvar, Y)
165 | #
166 | #     def test_gamma(self):
167 | #         Y = np.random.randn(self.N, self.D)**2
168 | #         self.run_tests(Gamma(), self.Fmu, self.Fvar, Y)
169 | #
170 | #     def test_beta(self):
171 | #         Y = np.random.randn(self.N, self.D)
172 | #         Y = 1/(1+np.exp(-Y))
173 | #         self.run_tests(Beta(), self.Fmu, self.Fvar, Y)
174 | #
175 | #     def test_ordinal(self):
176 | #         Y = np.random.choice(range(4), self.N*self.D).reshape(self.N, self.D).astype(float)
177 | #         self.run_tests(Ordinal(np.linspace(-2, 2, 4)), self.Fmu, self.Fvar, Y)
178 | #
179 | #
180 | # class TestReparameterize(unittest.TestCase):
181 | #     def testReparameterizeDiag(self):
182 | #         S, N, D = 4, 3, 2
183 | #         mean = np.random.randn(S, N, D)
184 | #         var = np.random.randn(S, N, D)**2
185 | #         z = np.random.randn(S, N, D)
186 | #         f = mean + z * (var + 1e-6)**0.5
187 | #         with tf.Session() as sess:
188 | #             assert_allclose(f, sess.run(reparameterize(tf.identity(mean), var, z)))
189 | #
190 | #     def testReparameterizeFullCov(self):
191 | #         S, N, D = 4, 3, 2
192 | #
193 | #         mean = np.random.randn(S, N, D)
194 | #         U = np.random.randn(S, N, N, D)
195 | #         var = np.einsum('SnNd,SmNd->Snmd', U, U) + np.eye(N)[None, :, :, None] * 1e-6
196 | #
197 | #         var_flat = np.reshape(np.transpose(var, [0, 3, 1, 2]), [S*D, N, N])
198 | #         L_flat = np.linalg.cholesky(var_flat + np.eye(N)[None, :, :] * 1e-6)
199 | #         L = np.transpose(np.reshape(L_flat, [S, D, N, N]), [0, 2, 3, 1])
200 | #
201 | #         z = np.random.randn(S, N, D)
202 | #         f = mean + np.einsum('SnNd,SNd->Snd', L, z)
203 | #
204 | #         with tf.Session() as sess:
205 | #             assert_allclose(f, sess.run(reparameterize(tf.identity(mean), var, z,
206 | #                                                        full_cov=True)))
207 | #
208 | #
209 | # if __name__ == '__main__':
210 | #     unittest.main()
211 | 
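Although the file above is checked in fully commented out, the identity it exercises is the standard reparameterization trick: a draw from N(mean, var) is written as mean + sqrt(var) * z with z ~ N(0, 1) (or mean + L z with L a Cholesky factor in the full-covariance case), so that sampling becomes a deterministic, differentiable transform of the noise. A minimal NumPy sketch of the diagonal case, mirroring testReparameterizeDiag:

import numpy as np

np.random.seed(0)
S, N, D = 4, 3, 2                  # samples, data points, output dimensions
mean = np.random.randn(S, N, D)
var = np.random.randn(S, N, D) ** 2
z = np.random.randn(S, N, D)

# f is a draw from N(mean, var), expressed as a deterministic function of z;
# the 1e-6 jitter matches what the commented-out test expects from reparameterize.
f = mean + z * (var + 1e-6) ** 0.5
assert f.shape == (S, N, D)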
--------------------------------------------------------------------------------
/tests/test_zoo_models.py:
--------------------------------------------------------------------------------
  1 | import unittest
  2 | import numpy as np
  3 | 
  4 | from numpy.testing import assert_allclose
  5 | 
  6 | from doubly_stochastic_dgp.layers import GPMC_Layer, GPR_Layer
  7 | 
  8 | from gpflow import settings as _settings
  9 | from gpflow import session_manager as _session_manager
 10 | 
 11 | custom_config = _settings.get_settings()
 12 | custom_config.numerics.jitter_level = 1e-12
 13 | 
 14 | with _settings.temp_settings(custom_config),\
 15 |      _session_manager.get_session().as_default():
 16 | 
 17 | 
 18 |     from gpflow.models import SVGP, GPR
 19 |     from gpflow.kernels import Matern52, RBF
 20 |     from gpflow.likelihoods import Gaussian, Bernoulli, MultiClass
 21 |     from gpflow.training import ScipyOptimizer
 22 |     from gpflow.mean_functions import Zero, Identity, Linear, Constant
 23 |     from gpflow.training import NatGradOptimizer
 24 | 
 25 |     from gpflow import settings
 26 | 
 27 |     from doubly_stochastic_dgp.model_zoo import DGP_Heinonen
 28 | 
 29 |     from doubly_stochastic_dgp.dgp import DGP, DGP_Base, DGP_Quad
 30 |     from doubly_stochastic_dgp.layer_initializations import init_layers_linear
 31 |     np.random.seed(0)
 32 | 
 33 | 
 34 |     class TestHeinonen(unittest.TestCase):
 35 |         def setUp(self):
 36 |             Ns, N, D_X, D_Y = 5, 6, 3, 2
 37 | 
 38 |             self.X = np.random.uniform(size=(N, D_X))
 39 |             self.Xs = self.X  # np.random.uniform(size=(Ns, D_X))
 40 | 
 41 |             self.D_Y = D_Y
 42 | 
 43 |         def test_vs_single_layer(self):
 44 |             lik = Gaussian()
 45 |             lik_var = 0.01
 46 |             lik.variance = lik_var
 47 |             N, Ns, D_Y, D_X = self.X.shape[0], self.Xs.shape[0], self.D_Y, self.X.shape[1]
 48 |             Y = np.random.randn(N, D_Y)
 49 |             Ys = np.random.randn(Ns, D_Y)
 50 | 
 51 |             kern = Matern52(self.X.shape[1], lengthscales=0.5)
 52 |             # mf = Linear(A=np.random.randn(D_X, D_Y), b=np.random.randn(D_Y))
 53 |             mf = Zero()
 54 |             m_gpr = GPR(self.X, Y, kern, mean_function=mf)
 55 |             m_gpr.likelihood.variance = lik_var
 56 |             mean_gpr, var_gpr = m_gpr.predict_y(self.Xs)
 57 |             test_lik_gpr = m_gpr.predict_density(self.Xs, Ys)
 58 |             pred_m_gpr, pred_v_gpr = m_gpr.predict_f(self.Xs)
 59 |             pred_mfull_gpr, pred_vfull_gpr = m_gpr.predict_f_full_cov(self.Xs)
 60 | 
 61 |             kerns = []
 62 |             kerns.append(Matern52(self.X.shape[1], lengthscales=0.5, variance=1e-1))
 63 |             kerns.append(kern)
 64 | 
 65 |             layer0 = GPMC_Layer(kerns[0], self.X.copy(), D_X, Identity())
 66 |             layer1 = GPR_Layer(kerns[1], mf, D_Y)
 67 |             m_dgp = DGP_Heinonen(self.X, Y, lik, [layer0, layer1])
 68 | 
 69 | 
 70 |             mean_dgp, var_dgp = m_dgp.predict_y(self.Xs, 1)
 71 |             test_lik_dgp = m_dgp.predict_density(self.Xs, Ys, 1)
 72 |             pred_m_dgp, pred_v_dgp = m_dgp.predict_f(self.Xs, 1)
 73 |             pred_mfull_dgp, pred_vfull_dgp = m_dgp.predict_f_full_cov(self.Xs, 1)
 74 | 
 75 |             tol = 1e-4
 76 |             assert_allclose(mean_dgp[0], mean_gpr, atol=tol, rtol=tol)
 77 |             assert_allclose(test_lik_dgp, test_lik_gpr, atol=tol, rtol=tol)
 78 |             assert_allclose(pred_m_dgp[0], pred_m_gpr, atol=tol, rtol=tol)
 79 |             assert_allclose(pred_mfull_dgp[0], pred_mfull_gpr, atol=tol, rtol=tol)
 80 |             assert_allclose(pred_vfull_dgp[0], pred_vfull_gpr, atol=tol, rtol=tol)
 81 | 
 82 |         def test_vs_DGP2(self):
 83 |             lik = Gaussian()
 84 |             lik_var = 0.1
 85 |             lik.variance = lik_var
 86 |             N, Ns, D_Y, D_X = self.X.shape[0], self.Xs.shape[0], self.D_Y, self.X.shape[1]
 87 | 
 88 |             q_mu = np.random.randn(N, D_X)
 89 | 
 90 |             Y = np.random.randn(N, D_Y)
 91 |             Ys = np.random.randn(Ns, D_Y)
 92 | 
 93 |             kern1 = Matern52(self.X.shape[1], lengthscales=0.5)
 94 |             kern2 = Matern52(self.X.shape[1], lengthscales=0.5)
 95 |             kerns = [kern1, kern2]
 96 |             # mf = Linear(A=np.random.randn(D_X, D_Y), b=np.random.randn(D_Y))
 97 | 
 98 |             mf = Zero()
 99 |             m_dgp = DGP(self.X, Y, self.X, kerns, lik, mean_function=mf, white=True)
100 |             m_dgp.layers[0].q_mu = q_mu
101 |             m_dgp.layers[0].q_sqrt = m_dgp.layers[0].q_sqrt.read_value() * 1e-24
102 | 
103 |             Fs, ms, vs = m_dgp.predict_all_layers(self.Xs, 1)
104 |             Z = self.X.copy()
105 |             Z[:len(self.Xs)] = ms[0][0]
106 |             m_dgp.layers[1].feature.Z = Z  # need to put the inducing points in the right place
107 | 
108 |             var_list = [[m_dgp.layers[1].q_mu, m_dgp.layers[1].q_sqrt]]
109 |             NatGradOptimizer(gamma=1).minimize(m_dgp, var_list=var_list, maxiter=1)
110 | 
111 |             mean_dgp, var_dgp = m_dgp.predict_y(self.Xs, 1)
112 |             test_lik_dgp = m_dgp.predict_density(self.Xs, Ys, 1)
113 |             pred_m_dgp, pred_v_dgp = m_dgp.predict_f(self.Xs, 1)
114 |             pred_mfull_dgp, pred_vfull_dgp = m_dgp.predict_f_full_cov(self.Xs, 1)
115 | 
116 |             # mean_functions = [Identity(), mf]
117 |             layer0 = GPMC_Layer(kerns[0], self.X.copy(), D_X, Identity())
118 |             layer1 = GPR_Layer(kerns[1], mf, D_Y)
119 | 
120 |             m_heinonen = DGP_Heinonen(self.X, Y, lik, [layer0, layer1])
121 | 
122 |             m_heinonen.layers[0].q_mu = q_mu
123 | 
124 |             mean_heinonen, var_heinonen = m_heinonen.predict_y(self.Xs, 1)
125 |             test_lik_heinonen = m_heinonen.predict_density(self.Xs, Ys, 1)
126 |             pred_m_heinonen, pred_v_heinonen = m_heinonen.predict_f(self.Xs, 1)
127 |             pred_mfull_heinonen, pred_vfull_heinonen = m_heinonen.predict_f_full_cov(self.Xs, 1)
128 | 
129 |             tol = 1e-4
130 |             assert_allclose(mean_dgp, mean_heinonen, atol=tol, rtol=tol)
131 |             assert_allclose(test_lik_dgp, test_lik_heinonen, atol=tol, rtol=tol)
132 |             assert_allclose(pred_m_dgp, pred_m_heinonen, atol=tol, rtol=tol)
133 |             assert_allclose(pred_mfull_dgp, pred_mfull_heinonen, atol=tol, rtol=tol)
134 |             assert_allclose(pred_vfull_dgp, pred_vfull_heinonen, atol=tol, rtol=tol)
135 | 
136 | 
137 | 
138 | 
139 | 
140 |     if __name__ == '__main__':
141 |         unittest.main()
142 | 
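The single NatGradOptimizer(gamma=1) step in test_vs_DGP2 is doing real work: for a conjugate (Gaussian) likelihood, one natural-gradient step of length 1 on (q_mu, q_sqrt) jumps straight to the optimal variational distribution, which is why the SVGP-style top layer can be expected to match the analytic GPR layer immediately afterwards. A rough standalone sketch of that property for a plain single-layer model, assuming the GPflow 1.x API used throughout this repo and made-up toy data (see also demos/using_natural_gradients.ipynb):

import numpy as np
from gpflow.models import SVGP, GPR
from gpflow.kernels import RBF
from gpflow.likelihoods import Gaussian
from gpflow.training import NatGradOptimizer

np.random.seed(0)
X = np.random.uniform(size=(20, 1))                # toy inputs
Y = np.sin(10 * X) + 0.1 * np.random.randn(20, 1)  # toy targets

# With Z = X the sparse model can represent the exact posterior.
m_svgp = SVGP(X, Y, RBF(1), Gaussian(), Z=X.copy())
m_gpr = GPR(X, Y, RBF(1))

# One natural-gradient step of size 1 solves the conjugate case exactly,
# so the SVGP ELBO should equal the GPR log marginal likelihood.
NatGradOptimizer(gamma=1.0).minimize(
    m_svgp, var_list=[[m_svgp.q_mu, m_svgp.q_sqrt]], maxiter=1)

np.testing.assert_allclose(m_svgp.compute_log_likelihood(),
                           m_gpr.compute_log_likelihood(), atol=1e-5)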
--------------------------------------------------------------------------------