├── .gitattributes ├── Makefile ├── setup.cfg ├── treegrad ├── __init__.py ├── tests │ ├── regression_test.py │ ├── binary_test.py │ └── multiclass_test.py ├── treegrad.py └── tree_utils.py ├── nose2.cfg ├── LICENSE ├── .gitignore ├── setup.py ├── README.md └── notebooks ├── iris-decision-tree.ipynb ├── treegrad_tf.ipynb ├── iris-tree-conv.ipynb └── treegrad_binary.ipynb /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | publish: 3 | python setup.py sdist bdist_wheel 4 | python -m twine upload dist/* 5 | 6 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | 4 | [flake8] 5 | ignore = E501,W293,W291 6 | exclude = 7 | .ipynb_checkpoints -------------------------------------------------------------------------------- /treegrad/__init__.py: -------------------------------------------------------------------------------- 1 | from treegrad.treegrad import TGDClassifier, TGDRegressor 2 | import warnings 3 | 4 | try: 5 | from treegrad.version import version as __version__ # NOQA 6 | except: 7 | warnings.warn("Could not import version, has package been installed?") 8 | -------------------------------------------------------------------------------- /nose2.cfg: -------------------------------------------------------------------------------- 1 | [unittest] 2 | start-dir = . 
3 | test-file-pattern = *_test.py 4 | test-method-prefix = test 5 | plugins = nose2.plugins.collect 6 | 7 | [html-report] 8 | always-on = False 9 | 10 | [coverage] 11 | always-on = True 12 | coverage-report = html 13 | term 14 | 15 | [collect-only] 16 | always-on = False 17 | 18 | 19 | [log-capture] 20 | always-on = True -------------------------------------------------------------------------------- /treegrad/tests/regression_test.py: -------------------------------------------------------------------------------- 1 | from sklearn.datasets import make_regression 2 | import numpy as np 3 | from treegrad import TGDRegressor 4 | 5 | 6 | def test_binary(): 7 | # test class binary 8 | X, y = make_regression() 9 | model = TGDRegressor(autograd_config={"num_iters": 1}) 10 | model.fit(X, y) 11 | a1 = model.predict(X) 12 | assert a1.shape[0] == X.shape[0] 13 | 14 | # partial fit off lightgbm 15 | model.partial_fit(X, y) 16 | a2 = model.predict(X) 17 | assert a2.shape[0] == X.shape[0] 18 | 19 | # partial fit off itself 20 | model.partial_fit(X, y) 21 | a3 = model.predict(X) 22 | assert a3.shape[0] == X.shape[0] 23 | 24 | assert not np.array_equal(a1, a2) 25 | assert not np.array_equal(a1, a3) 26 | -------------------------------------------------------------------------------- /treegrad/tests/binary_test.py: -------------------------------------------------------------------------------- 1 | from sklearn.datasets import make_classification 2 | import numpy as np 3 | from treegrad import TGDClassifier 4 | 5 | 6 | def test_binary(): 7 | # test class binary 8 | X, y = make_classification( 9 | 100, 10 | n_classes=2, 11 | n_informative=3, 12 | n_redundant=0, 13 | n_clusters_per_class=2, 14 | n_features=10, 15 | ) 16 | model = TGDClassifier(autograd_config={"num_iters": 1}) 17 | model.fit(X, y) 18 | assert model.predict(X).shape[0] == X.shape[0] 19 | a1 = model.predict_proba(X) 20 | assert a1.shape[1] == 2 21 | 22 | # partial fit off lightgbm 23 | model.partial_fit(X, y) 24 | 
assert model.predict(X).shape[0] == X.shape[0] 25 | a2 = model.predict_proba(X) 26 | assert a2.shape[1] == 2 27 | 28 | # partial fit off itself 29 | model.partial_fit(X, y) 30 | assert model.predict(X).shape[0] == X.shape[0] 31 | a3 = model.predict_proba(X) 32 | assert a3.shape[1] == 2 33 | 34 | assert not np.array_equal(a1, a2) 35 | assert not np.array_equal(a1, a3) 36 | -------------------------------------------------------------------------------- /treegrad/tests/multiclass_test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from sklearn.datasets import make_classification 3 | from treegrad import TGDClassifier 4 | 5 | 6 | def test_multi(): 7 | # test class binary 8 | X, y = make_classification( 9 | 100, 10 | n_classes=3, 11 | n_informative=3, 12 | n_redundant=0, 13 | n_clusters_per_class=2, 14 | n_features=10, 15 | ) 16 | model = TGDClassifier(autograd_config={"num_iters": 1}) 17 | model.fit(X, y) 18 | a1 = model.predict_proba(X) 19 | assert model.predict(X).shape[0] == X.shape[0] 20 | assert a1.shape[1] == 3 21 | 22 | # partial fit off lightgbm 23 | model.partial_fit(X, y) 24 | a2 = model.predict_proba(X) 25 | assert model.predict(X).shape[0] == X.shape[0] 26 | assert a2.shape[1] == 3 27 | 28 | # partial fit off itself 29 | model.partial_fit(X, y) 30 | a3 = model.predict_proba(X) 31 | assert model.predict(X).shape[0] == X.shape[0] 32 | assert a3.shape[1] == 3 33 | 34 | assert not np.array_equal(a1, a2) 35 | assert not np.array_equal(a1, a3) 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Chapman Siu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without 
limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .nox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # IPython 77 | profile_default/ 78 | ipython_config.py 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | .dmypy.json 111 | dmypy.json 112 | 113 | # Pyre type checker 114 | .pyre/ 115 | .vscode/settings.json 116 | treegrad/version.py 117 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | import os 3 | 4 | readme_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md") 5 | with open(readme_file, "r") as f: 6 | readme = f.read() 7 | 8 | 9 | ISRELEASED = True 10 | MAJOR = 1 11 | MINOR = 0 12 | MICRO = 1 13 | VERSION = f"{MAJOR}.{MINOR}.{MICRO}" 14 | 15 | setup_dir = os.path.abspath(os.path.dirname(__file__)) 16 | VFNAME = "treegrad/version.py" 17 | 18 | 19 | def 
write_version_py(filename=os.path.join(setup_dir, VFNAME)): 20 | """ 21 | Generate the version.py file automatically upon install only 22 | """ 23 | version = VERSION 24 | if not ISRELEASED: 25 | version += ".dev" 26 | 27 | a = open(filename, "w") 28 | file_content = "\n".join( 29 | [ 30 | "", 31 | "# THIS FILE IS GENERATED FROM SETUP.PY", 32 | "version = '%(version)s'", 33 | "isrelease = '%(isrelease)s'", 34 | ] 35 | ) 36 | 37 | a.write(file_content % {"version": VERSION, "isrelease": str(ISRELEASED)}) 38 | a.close() 39 | 40 | 41 | write_version_py() 42 | 43 | NAME = "treegrad" 44 | DESCRIPTION = "transfer parameters from lightgbm to differentiable decision trees!" 45 | AUTHOR = "Chapman Siu" 46 | 47 | install_requires = ["autograd", "sklearn", "lightgbm"] 48 | 49 | extras_require = { 50 | "development": [ 51 | "sphinx>=1.6.6", 52 | "sphinxcontrib-napoleon>=0.6.1", 53 | "pandoc>=1.0.2", 54 | "nbsphinx>=0.3.3", 55 | "nose2>=0.7.4", 56 | "nose2_html_report>=0.6.0", 57 | "coverage>=4.5.1", 58 | "awscli>=1.15.26", 59 | "flake8>=3.5.0", 60 | "m2r", 61 | ] 62 | } 63 | extras_require["complete"] = sorted(set(sum(extras_require.values(), []))) 64 | 65 | setup( 66 | url="http://github.com/chappers/TreeGrad", 67 | name=NAME, 68 | version=VERSION, 69 | description=DESCRIPTION, 70 | long_description=readme, 71 | long_description_content_type="text/markdown", 72 | author=AUTHOR, 73 | author_email="chpmn.siu@gmail.com", 74 | include_package_data=True, 75 | packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), 76 | install_requires=install_requires, 77 | extras_require=extras_require, 78 | python_requires=">=3.5", # supporting type hints 79 | zip_safe=False, # force install as source 80 | ) 81 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TreeGrad 2 | 3 | [![PyPI 
version](https://badge.fury.io/py/treegrad.png)](https://badge.fury.io/py/treegrad) 4 | 5 | `TreeGrad` implements a naive approach to converting a Gradient Boosted Tree Model to an Online trainable model. It does this by creating differentiable tree models which can be learned via auto-differentiable frameworks. `TreeGrad` is in essence an implementation of Kontschieder, Peter, et al. "Deep neural decision forests." with extensions. 6 | 7 | To install 8 | 9 | ``` 10 | python setup.py install 11 | ``` 12 | 13 | or alternatively from pypi 14 | 15 | 16 | ``` 17 | pip install treegrad 18 | ``` 19 | 20 | Run tests: 21 | 22 | ``` 23 | python -m nose2 24 | ``` 25 | 26 | ``` 27 | @inproceedings{siu2019transferring, 28 | title={Transferring Tree Ensembles to Neural Networks}, 29 | author={Siu, Chapman}, 30 | booktitle={International Conference on Neural Information Processing}, 31 | pages={471--480}, 32 | year={2019}, 33 | organization={Springer} 34 | } 35 | ``` 36 | 37 | Link: https://arxiv.org/abs/1904.11132 38 | 39 | 40 | # Usage 41 | 42 | ```py 43 | from sklearn. 44 | import treegrad as tgd 45 | 46 | mod = tgd.TGDClassifier(num_leaves=31, max_depth=-1, learning_rate=0.1, n_estimators=100, autograd_config={'refit_splits':False}) 47 | mod.fit(X, y) 48 | mod.partial_fit(X, y) 49 | ``` 50 | 51 | # Requirments 52 | 53 | The requirements for this package are: 54 | 55 | * lightgbm 56 | * scikit-learn 57 | * autograd 58 | 59 | Future plans: 60 | 61 | * Add implementation for Neural Architecture search for decision boundary splits (requires a bit of clean up - TBA) 62 | * Implementation can be done quite trivially using objects residing in `tree_utils.py` - Challenge is getting this working in a sane manner with `scikit-learn` interface. 
63 | * GPU enabled auto differentiation framework - see `notebooks/` for progress off Colab for Tensorflow 2.0 port 64 | * support xgboost/lightgbm additional features such as monotone constraints 65 | * Support `RegressorMixin` 66 | 67 | # Results 68 | 69 | When decision splits are reset and subsequently re-learned, TreeGrad can be competitive in performance with popular implementations (albeit an order of magnitude slower). Below is a table showing accuracy on test dataset on UCI benchmark datasets for Boosted Ensemble models (100 trees) 70 | 71 | 72 | | Dataset | TreeGrad | LightGBM | Scikit-Learn (Gradient Boosting Classifier) | 73 | | ---------| --------- | --------- | ------------------------------------------- | 74 | | adult | 0.860 | 0.873 | **0.874** | 75 | | covtype | 0.832 | **0.835** | 0.826 | 76 | | dna | **0.950** | 0.949 | 0.946 | 77 | | glass | 0.766 | **0.813** | 0.719 | 78 | | mandelon | **0.882** | 0.881 | 0.866 | 79 | | soybean | **0.936** | **0.936** | 0.917 | 80 | | yeast | **0.591** | 0.573 | 0.542 | 81 | 82 | 83 | # Implementation 84 | 85 | 86 | 87 | To understand the implementation of `TreeGrad`, we interpret a decision tree algorithm to be a three layer neural network, where the layers are as follows: 88 | 89 | 1. Node layer, which determines the decision boundaries 90 | 2. Routing layer, which determines which nodes are used to route to the final leaf nodes 91 | 3. Leaf layer, the layer which determines the final predictions 92 | 93 | In the node layer, the decision boundaries can be interpreted as _axis-parallel_ decision boundaries from your typical Linear Classifier; i.e. a fully connected dense layer 94 | 95 | The routing layer requires a binary routing matrix to which essentially the global product routing is applied 96 | 97 | The leaf layer is your typical fully connected dense layer. 98 | 99 | This approach is the same as the one taken by Kontschieder, Peter, et al. "Deep neural decision forests." 
100 | 101 | -------------------------------------------------------------------------------- /notebooks/iris-decision-tree.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n", 12 | " max_features=None, max_leaf_nodes=4, min_impurity_decrease=0.0,\n", 13 | " min_impurity_split=None, min_samples_leaf=1,\n", 14 | " min_samples_split=2, min_weight_fraction_leaf=0.0,\n", 15 | " presort=False, random_state=None, splitter='best')" 16 | ] 17 | }, 18 | "execution_count": 1, 19 | "metadata": {}, 20 | "output_type": "execute_result" 21 | } 22 | ], 23 | "source": [ 24 | "from sklearn.datasets import load_iris\n", 25 | "from sklearn import tree\n", 26 | "from sklearn.tree import _tree\n", 27 | "import lightgbm as lgb\n", 28 | "from sklearn.metrics import accuracy_score\n", 29 | "\n", 30 | "def tree_to_code(tree, feature_names):\n", 31 | " tree_ = tree.tree_\n", 32 | " feature_name = [\n", 33 | " feature_names[i] if i != _tree.TREE_UNDEFINED else \"undefined!\"\n", 34 | " for i in tree_.feature\n", 35 | " ]\n", 36 | " print(\"def tree({}):\".format(\", \".join(feature_names)))\n", 37 | "\n", 38 | " def recurse(node, depth):\n", 39 | " indent = \" \" * depth\n", 40 | " if tree_.feature[node] != _tree.TREE_UNDEFINED:\n", 41 | " name = feature_name[node]\n", 42 | " threshold = tree_.threshold[node]\n", 43 | " print(\"{}if {} <= {}:\".format(indent, name, threshold))\n", 44 | " recurse(tree_.children_left[node], depth + 1)\n", 45 | " print(\"{}else: # if {} > {}\".format(indent, name, threshold))\n", 46 | " recurse(tree_.children_right[node], depth + 1)\n", 47 | " else:\n", 48 | " print(\"{}return {}\".format(indent, tree_.value[node]))\n", 49 | "\n", 50 | " recurse(0, 1)\n", 51 | " \n", 52 | "iris = 
load_iris()\n", 53 | "X = iris.data\n", 54 | "y = iris.target\n", 55 | "\n", 56 | "clf = tree.DecisionTreeClassifier(max_leaf_nodes=4)\n", 57 | "clf.fit(X, y)" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 2, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "# dot -Tpng tree.dot -o tree.png \n", 67 | "# tree.export_graphviz(clf, out_file='tree_draft.dot') " 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 3, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "# !dot -Tpng tree.dot -o tree.png " 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": 4, 82 | "metadata": {}, 83 | "outputs": [ 84 | { 85 | "data": { 86 | "text/plain": [ 87 | "0.9733333333333334" 88 | ] 89 | }, 90 | "execution_count": 4, 91 | "metadata": {}, 92 | "output_type": "execute_result" 93 | } 94 | ], 95 | "source": [ 96 | "clf.score(X, y)" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": 5, 102 | "metadata": {}, 103 | "outputs": [ 104 | { 105 | "data": { 106 | "text/plain": [ 107 | "['sepal length (cm)',\n", 108 | " 'sepal width (cm)',\n", 109 | " 'petal length (cm)',\n", 110 | " 'petal width (cm)']" 111 | ] 112 | }, 113 | "execution_count": 5, 114 | "metadata": {}, 115 | "output_type": "execute_result" 116 | } 117 | ], 118 | "source": [ 119 | "iris.feature_names" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": 6, 125 | "metadata": {}, 126 | "outputs": [ 127 | { 128 | "name": "stdout", 129 | "output_type": "stream", 130 | "text": [ 131 | "def tree(sepal length (cm), sepal width (cm), petal length (cm), petal width (cm)):\n", 132 | " if petal width (cm) <= 0.800000011920929:\n", 133 | " return [[50. 0. 0.]]\n", 134 | " else: # if petal width (cm) > 0.800000011920929\n", 135 | " if petal width (cm) <= 1.75:\n", 136 | " if petal length (cm) <= 4.949999809265137:\n", 137 | " return [[ 0. 47. 
1.]]\n", 138 | " else: # if petal length (cm) > 4.949999809265137\n", 139 | " return [[0. 2. 4.]]\n", 140 | " else: # if petal width (cm) > 1.75\n", 141 | " return [[ 0. 1. 45.]]\n" 142 | ] 143 | } 144 | ], 145 | "source": [ 146 | "tree_to_code(clf, iris.feature_names)\n", 147 | "\n", 148 | "# note for boundaries...\n", 149 | "# 1/0.8 = 1.25\n", 150 | "# 1/1.75 = 0.57\n", 151 | "# 1/4.95 = 0.20" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": 7, 157 | "metadata": {}, 158 | "outputs": [], 159 | "source": [ 160 | "# assume inter is always 1 for convenience\n", 161 | "param = [\n", 162 | " np.array([-1.25, -0.57, -0.20]),\n", 163 | " np.array([1, 1, 1]), \n", 164 | " np.array([[50, 0., 0.], \n", 165 | " [0., 47, 1],\n", 166 | " [0, 2, 4],\n", 167 | " [0, 1, 45]])\n", 168 | "]\n", 169 | "\n", 170 | "route_array = np.array([\n", 171 | " [1, 0, 0, 0, 0, 0], \n", 172 | " [0, 1, 1, 1, 0, 0],\n", 173 | " [0, 1, 0, 1, 0, 1],\n", 174 | " [0, 0, 0, 1, 1, 0]\n", 175 | "])\n", 176 | "\n", 177 | "sparse_info = np.array([\n", 178 | " [0, 0, 0, 1],\n", 179 | " [0, 0, 0, 1],\n", 180 | " [0, 0, 1, 0]\n", 181 | "]).T" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 8, 187 | "metadata": {}, 188 | "outputs": [], 189 | "source": [ 190 | "# hard code all things\n", 191 | "def sigmoid(z):\n", 192 | " return 1. 
/ ( 1 + np.exp(-z) )\n", 193 | "\n", 194 | "# softmax by axis...\n", 195 | "def gumbel_softmax(x, tau=1.0, eps=np.finfo(float).eps):\n", 196 | " # element-wise gumbel softmax\n", 197 | " # return np.exp(np.log(X+eps)/temp)/np.sum(np.exp(np.log(X+eps)/temp), axis=1)[:, np.newaxis]\n", 198 | " return 1/(1+np.exp(-(x)/tau))" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 9, 204 | "metadata": {}, 205 | "outputs": [], 206 | "source": [ 207 | "coef, inter, leaf = param" 208 | ] 209 | }, 210 | { 211 | "cell_type": "code", 212 | "execution_count": 11, 213 | "metadata": {}, 214 | "outputs": [], 215 | "source": [ 216 | "coef_sparse = coef * sparse_info" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 10, 222 | "metadata": {}, 223 | "outputs": [ 224 | { 225 | "data": { 226 | "text/plain": [ 227 | "0.9733333333333334" 228 | ] 229 | }, 230 | "execution_count": 10, 231 | "metadata": {}, 232 | "output_type": "execute_result" 233 | } 234 | ], 235 | "source": [ 236 | "clf.score(X, y)" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": 12, 242 | "metadata": {}, 243 | "outputs": [ 244 | { 245 | "data": { 246 | "text/plain": [ 247 | "0.9733333333333334" 248 | ] 249 | }, 250 | "execution_count": 12, 251 | "metadata": {}, 252 | "output_type": "execute_result" 253 | } 254 | ], 255 | "source": [ 256 | "# short version\n", 257 | "decisions = np.dot(X, np.hstack([coef_sparse, -coef_sparse]))+np.hstack([inter, -inter])\n", 258 | "decision_soft = np.log(np.round(gumbel_softmax(decisions, tau=1.0))+1e-11)\n", 259 | "route_probas = np.exp(np.dot(decision_soft, route_array.T))\n", 260 | "proba = np.dot(route_probas, leaf)\n", 261 | "\n", 262 | "accuracy_score(y, np.argmax(proba, axis=1))" 263 | ] 264 | } 265 | ], 266 | "metadata": { 267 | "kernelspec": { 268 | "display_name": "Python [default]", 269 | "language": "python", 270 | "name": "python3" 271 | }, 272 | "language_info": { 273 | "codemirror_mode": { 274 | 
"name": "ipython", 275 | "version": 3 276 | }, 277 | "file_extension": ".py", 278 | "mimetype": "text/x-python", 279 | "name": "python", 280 | "nbconvert_exporter": "python", 281 | "pygments_lexer": "ipython3", 282 | "version": "3.6.5" 283 | } 284 | }, 285 | "nbformat": 4, 286 | "nbformat_minor": 2 287 | } 288 | -------------------------------------------------------------------------------- /notebooks/treegrad_tf.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "colab": {}, 8 | "colab_type": "code", 9 | "id": "Mh0Y-o4ZCKAd" 10 | }, 11 | "outputs": [], 12 | "source": [ 13 | "from __future__ import absolute_import, division, print_function, unicode_literals\n", 14 | "\n", 15 | "!pip install -q tensorflow==2.0.0-alpha0 treegrad\n", 16 | "import tensorflow as tf\n", 17 | "import numpy as np" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": { 24 | "colab": { 25 | "base_uri": "https://localhost:8080/", 26 | "height": 35 27 | }, 28 | "colab_type": "code", 29 | "id": "glyp-yCdC4S5", 30 | "outputId": "97ca524a-cdc7-439f-8f43-f0641900d856" 31 | }, 32 | "outputs": [ 33 | { 34 | "data": { 35 | "text/plain": [ 36 | "'2.0.0-alpha0'" 37 | ] 38 | }, 39 | "execution_count": 2, 40 | "metadata": { 41 | "tags": [] 42 | }, 43 | "output_type": "execute_result" 44 | } 45 | ], 46 | "source": [ 47 | "tf.__version__" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "metadata": { 53 | "colab_type": "text", 54 | "id": "KhRhLqSIMVMQ" 55 | }, 56 | "source": [ 57 | "In this example, we will perform scoring on an Iris dataset using TreeGrad related formulation to demonstrate the application of a 3-layer neural network." 
58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": { 64 | "colab": { 65 | "base_uri": "https://localhost:8080/", 66 | "height": 219 67 | }, 68 | "colab_type": "code", 69 | "id": "WM48tZhKMUxH", 70 | "outputId": "b4ef73c8-7b9d-43d9-9125-e25e27a9aaca" 71 | }, 72 | "outputs": [ 73 | { 74 | "name": "stdout", 75 | "output_type": "stream", 76 | "text": [ 77 | "def tree(sepal length (cm), sepal width (cm), petal length (cm), petal width (cm)):\n", 78 | " if petal length (cm) <= 2.449999988079071:\n", 79 | " return [[50. 0. 0.]]\n", 80 | " else: # if petal length (cm) > 2.449999988079071\n", 81 | " if petal width (cm) <= 1.75:\n", 82 | " if petal length (cm) <= 4.950000047683716:\n", 83 | " return [[ 0. 47. 1.]]\n", 84 | " else: # if petal length (cm) > 4.950000047683716\n", 85 | " return [[0. 2. 4.]]\n", 86 | " else: # if petal width (cm) > 1.75\n", 87 | " return [[ 0. 1. 45.]]\n" 88 | ] 89 | } 90 | ], 91 | "source": [ 92 | "from sklearn.datasets import load_iris\n", 93 | "from sklearn import tree\n", 94 | "from sklearn.tree import _tree\n", 95 | "import lightgbm as lgb\n", 96 | "from sklearn.metrics import accuracy_score\n", 97 | "\n", 98 | "def tree_to_code(tree, feature_names):\n", 99 | " tree_ = tree.tree_\n", 100 | " feature_name = [\n", 101 | " feature_names[i] if i != _tree.TREE_UNDEFINED else \"undefined!\"\n", 102 | " for i in tree_.feature\n", 103 | " ]\n", 104 | " print(\"def tree({}):\".format(\", \".join(feature_names)))\n", 105 | "\n", 106 | " def recurse(node, depth):\n", 107 | " indent = \" \" * depth\n", 108 | " if tree_.feature[node] != _tree.TREE_UNDEFINED:\n", 109 | " name = feature_name[node]\n", 110 | " threshold = tree_.threshold[node]\n", 111 | " print(\"{}if {} <= {}:\".format(indent, name, threshold))\n", 112 | " recurse(tree_.children_left[node], depth + 1)\n", 113 | " print(\"{}else: # if {} > {}\".format(indent, name, threshold))\n", 114 | " recurse(tree_.children_right[node], depth + 1)\n", 115 | " 
else:\n", 116 | " print(\"{}return {}\".format(indent, tree_.value[node]))\n", 117 | "\n", 118 | " recurse(0, 1)\n", 119 | " \n", 120 | "iris = load_iris()\n", 121 | "X = iris.data\n", 122 | "y = iris.target\n", 123 | "\n", 124 | "clf = tree.DecisionTreeClassifier(max_leaf_nodes=4)\n", 125 | "clf.fit(X, y)\n", 126 | "\n", 127 | "tree_to_code(clf, iris.feature_names)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "metadata": { 134 | "colab": {}, 135 | "colab_type": "code", 136 | "id": "YmHQ8Ki0MmMT" 137 | }, 138 | "outputs": [], 139 | "source": [ 140 | "# assume inter is always 1 for convenience\n", 141 | "param = [\n", 142 | " np.array([-1.25, -0.57, -0.20]),\n", 143 | " np.array([1, 1, 1]), \n", 144 | " np.array([[50, 0., 0.], \n", 145 | " [0., 47, 1],\n", 146 | " [0, 2, 4],\n", 147 | " [0, 1, 45]])\n", 148 | "]\n", 149 | "\n", 150 | "route_array = np.array([\n", 151 | " [1, 0, 0, 0, 0, 0], \n", 152 | " [0, 1, 1, 1, 0, 0],\n", 153 | " [0, 1, 0, 1, 0, 1],\n", 154 | " [0, 0, 0, 1, 1, 0]\n", 155 | "])\n", 156 | "\n", 157 | "sparse_info = np.array([\n", 158 | " [0, 0, 0, 1],\n", 159 | " [0, 0, 0, 1],\n", 160 | " [0, 0, 1, 0]\n", 161 | "]).T\n", 162 | "\n", 163 | "coef, inter, leaf = param\n", 164 | "coef_sparse = sparse_info*coef" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": null, 170 | "metadata": { 171 | "colab": {}, 172 | "colab_type": "code", 173 | "id": "C9jhVYFiDSjf" 174 | }, 175 | "outputs": [], 176 | "source": [ 177 | "# to build tree model, it is just a three layer neural network - lets see how we go...\n", 178 | "\n", 179 | "class NodeLayer(tf.keras.layers.Layer):\n", 180 | " def __init__(self, num_nodes):\n", 181 | " super(NodeLayer, self).__init__()\n", 182 | " self.num_nodes = num_nodes\n", 183 | " \n", 184 | " def build(self, input_shape):\n", 185 | " # we may want a sparse one later...ignore it for now\n", 186 | " self.kernel = self.add_variable(\"kernel\", \n", 187 | " 
shape=[int(input_shape[-1]), \n", 188 | " self.num_nodes])\n", 189 | " self.bias = self.add_variable(\"bias\", shape=[self.num_nodes,])\n", 190 | " \n", 191 | " def call(self, input):\n", 192 | " return tf.matmul(input, tf.concat([self.kernel, -self.kernel], 1)) + tf.concat([self.bias, -self.bias], 0)\n", 193 | "\n", 194 | " \n", 195 | "def gumbel_softmax(x, tau=0.01):\n", 196 | " x_temp = tf.clip_by_value(x/tau, -32, 32)\n", 197 | " return 1/(1+tf.keras.backend.exp(-(x)))\n", 198 | "\n", 199 | "def activation1(x):\n", 200 | " return tf.keras.backend.log(gumbel_softmax(x)+tf.keras.backend.epsilon())\n", 201 | "\n", 202 | "def activation2(x):\n", 203 | " return tf.keras.backend.exp(x)\n", 204 | "\n", 205 | "# route layer is Dense(num_nodes+1)\n", 206 | "# leaf layer is Dense(1)\n", 207 | "num_nodes = 3\n", 208 | "decision_tree = tf.keras.Sequential([\n", 209 | " NodeLayer(num_nodes),\n", 210 | " tf.keras.layers.Lambda(activation1),\n", 211 | " tf.keras.layers.Dense(num_nodes+1, trainable=False, use_bias=False),\n", 212 | " tf.keras.layers.Lambda(activation2),\n", 213 | " tf.keras.layers.Dense(3, use_bias=False)\n", 214 | "])" 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": null, 220 | "metadata": { 221 | "colab": {}, 222 | "colab_type": "code", 223 | "id": "CZo06bK7NJ9K" 224 | }, 225 | "outputs": [], 226 | "source": [ 227 | "iris_pred = decision_tree(X)" 228 | ] 229 | }, 230 | { 231 | "cell_type": "code", 232 | "execution_count": null, 233 | "metadata": { 234 | "colab": { 235 | "base_uri": "https://localhost:8080/", 236 | "height": 109 237 | }, 238 | "colab_type": "code", 239 | "id": "CaR8Pa8QNXJ9", 240 | "outputId": "a1a7489d-cb85-4422-94a3-a21d94501a31" 241 | }, 242 | "outputs": [ 243 | { 244 | "data": { 245 | "text/plain": [ 246 | "[<__main__.NodeLayer at 0x7fab59955518>,\n", 247 | " ,\n", 248 | " ,\n", 249 | " ,\n", 250 | " ]" 251 | ] 252 | }, 253 | "execution_count": 7, 254 | "metadata": { 255 | "tags": [] 256 | }, 257 | 
"output_type": "execute_result" 258 | } 259 | ], 260 | "source": [ 261 | "decision_tree.layers" 262 | ] 263 | }, 264 | { 265 | "cell_type": "code", 266 | "execution_count": null, 267 | "metadata": { 268 | "colab": {}, 269 | "colab_type": "code", 270 | "id": "1YWq1SNnK_Lo" 271 | }, 272 | "outputs": [], 273 | "source": [ 274 | "# set parameters for iris dataset\n", 275 | "decision_tree.layers[0].set_weights([coef_sparse, inter])\n", 276 | "decision_tree.layers[2].set_weights([route_array.T])\n", 277 | "decision_tree.layers[4].set_weights([leaf])" 278 | ] 279 | }, 280 | { 281 | "cell_type": "code", 282 | "execution_count": null, 283 | "metadata": { 284 | "colab": { 285 | "base_uri": "https://localhost:8080/", 286 | "height": 146 287 | }, 288 | "colab_type": "code", 289 | "id": "5NqTdSR0LRlQ", 290 | "outputId": "dbfa9a36-df1a-4bc9-dd2f-2bfe3cffb173" 291 | }, 292 | "outputs": [ 293 | { 294 | "data": { 295 | "text/plain": [ 296 | "array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 297 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 298 | " 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", 299 | " 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", 300 | " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2,\n", 301 | " 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n", 302 | " 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])" 303 | ] 304 | }, 305 | "execution_count": 9, 306 | "metadata": { 307 | "tags": [] 308 | }, 309 | "output_type": "execute_result" 310 | } 311 | ], 312 | "source": [ 313 | "# prediction from neural network\n", 314 | "np.argmax(np.array(decision_tree(X)), axis=1)" 315 | ] 316 | }, 317 | { 318 | "cell_type": "markdown", 319 | "metadata": { 320 | "colab_type": "text", 321 | "id": "15o1I8UySgd2" 322 | }, 323 | "source": [ 324 | "If we want to build a sparse tree, we just need to alter the construction of the `NodeLayer`" 325 | ] 
326 | }, 327 | { 328 | "cell_type": "code", 329 | "execution_count": null, 330 | "metadata": { 331 | "colab": {}, 332 | "colab_type": "code", 333 | "id": "6jO06iXuSkze" 334 | }, 335 | "outputs": [], 336 | "source": [ 337 | "# to build tree model, it is just a three layer neural network - lets see how we go...\n", 338 | "\n", 339 | "class NodeLayer(tf.keras.layers.Layer):\n", 340 | " def __init__(self, num_nodes):\n", 341 | " super(NodeLayer, self).__init__()\n", 342 | " self.num_nodes = num_nodes\n", 343 | " \n", 344 | " def build(self, input_shape):\n", 345 | " # we may want a sparse one later...ignore it for now\n", 346 | " self.sparse = self.add_variable(\"sparse\", \n", 347 | " shape=[int(input_shape[-1]), \n", 348 | " self.num_nodes],\n", 349 | " trainable=False)\n", 350 | " self.kernel = self.add_variable(\"kernel\", shape=[self.num_nodes,])\n", 351 | " self.bias = self.add_variable(\"bias\", shape=[self.num_nodes,])\n", 352 | " \n", 353 | " def call(self, input):\n", 354 | " coef = self.kernel * self.sparse\n", 355 | " return tf.matmul(input, tf.concat([coef, -coef], 1)) + tf.concat([self.bias, -self.bias], 0)\n", 356 | "\n", 357 | "# route layer is Dense(num_nodes+1)\n", 358 | "# leaf layer is Dense(1)\n", 359 | "num_nodes = 3\n", 360 | "decision_tree = tf.keras.Sequential([\n", 361 | " NodeLayer(num_nodes),\n", 362 | " tf.keras.layers.Lambda(activation1),\n", 363 | " tf.keras.layers.Dense(num_nodes+1, trainable=False, use_bias=False),\n", 364 | " tf.keras.layers.Lambda(activation2),\n", 365 | " tf.keras.layers.Dense(3, use_bias=False)\n", 366 | "])" 367 | ] 368 | }, 369 | { 370 | "cell_type": "code", 371 | "execution_count": null, 372 | "metadata": { 373 | "colab": {}, 374 | "colab_type": "code", 375 | "id": "hFElQQBOUSiR" 376 | }, 377 | "outputs": [], 378 | "source": [ 379 | "iris_pred = decision_tree(X)" 380 | ] 381 | }, 382 | { 383 | "cell_type": "code", 384 | "execution_count": null, 385 | "metadata": { 386 | "colab": {}, 387 | "colab_type": "code", 
388 | "id": "wiInNnl1UeMS" 389 | }, 390 | "outputs": [], 391 | "source": [ 392 | "decision_tree.layers[0].set_weights([coef, inter, sparse_info])\n", 393 | "decision_tree.layers[2].set_weights([route_array.T])\n", 394 | "decision_tree.layers[4].set_weights([leaf])" 395 | ] 396 | }, 397 | { 398 | "cell_type": "code", 399 | "execution_count": null, 400 | "metadata": { 401 | "colab": { 402 | "base_uri": "https://localhost:8080/", 403 | "height": 146 404 | }, 405 | "colab_type": "code", 406 | "id": "egXpPb9YUbbp", 407 | "outputId": "60867ef2-2e7d-431f-a521-ca669413162c" 408 | }, 409 | "outputs": [ 410 | { 411 | "data": { 412 | "text/plain": [ 413 | "array([2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n", 414 | " 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1,\n", 415 | " 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0,\n", 416 | " 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0,\n", 417 | " 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 418 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 419 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])" 420 | ] 421 | }, 422 | "execution_count": 37, 423 | "metadata": { 424 | "tags": [] 425 | }, 426 | "output_type": "execute_result" 427 | } 428 | ], 429 | "source": [ 430 | "# set parameters for iris dataset\n", 431 | "np.argmax(np.array(decision_tree(X)), axis=1)\n" 432 | ] 433 | }, 434 | { 435 | "cell_type": "code", 436 | "execution_count": null, 437 | "metadata": { 438 | "colab": {}, 439 | "colab_type": "code", 440 | "id": "2Kphs_TLZp4a" 441 | }, 442 | "outputs": [], 443 | "source": [] 444 | } 445 | ], 446 | "metadata": { 447 | "accelerator": "GPU", 448 | "colab": { 449 | "collapsed_sections": [], 450 | "name": "treegrad_tf.ipynb", 451 | "provenance": [], 452 | "version": "0.3.2" 453 | }, 454 | "kernelspec": { 455 | "display_name": "Python [default]", 456 | "language": "python", 457 | "name": "python3" 
458 | }, 459 | "language_info": { 460 | "codemirror_mode": { 461 | "name": "ipython", 462 | "version": 3 463 | }, 464 | "file_extension": ".py", 465 | "mimetype": "text/x-python", 466 | "name": "python", 467 | "nbconvert_exporter": "python", 468 | "pygments_lexer": "ipython3", 469 | "version": "3.6.5" 470 | } 471 | }, 472 | "nbformat": 4, 473 | "nbformat_minor": 2 474 | } 475 | -------------------------------------------------------------------------------- /notebooks/iris-tree-conv.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n", 12 | " max_features=None, max_leaf_nodes=4,\n", 13 | " min_impurity_decrease=0.0, min_impurity_split=None,\n", 14 | " min_samples_leaf=1, min_samples_split=2,\n", 15 | " min_weight_fraction_leaf=0.0, presort=False,\n", 16 | " random_state=None, splitter='best')" 17 | ] 18 | }, 19 | "execution_count": 1, 20 | "metadata": {}, 21 | "output_type": "execute_result" 22 | } 23 | ], 24 | "source": [ 25 | "from sklearn.datasets import load_iris\n", 26 | "from sklearn import tree\n", 27 | "from sklearn.tree import _tree\n", 28 | "import lightgbm as lgb\n", 29 | "from sklearn.metrics import accuracy_score\n", 30 | "\n", 31 | "def tree_to_code(tree, feature_names):\n", 32 | " tree_ = tree.tree_\n", 33 | " feature_name = [\n", 34 | " feature_names[i] if i != _tree.TREE_UNDEFINED else \"undefined!\"\n", 35 | " for i in tree_.feature\n", 36 | " ]\n", 37 | " print(\"def tree({}):\".format(\", \".join(feature_names)))\n", 38 | "\n", 39 | " def recurse(node, depth):\n", 40 | " indent = \" \" * depth\n", 41 | " if tree_.feature[node] != _tree.TREE_UNDEFINED:\n", 42 | " name = feature_name[node]\n", 43 | " threshold = tree_.threshold[node]\n", 44 | " print(\"{}if {} <= 
{}:\".format(indent, name, threshold))\n", 45 | " recurse(tree_.children_left[node], depth + 1)\n", 46 | " print(\"{}else: # if {} > {}\".format(indent, name, threshold))\n", 47 | " recurse(tree_.children_right[node], depth + 1)\n", 48 | " else:\n", 49 | " print(\"{}return {}\".format(indent, tree_.value[node]))\n", 50 | "\n", 51 | " recurse(0, 1)\n", 52 | " \n", 53 | "iris = load_iris()\n", 54 | "X = iris.data\n", 55 | "y = iris.target\n", 56 | "\n", 57 | "clf = tree.DecisionTreeClassifier(max_leaf_nodes=4)\n", 58 | "clf.fit(X, y)" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 2, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "# dot -Tpng tree.dot -o tree.png \n", 68 | "# tree.export_graphviz(clf, out_file='tree_draft.dot') " 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 3, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "# !dot -Tpng tree.dot -o tree.png " 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 4, 83 | "metadata": {}, 84 | "outputs": [ 85 | { 86 | "data": { 87 | "text/plain": [ 88 | "0.9733333333333334" 89 | ] 90 | }, 91 | "execution_count": 4, 92 | "metadata": {}, 93 | "output_type": "execute_result" 94 | } 95 | ], 96 | "source": [ 97 | "clf.score(X, y)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 5, 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "data": { 107 | "text/plain": [ 108 | "['sepal length (cm)',\n", 109 | " 'sepal width (cm)',\n", 110 | " 'petal length (cm)',\n", 111 | " 'petal width (cm)']" 112 | ] 113 | }, 114 | "execution_count": 5, 115 | "metadata": {}, 116 | "output_type": "execute_result" 117 | } 118 | ], 119 | "source": [ 120 | "iris.feature_names" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 6, 126 | "metadata": {}, 127 | "outputs": [ 128 | { 129 | "name": "stdout", 130 | "output_type": "stream", 131 | "text": [ 132 | "def tree(sepal length (cm), sepal width 
(cm), petal length (cm), petal width (cm)):\n", 133 | " if petal width (cm) <= 0.800000011920929:\n", 134 | " return [[50. 0. 0.]]\n", 135 | " else: # if petal width (cm) > 0.800000011920929\n", 136 | " if petal width (cm) <= 1.75:\n", 137 | " if petal length (cm) <= 4.950000047683716:\n", 138 | " return [[ 0. 47. 1.]]\n", 139 | " else: # if petal length (cm) > 4.950000047683716\n", 140 | " return [[0. 2. 4.]]\n", 141 | " else: # if petal width (cm) > 1.75\n", 142 | " return [[ 0. 1. 45.]]\n" 143 | ] 144 | } 145 | ], 146 | "source": [ 147 | "tree_to_code(clf, iris.feature_names)\n", 148 | "\n", 149 | "# note for boundaries...\n", 150 | "# 1/0.8 = 1.25\n", 151 | "# 1/1.75 = 0.57\n", 152 | "# 1/4.95 = 0.20" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 7, 158 | "metadata": {}, 159 | "outputs": [], 160 | "source": [ 161 | "# assume inter is always 1 for convenience\n", 162 | "param = [\n", 163 | " np.array([-1.25, -0.57, -0.20]),\n", 164 | " np.array([1, 1, 1]), \n", 165 | " np.array([[50, 0., 0.], \n", 166 | " [0., 47, 1],\n", 167 | " [0, 2, 4],\n", 168 | " [0, 1, 45]])\n", 169 | "]\n", 170 | "\n", 171 | "route_array = np.array([\n", 172 | " [1, 0, 0, 0, 0, 0], \n", 173 | " [0, 1, 1, 1, 0, 0],\n", 174 | " [0, 1, 0, 1, 0, 1],\n", 175 | " [0, 0, 0, 1, 1, 0]\n", 176 | "])\n", 177 | "\n", 178 | "sparse_info = np.array([\n", 179 | " [0, 0, 0, 1],\n", 180 | " [0, 0, 0, 1],\n", 181 | " [0, 0, 1, 0]\n", 182 | "]).T" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 8, 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "# hard code all things\n", 192 | "def sigmoid(z):\n", 193 | " return 1. 
/ ( 1 + np.exp(-z) )\n", 194 | "\n", 195 | "# softmax by axis...\n", 196 | "def gumbel_softmax(x, tau=1.0, eps=np.finfo(float).eps):\n", 197 | " # element-wise gumbel softmax\n", 198 | " # return np.exp(np.log(X+eps)/temp)/np.sum(np.exp(np.log(X+eps)/temp), axis=1)[:, np.newaxis]\n", 199 | " return 1/(1+np.exp(-(x)/tau))" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 9, 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "coef, inter, leaf = param" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": 10, 214 | "metadata": {}, 215 | "outputs": [], 216 | "source": [ 217 | "coef_sparse = coef * sparse_info" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 11, 223 | "metadata": {}, 224 | "outputs": [ 225 | { 226 | "data": { 227 | "text/plain": [ 228 | "0.9733333333333334" 229 | ] 230 | }, 231 | "execution_count": 11, 232 | "metadata": {}, 233 | "output_type": "execute_result" 234 | } 235 | ], 236 | "source": [ 237 | "clf.score(X, y)" 238 | ] 239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": 12, 243 | "metadata": {}, 244 | "outputs": [ 245 | { 246 | "data": { 247 | "text/plain": [ 248 | "0.9733333333333334" 249 | ] 250 | }, 251 | "execution_count": 12, 252 | "metadata": {}, 253 | "output_type": "execute_result" 254 | } 255 | ], 256 | "source": [ 257 | "# short version\n", 258 | "decisions = np.dot(X, np.hstack([coef_sparse, -coef_sparse]))+np.hstack([inter, -inter])\n", 259 | "decision_soft = np.log(gumbel_softmax(decisions, tau=0.01))\n", 260 | "route_probas = np.exp(np.dot(decision_soft, route_array.T))\n", 261 | "proba = np.dot(route_probas, leaf)\n", 262 | "\n", 263 | "accuracy_score(y, np.argmax(proba, axis=1))" 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": 13, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "from tensorflow.keras.layers import Conv1D, Input, Lambda, GlobalMaxPooling1D, 
LocallyConnected1D, Dense, Concatenate\n", 273 | "from tensorflow.keras.models import Model\n", 274 | "import tensorflow.keras.backend as K\n", 275 | "import tensorflow.keras as keras\n", 276 | "from tensorflow.keras.constraints import non_neg" 277 | ] 278 | }, 279 | { 280 | "cell_type": "code", 281 | "execution_count": 14, 282 | "metadata": {}, 283 | "outputs": [], 284 | "source": [ 285 | "def keras_gumbel_softmax(x, tau=1.0, eps=np.finfo(float).eps):\n", 286 | " # element-wise gumbel softmax\n", 287 | " # return np.exp(np.log(X+eps)/temp)/np.sum(np.exp(np.log(X+eps)/temp), axis=1)[:, np.newaxis]\n", 288 | " x_clip = K.clip(-(x+eps)/tau, -32, 32)\n", 289 | " \n", 290 | " return 1/(1+K.exp(x_clip))" 291 | ] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": 15, 296 | "metadata": {}, 297 | "outputs": [ 298 | { 299 | "name": "stdout", 300 | "output_type": "stream", 301 | "text": [ 302 | "Model: \"model\"\n", 303 | "__________________________________________________________________________________________________\n", 304 | "Layer (type) Output Shape Param # Connected to \n", 305 | "==================================================================================================\n", 306 | "input_1 (InputLayer) [(None, 4)] 0 \n", 307 | "__________________________________________________________________________________________________\n", 308 | "lambda (Lambda) (None, 4, 1) 0 input_1[0][0] \n", 309 | "__________________________________________________________________________________________________\n", 310 | "nodes (Conv1D) (None, 1, 3) 15 lambda[0][0] \n", 311 | "__________________________________________________________________________________________________\n", 312 | "global_max_pooling1d (GlobalMax (None, 3) 0 nodes[0][0] \n", 313 | "__________________________________________________________________________________________________\n", 314 | "lambda_1 (Lambda) (None, 3) 0 global_max_pooling1d[0][0] \n", 315 | 
"__________________________________________________________________________________________________\n", 316 | "lambda_2 (Lambda) (None, 3) 0 global_max_pooling1d[0][0] \n", 317 | "__________________________________________________________________________________________________\n", 318 | "decision (Concatenate) (None, 6) 0 lambda_1[0][0] \n", 319 | " lambda_2[0][0] \n", 320 | "__________________________________________________________________________________________________\n", 321 | "lambda_3 (Lambda) (None, 6) 0 decision[0][0] \n", 322 | "__________________________________________________________________________________________________\n", 323 | "route (Dense) (None, 4) 24 lambda_3[0][0] \n", 324 | "__________________________________________________________________________________________________\n", 325 | "lambda_4 (Lambda) (None, 4) 0 route[0][0] \n", 326 | "__________________________________________________________________________________________________\n", 327 | "leaf (Dense) (None, 3) 12 lambda_4[0][0] \n", 328 | "==================================================================================================\n", 329 | "Total params: 51\n", 330 | "Trainable params: 27\n", 331 | "Non-trainable params: 24\n", 332 | "__________________________________________________________________________________________________\n" 333 | ] 334 | } 335 | ], 336 | "source": [ 337 | "iris_input = Input(shape=(4,))\n", 338 | "iris_axis = Lambda(lambda x: K.expand_dims(x, -1))(iris_input)\n", 339 | "iris_conv = Conv1D(3, (4), strides=4, padding='same', name='nodes')(iris_axis)\n", 340 | "iris_pooled = GlobalMaxPooling1D()(iris_conv)\n", 341 | "\n", 342 | "iris_left = Lambda(lambda x: keras_gumbel_softmax(x, 0.1))(iris_pooled)\n", 343 | "iris_right = Lambda(lambda x: keras_gumbel_softmax(-x, 0.1))(iris_pooled)\n", 344 | "iris_decisions = Concatenate(name='decision')([iris_left, iris_right])\n", 345 | "\n", 346 | "iris_log_decisions = Lambda(lambda x: 
K.log(x))(iris_decisions)\n", 347 | "iris_route = Dense(4, use_bias=False, trainable=False, name='route')(iris_log_decisions) # this is normally a dense layer which is not trained - i.e. a fix adj. matrix in graphcnn\n", 348 | "iris_exp_route = Lambda(lambda x: K.exp(x))(iris_route)\n", 349 | "iris_leaf = Dense(3, activation='softmax', name='leaf', use_bias=False) (iris_exp_route) # this is the output leaves\n", 350 | "\n", 351 | "iris_model = Model(inputs=iris_input, outputs=iris_leaf)\n", 352 | "iris_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n", 353 | "iris_model.summary()" 354 | ] 355 | }, 356 | { 357 | "cell_type": "code", 358 | "execution_count": 16, 359 | "metadata": {}, 360 | "outputs": [], 361 | "source": [ 362 | "tf_coef = np.expand_dims(coef_sparse, axis=1)\n", 363 | "\n", 364 | "iris_model.get_layer('nodes').set_weights([tf_coef, inter])\n", 365 | "iris_model.get_layer('route').set_weights([route_array.T])\n", 366 | "iris_model.get_layer('leaf').set_weights([leaf])" 367 | ] 368 | }, 369 | { 370 | "cell_type": "code", 371 | "execution_count": 17, 372 | "metadata": {}, 373 | "outputs": [ 374 | { 375 | "data": { 376 | "text/plain": [ 377 | "0.96" 378 | ] 379 | }, 380 | "execution_count": 17, 381 | "metadata": {}, 382 | "output_type": "execute_result" 383 | } 384 | ], 385 | "source": [ 386 | "accuracy_score(y, np.argmax(iris_model.predict(X), axis=1))" 387 | ] 388 | }, 389 | { 390 | "cell_type": "code", 391 | "execution_count": 19, 392 | "metadata": {}, 393 | "outputs": [], 394 | "source": [ 395 | "y_cat = keras.utils.to_categorical(y)" 396 | ] 397 | }, 398 | { 399 | "cell_type": "code", 400 | "execution_count": 22, 401 | "metadata": {}, 402 | "outputs": [ 403 | { 404 | "data": { 405 | "text/plain": [ 406 | "" 407 | ] 408 | }, 409 | "execution_count": 22, 410 | "metadata": {}, 411 | "output_type": "execute_result" 412 | } 413 | ], 414 | "source": [ 415 | "iris_model.fit(X, y_cat, epochs=10, verbose=0)" 416 | ] 417 
"""
Tree Grad

Implementation of an online learning approach for tree based models.

``fit`` trains a LightGBM ensemble.  ``partial_fit`` converts the fitted
ensemble into a differentiable parameterisation (via ``tree_utils``) and
refines it with mini-batch gradient descent (adam).  Subsequent calls to
``partial_fit`` continue from the previously refined parameters.
"""
import lightgbm as lgb
import copy

from sklearn.base import ClassifierMixin, RegressorMixin
from sklearn.base import BaseEstimator
from sklearn.utils.validation import check_is_fitted
from sklearn.preprocessing import LabelBinarizer

import autograd.numpy as np
from autograd import grad
from autograd.misc.optimizers import adam
from autograd.misc.flatten import flatten as weights_flatten

from treegrad.tree_utils import (
    split_trees_by_classes,
    multiclass_trees_to_param,
    gbm_gen,
    simple_callback,
    multi_tree_to_param,
    sigmoid,
    generate_batch,
)

# Parameters come as repeating (coef, bias, leaf) triples per tree.
_NUM_UNPACK = 3
# Numerical floor inside log() to avoid log(0).
_EPS = 1e-11


def _l1_penalty(weights):
    """L1 regularisation over the split coefficients and biases of every tree.

    ``weights`` is a flat list of per-tree (coef, bias, leaf) triples; only
    the first two entries of each triple are penalised — leaf values are not.
    """
    reg = 0.0
    for start in range(0, len(weights), _NUM_UNPACK):
        param_triple = weights[start : start + _NUM_UNPACK]
        flattened, _ = weights_flatten(param_triple[:2])
        reg += np.sum(np.abs(flattened)) * 1.0
    return reg


def _multiclass_loss_factory(model_, X, y_ohe, batch_indices):
    """Return a mini-batch training loss: multiclass NLL + L1 penalty."""

    def training_loss(weights, idx=0):
        t_idx_ = batch_indices(idx)
        preds = model_(weights, X[t_idx_, :])
        loglik = -np.sum(np.log(preds + _EPS) * y_ohe[t_idx_, :])
        return loglik + _l1_penalty(weights)

    return training_loss


def _binary_loss_factory(model_, X, y, batch_indices):
    """Return a mini-batch training loss: Bernoulli NLL + L1 penalty."""

    def training_loss(weights, idx=0):
        t_idx_ = batch_indices(idx)
        preds = sigmoid(model_(weights, X[t_idx_, :]))
        label_probabilities = preds * y[t_idx_] + (1 - preds) * (1 - y[t_idx_])
        loglik = -np.sum(np.log(label_probabilities))
        return loglik + _l1_penalty(weights)

    return training_loss


def _regression_loss_factory(model_, X, y, batch_indices):
    """Return a mini-batch training loss: squared error + L1 penalty.

    BUG FIX: the previous implementation reused the Bernoulli likelihood
    (``sigmoid(preds) * y + ...``) for regression targets; for unbounded
    continuous ``y`` that expression goes negative and ``np.log`` of it
    produces NaN gradients, silently corrupting the refined parameters.
    """

    def training_loss(weights, idx=0):
        t_idx_ = batch_indices(idx)
        preds = model_(weights, X[t_idx_, :])
        loss = np.sum((preds - y[t_idx_]) ** 2)
        return loss + _l1_penalty(weights)

    return training_loss


class BaseTreeGrad(BaseEstimator):
    """Shared configuration for the TreeGrad estimators.

    Parameters
    ----------
    num_leaves, max_depth, learning_rate, n_estimators :
        Forwarded to the underlying LightGBM model.
    autograd_config : dict or None
        Settings for the gradient refinement step (keys used:
        ``batch_size``, ``step_size``, ``num_iters``, ``verbose``,
        ``refit_splits``).  ``None`` selects the defaults.
    """

    def __init__(
        self,
        num_leaves=31,
        max_depth=-1,
        learning_rate=0.1,
        n_estimators=100,
        autograd_config=None,
    ):
        # BUG FIX: the default used to be a mutable dict literal, shared
        # between every instance constructed with the default argument.
        if autograd_config is None:
            autograd_config = {"refit_splits": False, "batch_size": 32}
        self.ensemble_config = {
            "num_leaves": num_leaves,
            "max_depth": max_depth,
            "learning_rate": learning_rate,
            "n_estimators": n_estimators,
        }
        self.autograd_config = autograd_config

    def _optim_settings(self):
        """Return ``(step_size, num_iters, callback)`` from ``autograd_config``.

        NOTE(review): the callback is suppressed when ``verbose`` is True,
        matching the original behaviour — confirm this is intentional.
        """
        step_size = self.autograd_config.get("step_size", 0.05)
        num_iters = self.autograd_config.get("num_iters", 1000)
        callback = (
            None if self.autograd_config.get("verbose", False) else simple_callback
        )
        return step_size, num_iters, callback


class TGDClassifier(BaseTreeGrad, ClassifierMixin):
    """LightGBM classifier with differentiable ``partial_fit`` refinement."""

    def fit(self, X, y):
        """Fit the base LightGBM classifier; resets any partial refinement."""
        self.base_model_ = lgb.LGBMClassifier(**self.ensemble_config)
        self.base_model_.fit(X, y)
        self.n_classes_ = self.base_model_.n_classes_
        self.is_partial = False
        return self

    def _model_and_loss(self, X, y, params):
        """Build the differentiable GBM and its training loss for ``params``.

        ``params`` is the (weights, routes, sparsity) structure produced by
        ``multi_tree_to_param`` / ``multiclass_trees_to_param``.
        """
        batch_indices = generate_batch(X, self.autograd_config.get("batch_size", 32))
        if self.n_classes_ > 2:
            y_ohe = LabelBinarizer().fit_transform(y)
            model_ = gbm_gen(params[0], X, params[2], params[1], True, self.n_classes_)
            loss = _multiclass_loss_factory(model_, X, y_ohe, batch_indices)
        else:
            # binary labels are used directly (no one-hot encoding)
            model_ = gbm_gen(params[0], X, params[2], params[1], False, 2)
            loss = _binary_loss_factory(model_, X, y, batch_indices)
        return model_, loss

    def partial_fit_base(self, X, y):
        """First refinement: extract parameters from the LightGBM dump, then adam."""
        check_is_fitted(self, "base_model_")
        step_size, num_iters, callback = self._optim_settings()

        model_dump = self.base_model_.booster_.dump_model()
        trees_ = [m["tree_structure"] for m in model_dump["tree_info"]]
        if self.n_classes_ > 2:
            trees = split_trees_by_classes(trees_, self.n_classes_)
            trees_params = multiclass_trees_to_param(X, y, trees)
        else:
            trees_params = multi_tree_to_param(X, y, trees_)

        _, training_loss = self._model_and_loss(X, y, trees_params)
        param_ = adam(
            grad(training_loss),
            trees_params[0],
            callback=callback,
            step_size=step_size,
            num_iters=num_iters,
        )

        self.base_param_ = copy.deepcopy(trees_params)
        self.partial_param_ = param_
        self.is_partial = True
        return self

    def partial_fit_param(self, X, y):
        """Subsequent refinements: continue adam from ``partial_param_``."""
        check_is_fitted(self, "base_model_")
        check_is_fitted(self, "base_param_")
        check_is_fitted(self, "partial_param_")
        step_size, num_iters, callback = self._optim_settings()

        # the model structure comes from base_param_; the optimized weights
        # are seeded from the previous refinement
        _, training_loss = self._model_and_loss(X, y, self.base_param_)
        param_ = adam(
            grad(training_loss),
            self.partial_param_,
            callback=callback,
            step_size=step_size,
            num_iters=num_iters,
        )

        self.partial_param_ = param_
        self.is_partial = True
        return self

    def partial_fit(self, X, y):
        """Refine: from the LightGBM dump on first call, then from itself."""
        check_is_fitted(self, "base_model_")
        if self.is_partial:
            self.partial_fit_param(X, y)
        else:
            self.partial_fit_base(X, y)
        return self

    def _partial_raw_predict(self, X):
        """Raw differentiable-model outputs under the refined parameters."""
        multi_class = self.n_classes_ > 2
        model_ = gbm_gen(
            self.partial_param_,
            X,
            self.base_param_[2],
            self.base_param_[1],
            multi_class,
            self.n_classes_,
        )
        return model_(self.partial_param_, X)

    def predict(self, X):
        """Class labels; delegates to LightGBM until ``partial_fit`` is used."""
        check_is_fitted(self, "base_model_")
        if not self.is_partial:
            return self.base_model_.predict(X)
        preds = self._partial_raw_predict(X)
        if self.n_classes_ > 2:
            return np.argmax(preds, axis=1)
        return np.round(sigmoid(preds))

    def predict_proba(self, X):
        """Class probabilities, shape (n_samples, n_classes)."""
        check_is_fitted(self, "base_model_")
        if not self.is_partial:
            return self.base_model_.predict_proba(X)
        preds = self._partial_raw_predict(X)
        if self.n_classes_ > 2:
            return preds
        pred_positive = sigmoid(preds)
        return np.stack([1 - pred_positive, pred_positive], axis=-1)


class TGDRegressor(BaseTreeGrad, RegressorMixin):
    """LightGBM regressor with differentiable ``partial_fit`` refinement."""

    def fit(self, X, y):
        """Fit the base LightGBM regressor; resets any partial refinement."""
        self.base_model_ = lgb.LGBMRegressor(**self.ensemble_config)
        self.base_model_.fit(X, y)
        self.n_classes_ = 1
        self.is_partial = False
        return self

    def _loss(self, X, y, params):
        """Squared-error training loss for the differentiable GBM."""
        batch_indices = generate_batch(X, self.autograd_config.get("batch_size", 32))
        model_ = gbm_gen(params[0], X, params[2], params[1], False, 2)
        return _regression_loss_factory(model_, X, y, batch_indices)

    def partial_fit_base(self, X, y):
        """First refinement: extract parameters from the LightGBM dump, then adam."""
        check_is_fitted(self, "base_model_")
        step_size, num_iters, callback = self._optim_settings()

        model_dump = self.base_model_.booster_.dump_model()
        trees_ = [m["tree_structure"] for m in model_dump["tree_info"]]
        trees_params = multi_tree_to_param(X, y, trees_)

        training_loss = self._loss(X, y, trees_params)
        param_ = adam(
            grad(training_loss),
            trees_params[0],
            callback=callback,
            step_size=step_size,
            num_iters=num_iters,
        )

        self.base_param_ = copy.deepcopy(trees_params)
        self.partial_param_ = param_
        self.is_partial = True
        return self

    def partial_fit_param(self, X, y):
        """Subsequent refinements: continue adam from ``partial_param_``."""
        check_is_fitted(self, "base_model_")
        check_is_fitted(self, "base_param_")
        check_is_fitted(self, "partial_param_")
        step_size, num_iters, callback = self._optim_settings()

        training_loss = self._loss(X, y, self.base_param_)
        param_ = adam(
            grad(training_loss),
            self.partial_param_,
            callback=callback,
            step_size=step_size,
            num_iters=num_iters,
        )

        self.partial_param_ = param_
        self.is_partial = True
        return self

    def partial_fit(self, X, y):
        """Refine: from the LightGBM dump on first call, then from itself."""
        check_is_fitted(self, "base_model_")
        if self.is_partial:
            self.partial_fit_param(X, y)
        else:
            self.partial_fit_base(X, y)
        return self

    def predict(self, X):
        """Continuous predictions.

        BUG FIX: the previous implementation pushed the refined model's
        output through ``np.round(sigmoid(...))``, binarizing regression
        predictions to {0, 1}; the raw model output is returned instead.
        """
        check_is_fitted(self, "base_model_")
        if not self.is_partial:
            return self.base_model_.predict(X)
        model_ = gbm_gen(
            self.partial_param_,
            X,
            self.base_param_[2],
            self.base_param_[1],
            False,
            2,
        )
        return model_(self.partial_param_, X)


if __name__ == "__main__":
    # these are test cases - to be refactored out.
    from sklearn.datasets import make_classification

    X, y = make_classification(
        100,
        n_classes=3,
        n_informative=3,
        n_redundant=0,
        n_clusters_per_class=2,
        n_features=10,
    )
    model = TGDClassifier(autograd_config={"num_iters": 5})
    model.fit(X, y)
    print(model.predict(X))

    # partial fit off lightgbm
    model.partial_fit(X, y)
    print(model.predict(X))

    # partial fit off itself
    model.partial_fit(X, y)
    print(model.predict(X))

    # test class binary
    X, y = make_classification(
        100,
        n_classes=2,
        n_informative=3,
        n_redundant=0,
        n_clusters_per_class=2,
        n_features=8,
    )
    model = TGDClassifier(autograd_config={"num_iters": 100})
    model.fit(X, y)
    print(model.predict(X))
    print(np.round(model.predict_proba(X)))

    # partial fit off lightgbm
    model.partial_fit(X, y)
    print(model.predict(X))
    print(np.round(model.predict_proba(X)))

    # partial fit off itself
    model.partial_fit(X, y)
    print(model.predict(X))
def get_route(tree):
    """Decompose a LightGBM tree-structure dict into routing tables.

    Parameters
    ----------
    tree : dict
        One entry of ``Booster.dump_model()["tree_info"][i]["tree_structure"]``:
        nested dicts with ``split_index``/``split_feature``/``threshold`` on
        internal nodes and ``leaf_value`` on leaves.

    Returns
    -------
    (tree_dict, boundary_dict, leaf_dict) : tuple of dict
        ``tree_dict`` maps split index -> {"left": idx, "right": idx},
        ``boundary_dict`` maps node index -> split info (for synthesised leaf
        nodes: the raw leaf value plus its sigmoid as a 2-class "predict"),
        ``leaf_dict`` maps parent split index -> {"left"/"right": leaf dict}.

    NOTE(review): this module re-defines ``get_route`` identically further
    down; the later definition shadows this one.
    """
    tree_dict = {}
    boundary_dict = {}
    leaf_dict = {}

    def recurse(sub_tree, child_split=None, parent=None):
        if "threshold" in sub_tree:
            # Internal split node: record its feature/threshold, and link to
            # any child that is itself a split node.
            entry = {
                "column": sub_tree["split_feature"],
                "value": sub_tree["threshold"],
            }
            boundary_dict[sub_tree["split_index"]] = entry
            for side in ("left", "right"):
                child = sub_tree[side + "_child"]
                if "split_index" in child:
                    entry[side] = child["split_index"]
        else:
            # Leaf: remember it under its parent's split index.
            leaf_dict.setdefault(parent, {})[child_split] = sub_tree

        # Adjacency edges to child splits (leaves have no split_index yet).
        route_path = {}
        for side in ("left", "right"):
            child = sub_tree.get(side + "_child")
            if isinstance(child, dict) and "split_index" in child:
                route_path[side] = child["split_index"]
        if route_path:
            tree_dict[sub_tree["split_index"]] = route_path.copy()

        if "left_child" in sub_tree:
            recurse(sub_tree["left_child"], "left", sub_tree["split_index"])
        if "right_child" in sub_tree:
            recurse(sub_tree["right_child"], "right", sub_tree["split_index"])

    recurse(tree)

    # Synthesise explicit node ids for leaves and splice them into the tables.
    max_index = np.max(list(boundary_dict.keys()))
    for parent, children in leaf_dict.items():
        for side in ("left", "right"):
            if side not in children:
                continue
            max_index += 1
            boundary_dict[parent][side] = max_index
            tree_dict.setdefault(parent, {})[side] = max_index
            tree_dict[max_index] = {}
            leaf_value = children[side]["leaf_value"]
            pred_val = expit(leaf_value)  # sigmoid -> P(class 1)
            boundary_dict[max_index] = {
                "predict": np.array([1 - pred_val, pred_val]),
                "leaf_value": leaf_value,
            }

    return tree_dict, boundary_dict, leaf_dict


def boundary_dict_mapping(boundary_dict, mode="raw"):
    """Flatten a boundary dict into stacked (weights, inter, pred) arrays.

    ``mode="raw"`` stacks raw leaf values; ``mode="proba"`` stacks the
    two-class probability vectors.

    Returns
    -------
    ([weights, inter, pred], coef_mapping)
        ``coef_mapping`` lists the node ids that contributed coefficients.

    Raises
    ------
    ValueError
        If *mode* is not one of ``{"proba", "raw"}``.  (Was a bare
        ``Exception``; ``ValueError`` is still caught by existing
        ``except Exception`` handlers.)

    NOTE(review): shadowed by later re-definitions in this module — the final
    one takes an extra leading ``X`` argument.
    """
    weights, inter, pred = [], [], []
    coef_mapping = []
    for idx in sorted(boundary_dict):
        node = boundary_dict[idx]
        if "coef" in node:
            weights.append(node["coef"])
            inter.append(node["inter"])
            coef_mapping.append(idx)
        elif mode == "proba":
            pred.append(node["predict"])
        elif mode == "raw":
            pred.append(node["leaf_value"])
        else:
            raise ValueError(
                "Expecting mode in ['proba', 'raw'], got {}".format(mode)
            )

    return [np.hstack(weights), np.hstack(inter), np.vstack(pred)], coef_mapping
class BaseTree(object):
    """Build and route perfect binary trees represented as adjacency lists.

    NOTE(review): this class is re-defined identically later in the module;
    the later definition shadows this one.
    """

    def build_tree(self, depth=2):
        """Build the adjacency list of a perfect binary tree of *depth* levels.

        Nodes are numbered breadth-first from 0; leaves map to empty dicts.
        (The original docstring claimed "up to depth of 2", but the
        construction is generic in *depth*.)
        """
        total_nodes = np.sum([2 ** level for level in range(depth)])
        nodes = list(range(total_nodes))
        # Split node ids into per-level groups; only non-leaf levels get edges.
        level_offsets = np.cumsum([2 ** level for level in range(depth - 1)])
        levels = [chunk.tolist() for chunk in np.array_split(nodes, level_offsets)]

        adjacency = dict((node, {}) for node in nodes)
        for level in levels[:-1]:
            for node in level:
                entry = adjacency.get(node, {})
                # Heap-style numbering: children of i are 2i+1 and 2i+2.
                entry["left"] = node * 2 + 1
                entry["right"] = node * 2 + 2
                adjacency[node] = entry.copy()
        return adjacency

    def calculate_routes(self, adj_list=None):
        """Calculate leaf routes in GBM format.

        {0:{'left': 1, 'right': 2}, 1:{}, 2:{}} --> [([(0, 0)], 1),
                                                     ([(0, 1)], 2)]
        {0:{'left': 1, 'right': 2}, 1:{'left': 3, 'right':4},
         2:{}, 3:{}, 4: {}} --> [([(0, 0), (1, 0)], 3),
                                 ([(0, 0), (1, 1)], 4),
                                 ([(0, 1)], 2)]

        Each route is ``(path, leaf_id)`` where path steps are
        ``(node, 0)`` for a left turn and ``(node, 1)`` for a right turn.
        Defaults to a fresh depth-3 tree when *adj_list* is None.
        """
        if adj_list is None:
            adj_list = self.build_tree(3)

        all_paths = []

        def walk(node, path):
            edges = adj_list[node]
            if not edges:
                all_paths.append((path, node))
            else:
                walk(edges["left"], path + [(node, 0)])
                walk(edges["right"], path + [(node, 1)])

        walk(0, [])
        return all_paths
class Tree(BaseTree):
    """Stateful tree wrapper over BaseTree helpers.

    Also used to help figure out how to maintain state around pruning and
    grafting nodes.

    Usage:
        tt = Tree().graft()
        tt.plot()
    """

    def __init__(self, depth=3, nodes=None, tree=None, previous_state={}):
        # NOTE(review): mutable default kept for interface compatibility;
        # previous_state is not used in this chunk.
        self.depth = depth
        self.nodes = (
            nodes
            if nodes is not None
            else np.sum([2 ** level for level in range(self.depth)])
        )
        self.tree = tree if tree is not None else self.build_tree(self.depth)
        self.update()

    def update(self):
        self.update_route()
        self.update_nodes()

    def update_nodes(self):
        # Count internal (non-leaf) nodes only.
        self.nodes = len([key for key, edges in self.tree.items() if len(edges) > 0])

    def update_depth(self):
        self.depth = max(len(path) for path, _ in self.route)

    def update_route(self):
        # Routes sorted by leaf id; route_list is the dense one-hot form.
        self.route = sorted(self.calculate_routes(self.tree), key=lambda item: item[1])
        self.route_list = old_route_to_new_route(self.route, self.nodes)


def l2_norm(params):
    """Return the squared L2 norm (vec . vec) of params flattened to a vector."""
    vec, _ = weights_flatten(params)
    return np.dot(vec, vec)


def get_route(tree):
    # gets the route for the tree...
    # NOTE(review): byte-for-byte duplicate of the earlier get_route; this
    # copy shadows it.
    tree_dict = {}
    boundary_dict = {}
    leaf_dict = {}

    def recurse(node, child_split=None, parent=None):
        if "threshold" in node:
            info = {
                "column": node["split_feature"],
                "value": node["threshold"],
            }
            boundary_dict[node["split_index"]] = info
            for side in ("left", "right"):
                child = node[side + "_child"]
                if "split_index" in child:
                    info[side] = child["split_index"]
        else:
            # we're a leaf!
            leaf_dict.setdefault(parent, {})[child_split] = node

        edges = {}
        for side in ("left", "right"):
            child = node.get(side + "_child")
            if isinstance(child, dict) and "split_index" in child:
                edges[side] = child["split_index"]
        if edges:
            tree_dict[node["split_index"]] = edges.copy()

        if "left_child" in node:
            recurse(node["left_child"], "left", node["split_index"])
        if "right_child" in node:
            recurse(node["right_child"], "right", node["split_index"])

    recurse(tree)

    # combine leaf_dict and boundary_dict: assign fresh ids to the leaves.
    max_index = np.max(list(boundary_dict.keys()))
    for parent, children in leaf_dict.items():
        for side in ("left", "right"):
            if side not in children:
                continue
            max_index += 1
            boundary_dict[parent][side] = max_index
            tree_dict.setdefault(parent, {})[side] = max_index
            tree_dict[max_index] = {}
            leaf_value = children[side]["leaf_value"]
            pred_val = expit(leaf_value)
            boundary_dict[max_index] = {
                "predict": np.array([1 - pred_val, pred_val]),
                "leaf_value": leaf_value,
            }

    return tree_dict, boundary_dict, leaf_dict
| 321 | def boundary_dict_mapping(boundary_dict, mode="raw"): 322 | weights = [] 323 | inter = [] 324 | pred = [] 325 | 326 | coef_mapping = [] 327 | for idx in sorted(list(boundary_dict.keys())): 328 | if "coef" in boundary_dict[idx]: 329 | weights.append(boundary_dict[idx]["coef"]) 330 | inter.append(boundary_dict[idx]["inter"]) 331 | coef_mapping.append(idx) 332 | else: 333 | if mode == "proba": 334 | pred.append(boundary_dict[idx]["predict"]) 335 | elif mode == "raw": 336 | pred.append(boundary_dict[idx]["leaf_value"]) 337 | else: 338 | raise Exception( 339 | "Expecting mode in ['proba', 'raw'], got {}".format(mode) 340 | ) 341 | 342 | weights = np.hstack(weights) 343 | inter = np.hstack(inter) 344 | pred = np.vstack(pred) 345 | return [(weights), (inter), (pred)], coef_mapping 346 | 347 | 348 | class BaseTree(object): 349 | def build_tree(self, depth=2): 350 | """ 351 | builds the adjancey list up to depth of 2 352 | """ 353 | total_nodes = np.sum([2 ** x for x in range(depth)]) 354 | nodes = list(range(total_nodes)) 355 | nodes_per_level = np.cumsum([2 ** x for x in range(depth - 1)]) 356 | nodes_level = [x.tolist() for x in np.array_split(nodes, nodes_per_level)] 357 | 358 | adj_list = dict((idx, {}) for idx in nodes) 359 | for fr in nodes_level[:-1]: 360 | for i in fr: 361 | i_list = adj_list.get(i, {}) 362 | # the connected nodes always follows this pattern 363 | i_list["left"] = i * 2 + 1 364 | i_list["right"] = i * 2 + 2 365 | adj_list[i] = i_list.copy() 366 | return adj_list 367 | 368 | def calculate_routes(self, adj_list=None): 369 | """ 370 | Calculates routes in GBM format. 
371 | 372 | {0:{'left': 1, 'right': 2}, 1:{}, 2:{}} --> [([(0, 0)], 1), 373 | ([(0, 1)], 2)] 374 | {0:{'left': 1, 'right': 2}, 1:{'left': 3, 'right':4}, 375 | 2:{}, 3:{}, 4: {}} --> [([(0, 0), (1, 0)], 3), 376 | ([(0, 0), (1, 1)], 4), 377 | ([(0, 1)], 2)] 378 | """ 379 | if adj_list is None: 380 | adj_list = self.build_tree(3) 381 | 382 | def get_next(next_node, current_path): 383 | paths = adj_list[next_node] 384 | if len(paths) == 0: 385 | all_paths.append((current_path, next_node)) 386 | else: 387 | # do left... 388 | get_next(paths["left"], current_path + [(next_node, 0)]) 389 | get_next(paths["right"], current_path + [(next_node, 1)]) 390 | 391 | all_paths = [] 392 | get_next(0, []) 393 | return all_paths 394 | 395 | 396 | class Tree(BaseTree): 397 | """ 398 | Tree object to help abstract out some of the methods that are commonly used. 399 | Also used to help figure out how to maintain state around pruning and grafting nodes 400 | 401 | Usage: 402 | tt = Tree().graft() 403 | tt.plot() 404 | """ 405 | 406 | def __init__(self, depth=3, nodes=None, tree=None, previous_state={}): 407 | self.depth = depth 408 | self.nodes = ( 409 | nodes if nodes is not None else np.sum([2 ** x for x in range(self.depth)]) 410 | ) 411 | self.tree = tree if tree is not None else self.build_tree(self.depth) 412 | self.update() 413 | 414 | def update(self): 415 | self.update_route() 416 | self.update_nodes() 417 | 418 | def update_nodes(self): 419 | self.nodes = len([k for k, v in self.tree.items() if len(v) > 0]) 420 | 421 | def update_depth(self): 422 | all_routes = [len(r) for r, _ in self.route] 423 | self.depth = max(all_routes) 424 | 425 | def update_route(self): 426 | self.route = self.calculate_routes(self.tree) 427 | self.route.sort(key=lambda x: x[1]) 428 | 429 | 430 | flatten = lambda l: [item for sublist in l for item in sublist] 431 | 432 | 433 | def split_trees_by_classes(trees, n_classes): 434 | # 
def split_trees_by_classes(trees, n_classes):
    """Group a flat LightGBM tree list into per-class lists (round-robin).

    LightGBM emits boosters interleaved by class for multiclass models; see
    https://github.com/BayesWitnesses/m2cgen/blob/master/m2cgen/assemblers/boosting.py
    and https://github.com/dmlc/xgboost/issues/1746#issuecomment-267400592
    for the layout.  Binary models are returned unchanged.
    """
    if n_classes == 2:
        return trees

    grouped = [[] for _ in range(n_classes)]
    for position, tree in enumerate(trees):
        grouped[position % n_classes].append(tree)
    return grouped


def calculate_boundary(X, boundary_value, column):
    """Turn a tree split threshold into a (coef, intercept) linear boundary.

    Scales the coefficient by half the observed range of ``X[:, column]``,
    falling back to a coefficient of at least 1.0 when the threshold or the
    range is small, so the soft decision ``x * coef + inter`` crosses zero
    near the threshold.

    Returns
    -------
    coef : tuple(list, list, list)
        ``(data, row, col)`` triplet for building a 1-row sparse coefficient.
    inter : list[float]
        Single-element intercept.

    (Cleanups vs. original: removed the dead initial ``coef_ = 1`` and
    hoisted the intercept computation, which was identical in both branches.)
    """
    x_range = np.max(X[:, column]) - np.min(X[:, column])
    if np.abs(boundary_value) > 0.1 and np.abs(x_range) > 1:
        coef_ = x_range / 2
    else:
        coef_ = max(x_range / 2, 1.0)
    inter = [coef_ * -boundary_value]

    coef = ([coef_], [0], [column])
    return coef, inter
470 | """ 471 | tree_temp = Tree(tree=tree_dict) 472 | 473 | unseen_nodes = [key for key, val in tree_dict.items() if len(val) > 0] 474 | 475 | route_path = [x[0] for x in tree_temp.route] 476 | route_path = [[x[0] for x in x_path] for x_path in route_path] 477 | route_path.sort() 478 | route_path = list(k for k, _ in itertools.groupby(route_path)) 479 | 480 | for node in unseen_nodes: 481 | coef, inter = calculate_boundary( 482 | X, boundary_dict[node]["value"], boundary_dict[node]["column"] 483 | ) 484 | boundary_dict[node]["coef"] = coef 485 | boundary_dict[node]["inter"] = inter 486 | 487 | if output == "dict": 488 | return boundary_dict 489 | 490 | weights = [] 491 | inter = [] 492 | pred = [] 493 | for idx in sorted(list(boundary_dict.keys())): 494 | if "coef" in boundary_dict[idx]: 495 | weights.append(boundary_dict[idx]["coef"]) 496 | inter.append(boundary_dict[idx]["inter"]) 497 | else: 498 | pred.append(boundary_dict[idx]["predict"]) 499 | 500 | # weights = np.hstack(weights) 501 | # inter = np.hstack(inter) 502 | # pred = np.vstack(pred) 503 | return [(weights), (inter), (pred)] 504 | 505 | 506 | # from tree build the sparse coef representation 507 | def tree_to_nnet(X, y, tree): 508 | """ 509 | Outputs the parameters for neural network 510 | """ 511 | t_d, b_d, _ = get_route(tree) 512 | tt = Tree(tree=t_d) 513 | boundary_dict = boundary_weights(X, y, t_d, b_d, output="dict") 514 | return boundary_dict, tt.route 515 | 516 | 517 | def boundary_dict_mapping(X, boundary_dict, mode="raw"): 518 | num_cols = X.shape[1] 519 | weights = [] 520 | inter = [] 521 | pred = [] 522 | 523 | coef_mapping = [] 524 | for idx in sorted(list(boundary_dict.keys())): 525 | if "coef" in boundary_dict[idx]: 526 | data, row, col = boundary_dict[idx]["coef"] 527 | weights_ = scipy.sparse.coo_matrix((data, (row, col)), shape=[1, num_cols]) 528 | weights.append(np.array(weights_.todense())) 529 | inter.append(np.array(boundary_dict[idx]["inter"])) 530 | coef_mapping.append(idx) 531 
def sigmoid(z):
    """Numerically-clipped logistic function (inputs clipped to [-32, 32])."""
    z = np.clip(z, -32, 32)
    return 1.0 / (1 + np.exp(-z))


# softmax by axis...
def gumbel_softmax(X, tau=1.0, eps=np.finfo(float).eps):
    """Element-wise temperature-scaled gate.

    NOTE(review): despite the name, this is not a Gumbel-softmax; it computes
    sigmoid(-X / tau) element-wise (note ``np.exp(X_temp)``, not
    ``np.exp(-X_temp)``) — the TF notebook version uses the opposite sign;
    confirm which is intended.  ``eps`` is accepted but unused.
    """
    X_temp = np.clip(X / tau, -32, 32)
    return 1 / (1 + np.exp(X_temp))


def proba_to_alpha(proba=0.1):
    """Map a probability to its odds: alpha = p / (1 - p)."""
    return proba / (1 - proba)


def alpha_to_proba(alpha=0.9):
    """Inverse of proba_to_alpha: p = alpha / (alpha + 1)."""
    return alpha / (alpha + 1)


def old_route_to_new_route(route, num_nodes):
    """Convert ``[(path, leaf_id), ...]`` routes into a dense route matrix.

    Each row one-hot encodes a leaf's path: column ``node`` is set for a left
    turn (direction 0) and ``node + num_nodes`` for a right turn, matching the
    ``hstack([decisions, -decisions])`` layout used by the soft trees.

    Returns None if the route cannot be converted (e.g. ``route`` is None).
    """
    rows = []
    try:
        for path, _leaf in route:
            data, row_idx, col_idx = [], [], []
            for node, direction in path:
                data.append(1)
                row_idx.append(0)
                col_idx.append(node if direction == 0 else node + num_nodes)
            rows.append(
                scipy.sparse.coo_matrix(
                    (data, (row_idx, col_idx)), shape=(1, num_nodes * 2)
                ).toarray()
            )
        return np.vstack(rows)
    except Exception:
        # Was a bare ``except:`` — narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return None


def tree_to_param(X, y, tree):
    """Convert one LightGBM tree into soft-tree parameters.

    Returns ``(param, sparse_mask, route_matrix)`` where ``param`` is
    ``(coef_data, intercepts, leaf_values)`` and the route matrix is sized to
    the number of stored sparse coefficients.
    """
    boundary, route = tree_to_nnet(X, y, tree)
    (coef_, inter_, leaf), _ = boundary_dict_mapping(X, boundary)

    coef = np.vstack(coef_).T
    sparse_coef = scipy.sparse.coo_matrix(coef)
    param = (sparse_coef.data, np.hstack(inter_), np.array(leaf))
    # Mask of non-zero coefficient positions alongside the dense route matrix.
    return param, (coef != 0) * 1.0, old_route_to_new_route(route, param[0].shape[0])
def multi_tree_to_param(X, y, trees):
    """Convert a list of LightGBM trees into parallel parameter lists.

    Returns ``(all_param, all_sparse_info, all_route)``: the per-tree
    parameter triplets flattened into one list, plus per-tree sparsity masks
    and route matrices.
    """
    param_route = [tree_to_param(X, y, tree) for tree in trees]
    all_param = flatten([pr[0] for pr in param_route])
    all_sparse_info = [pr[1] for pr in param_route]
    all_route = [pr[2] for pr in param_route]
    return all_param, all_sparse_info, all_route


def multiclass_trees_to_param(X, y, multitrees):
    """As multi_tree_to_param, but over per-class tree lists (multiclass)."""
    per_class = [multi_tree_to_param(X, y, class_trees) for class_trees in multitrees]
    all_param = flatten([pc[0] for pc in per_class])
    all_sparse_info = [pc[1] for pc in per_class]
    all_route = [pc[2] for pc in per_class]
    return all_param, all_sparse_info, all_route


def gbm_gen(
    param=None,
    X=None,
    all_route=None,
    all_sparse_info=None,
    multi=False,
    num_classes=3,
    tau=0.01,
    eps=1e-11,
):
    """Build a differentiable soft-GBM prediction function.

    Arguments mirror the output of multi_tree_to_param /
    multiclass_trees_to_param; ``param`` and ``X`` are accepted for interface
    compatibility but the returned closures take their own ``(params, X)``.

    Returns ``boosted_tree(params, X)`` for binary models, or
    ``multi_tree(params, X)`` (softmax over per-class boosters) when
    ``multi`` is True.
    """

    def decision_tree(param, X, route, sparse_info, tau=tau, eps=eps):
        # Soft decision for one tree: each node contributes a mirrored pair of
        # logits (go-left / go-right); a leaf's probability is the product of
        # its path's gate probabilities (sum of logs), then leaves are mixed.
        coef, inter, leaf = param
        coef_sparse = sparse_info * coef  # re-impose the tree's sparsity mask
        decisions = np.dot(X, np.hstack([coef_sparse, -coef_sparse])) + np.hstack(
            [inter, -inter]
        )
        log_gate = np.log(gumbel_softmax(decisions, tau=tau) + eps)
        leaf_probas = np.exp(np.dot(log_gate, route.T))
        return np.dot(leaf_probas, leaf)

    def boosted_tree(
        all_param, X, all_route=all_route, all_sparse_info=all_sparse_info
    ):
        # Sum per-tree contributions; params are packed three entries per tree.
        num_unpack = 3
        tree_pred = []
        for start in range(0, len(all_param), num_unpack):
            tree_idx = start // num_unpack
            tree_pred.append(
                decision_tree(
                    all_param[start : start + num_unpack],
                    X,
                    all_route[tree_idx],
                    all_sparse_info[tree_idx],
                )
            )
        return reduce(lambda a, b: a + b, tree_pred)

    def multi_tree(
        all_param,
        X,
        all_route=all_route,
        all_sparse_info=all_sparse_info,
        num_classes=num_classes,
    ):
        # One booster per class, then a softmax across the class scores.
        num_unpack = len(all_param) // num_classes
        class_scores = []
        for start in range(0, len(all_param), num_unpack):
            class_idx = start // num_unpack
            class_scores.append(
                boosted_tree(
                    all_param[start : start + num_unpack],
                    X,
                    all_route[class_idx],
                    all_sparse_info[class_idx],
                )
            )
        exp_scores = [np.exp(score) for score in class_scores]
        total = reduce(lambda a, b: a + b, exp_scores)
        return np.stack([score / total for score in exp_scores], axis=-1)

    return multi_tree if multi else boosted_tree


def simple_callback(params, t, g):
    """Adam callback: print progress each iteration.

    (The original guard ``(t + 1) % 1 == 0`` was tautological — every integer
    is divisible by 1 — so the print is unconditional.)
    """
    print("Iteration {}".format(t + 1))


def update_tree_info(tree, split_index, threshold, split_feature=None):
    """Return a copy of *tree* with the node matching ``split_index`` updated.

    Sets the node's threshold (and, when given, split_feature); every other
    node is copied unchanged and the input tree is not mutated.  (Simplified
    from the original two-helper version whose ``update_level`` returned the
    same value on both branches.)
    """

    def visit(node):
        node = node.copy()
        if node.get("split_index", None) == split_index:
            node["threshold"] = threshold
            if split_feature is not None:
                node["split_feature"] = split_feature
        for key, value in node.items():
            if isinstance(value, dict):
                node[key] = visit(value)
        return node

    return visit(tree)
def update_leaf_info(tree, leaf_index, leaf_value):
    """Return a copy of *tree* with the leaf matching ``leaf_index`` updated.

    Every other node is copied unchanged; the input tree is not mutated.
    (Simplified from the original two-helper version whose ``update_level``
    returned the same value on both branches.)
    """

    def visit(node):
        node = node.copy()
        if node.get("leaf_index", None) == leaf_index:
            node["leaf_value"] = leaf_value
        for key, value in node.items():
            if isinstance(value, dict):
                node[key] = visit(value)
        return node

    return visit(tree)


def param_to_tree(param, sparse_info, single_tree):
    """Write trained soft-tree parameters back into a LightGBM tree dict.

    Binary-class only (per the original comment: multiclass comparison is
    harder): thresholds are recovered as ``-inter / coef`` and leaf values
    copied in directly.  Returns a deep-copied, updated tree.
    """
    coef, inter, leaf = param
    thresholds = (-inter / coef).tolist()
    leaves = leaf.tolist()
    tree = copy.deepcopy(single_tree)

    # Row (feature) index of the first non-zero coefficient per split column.
    feat_splits = [
        np.nonzero(sparse_info[:, col])[0].tolist()[0]
        for col in range(sparse_info.shape[1])
    ]

    # Replace each split's threshold/feature, then each leaf's value.
    for split_idx, threshold in enumerate(thresholds):
        tree = update_tree_info(tree.copy(), split_idx, threshold, feat_splits[split_idx])
    for leaf_idx, value in enumerate(leaves):
        tree = update_leaf_info(tree.copy(), leaf_idx, value)
    return tree


def main():
    """End-to-end demo: fit LightGBM, convert to a soft tree, tune with adam.

    Returns a dict of train AUCs for the base conversion, the tuned net, and
    LightGBM itself.
    """
    num_iters = 10
    X, y = make_classification(
        100,
        n_classes=3,
        n_informative=3,
        n_redundant=0,
        n_clusters_per_class=2,
        n_features=20,
    )

    # NOTE(review): objective="binary" is used although the data above has
    # three classes - confirm whether "multiclass" was intended.
    model = lgb.LGBMClassifier(
        boosting_type="gbdt", objective="binary", n_estimators=3, random_state=1
    )
    model.fit(X, y)

    model_dump = model.booster_.dump_model()
    trees_ = [m["tree_structure"] for m in model_dump["tree_info"]]

    # One-hot targets for multiclass; plain labels for binary.
    # NOTE(review): LabelBinarizer yields a single column for binary input,
    # so nclass would be 1 (not 2) there - the nclass == 2 branch looks dead.
    lb = LabelBinarizer()
    y_ohe = lb.fit_transform(y)
    nclass = y_ohe.shape[1]
    if nclass == 2:
        y_ohe = y

    if nclass > 2:
        trees = split_trees_by_classes(trees_, nclass)
        trees_params = multiclass_trees_to_param(X, y, trees)
        model_ = gbm_gen(
            trees_params[0], X, trees_params[2], trees_params[1], True, nclass
        )

        def training_loss(weights, idx=0):
            # Negative log-likelihood of the one-hot training labels.
            preds = model_(weights, X)
            return -np.sum(np.log(preds + 1e-7) * y_ohe)

    else:
        trees_params = multi_tree_to_param(X, y, trees_)
        model_ = gbm_gen(trees_params[0], X, trees_params[2], trees_params[1], False, 2)

        def training_loss(weights, idx=0):
            # Bernoulli negative log-likelihood of the binary labels.
            preds = sigmoid(model_(weights, X))
            label_probabilities = preds * y + (1 - preds) * (1 - y)
            return -np.sum(np.log(label_probabilities))

    # training the model and outputting results
    training_gradient_fun = grad(training_loss)
    param_ = adam(
        training_gradient_fun,
        trees_params[0],
        callback=simple_callback,
        step_size=0.05,
        num_iters=num_iters,
    )

    lgb_predict = model.predict_proba(X)
    if lgb_predict.shape[1] == 2:
        lgb_predict = lgb_predict[:, 1]

    return {
        "train_base": roc_auc_score(y_ohe, model_(trees_params[0], X)),
        "train_nnet": roc_auc_score(y_ohe, model_(param_, X)),
        "train_lgb": roc_auc_score(y_ohe, lgb_predict),
    }
def generate_batch(X, batch_size=32):
    """Return a function mapping an iteration counter to a mini-batch slice.

    Batches cycle: iteration ``i`` selects rows
    ``[(i % B) * batch_size, (i % B + 1) * batch_size)`` where ``B`` is the
    number of batches covering ``X``.
    """
    num_batches = int(np.ceil(X.shape[0] / batch_size))

    def batch_indices(iteration):
        batch = iteration % num_batches
        return slice(batch * batch_size, (batch + 1) * batch_size)

    return batch_indices


if __name__ == "__main__":
    print(main())
treegrad\n", 26 | "import tensorflow as tf\n", 27 | "import numpy as np\n", 28 | "from sklearn.metrics import roc_auc_score, accuracy_score" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 19, 34 | "metadata": { 35 | "colab": { 36 | "base_uri": "https://localhost:8080/", 37 | "height": 72 38 | }, 39 | "colab_type": "code", 40 | "id": "x-w7p-6mQ6NL", 41 | "outputId": "8e09cab1-6b9a-49c1-ac21-3fae4fd23a38" 42 | }, 43 | "outputs": [ 44 | { 45 | "name": "stdout", 46 | "output_type": "stream", 47 | "text": [ 48 | "[0 1 1 0 0 0 1 0 0 1 1 1 1 1 0 1 0 0 0 0 1 0 1 0 1 0 1 1 1 1 1 0 1 1 1 1 0\n", 49 | " 1 0 1 0 1 1 1 0 1 0 1 1 1 0 1 1 1 1 1 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 1\n", 50 | " 0 0 0 0 1 1 0 1 0 0 1 1 1 0 0 0 1 1 1 0 0 1 1 0 1 0]\n" 51 | ] 52 | } 53 | ], 54 | "source": [ 55 | "from sklearn.datasets import make_classification\n", 56 | "import lightgbm as lgb\n", 57 | "X, y = make_classification(100, n_classes=2, n_informative=3, n_redundant=0, n_clusters_per_class=2, n_features=10)\n", 58 | "lgb_model = lgb.LGBMClassifier(n_estimators=3)\n", 59 | "lgb_model.fit(X, y)\n", 60 | "nclass = lgb_model.n_classes_\n", 61 | "print(lgb_model.predict(X))" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": null, 67 | "metadata": { 68 | "colab": {}, 69 | "colab_type": "code", 70 | "id": "7w2arQdURN1h" 71 | }, 72 | "outputs": [], 73 | "source": [ 74 | "from treegrad.tree_utils import multi_tree_to_param\n", 75 | "\n", 76 | "model_dump = lgb_model.booster_.dump_model()\n", 77 | "trees_ = [m[\"tree_structure\"] for m in model_dump[\"tree_info\"]]\n", 78 | "trees_params = multi_tree_to_param(X, y, trees_)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": { 85 | "colab": {}, 86 | "colab_type": "code", 87 | "id": "4YnIAKBeRtC4" 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "weights = trees_params[0]\n", 92 | "sparse_info = trees_params[1]\n", 93 | "routes_list = trees_params[2]" 94 | ] 95 | 
}, 96 | { 97 | "cell_type": "markdown", 98 | "metadata": { 99 | "colab_type": "text", 100 | "id": "MnPlIxDCVpYf" 101 | }, 102 | "source": [ 103 | "------\n", 104 | "\n", 105 | "Now we recreate the node layer definitions" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": { 112 | "colab": {}, 113 | "colab_type": "code", 114 | "id": "LJwRpBmUVo8a" 115 | }, 116 | "outputs": [], 117 | "source": [ 118 | "# to build tree model, it is just a three layer neural network - lets see how we go...\n", 119 | "from tensorflow.keras.constraints import max_norm, MinMaxNorm\n", 120 | "from tensorflow.keras import constraints\n", 121 | "\n", 122 | "'''\n", 123 | "class NodeLayer(tf.keras.layers.Layer):\n", 124 | " def __init__(self, num_nodes):\n", 125 | " super(NodeLayer, self).__init__()\n", 126 | " self.num_nodes = num_nodes\n", 127 | " \n", 128 | " def build(self, input_shape):\n", 129 | " # we may want a sparse one later...ignore it for now\n", 130 | " self.sparse = self.add_variable(\"sparse\", \n", 131 | " shape=[int(input_shape[-1]), \n", 132 | " self.num_nodes],\n", 133 | " trainable=False)\n", 134 | " self.kernel = self.add_variable(\"kernel\", regularizer='l1', constraint=max_norm(2), shape=[self.num_nodes,])\n", 135 | " self.bias = self.add_variable(\"bias\", regularizer='l1', constraint=max_norm(2), shape=[self.num_nodes,])\n", 136 | " \n", 137 | " def call(self, input):\n", 138 | " return tf.matmul(input, tf.concat([self.kernel * self.sparse, -self.kernel * self.sparse], 1)) + tf.concat([self.bias, -self.bias], 0)\n", 139 | "'''\n", 140 | "\n", 141 | "\n", 142 | "class NodeLayer(tf.keras.layers.Layer):\n", 143 | " def __init__(self, num_nodes):\n", 144 | " super(NodeLayer, self).__init__()\n", 145 | " self.num_nodes = num_nodes\n", 146 | " \n", 147 | " def build(self, input_shape):\n", 148 | " # we may want a sparse one later...ignore it for now\n", 149 | " self.kernel = self.add_variable(\"sparse\", \n", 150 | " 
shape=[int(input_shape[-1]), \n", 151 | " self.num_nodes],\n", 152 | " regularizer='l1',\n", 153 | " trainable=True)\n", 154 | " self.bias = self.add_variable(\"bias\", constraint=MinMaxNorm(1., 2.), shape=[self.num_nodes,])\n", 155 | " \n", 156 | " def call(self, input):\n", 157 | " return tf.matmul(input, tf.concat([self.kernel, -self.kernel], 1)) + tf.concat([self.bias, -self.bias], 0)\n", 158 | "\n", 159 | "\n", 160 | " \n", 161 | "def gumbel_softmax(x, tau=0.01):\n", 162 | " x_temp = tf.clip_by_value(x/tau, -32, 32)\n", 163 | " return 1/(1+tf.keras.backend.exp(-x))\n", 164 | "\n", 165 | "def activation1(x):\n", 166 | " return tf.keras.backend.log(tf.clip_by_value(gumbel_softmax(x),tf.keras.backend.epsilon(), 1.))\n", 167 | "\n", 168 | "def activation2(x):\n", 169 | " return tf.keras.backend.exp(tf.clip_by_value(x, -32, 0))\n", 170 | "\n" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "metadata": { 177 | "colab": {}, 178 | "colab_type": "code", 179 | "id": "Hxdcv076SPVf" 180 | }, 181 | "outputs": [], 182 | "source": [ 183 | "\n", 184 | "\n", 185 | "# now we have to construct the tree...\n", 186 | "inputs = tf.keras.Input(shape=(10,))\n", 187 | "decision_tree_layers = []\n", 188 | "for tree_idx in range(len(sparse_info)):\n", 189 | " num_nodes = sparse_info[tree_idx].shape[1]\n", 190 | " decision_tree_layers.append(\n", 191 | " tf.keras.Sequential([\n", 192 | " NodeLayer(num_nodes),\n", 193 | " tf.keras.layers.Lambda(activation1),\n", 194 | " tf.keras.layers.Dense(num_nodes+1, trainable=False, use_bias=False),\n", 195 | " #tf.keras.layers.Lambda(lambda x: tf.matmul(x, routes_list[tree_idx].T.astype(np.float32))),\n", 196 | " tf.keras.layers.Lambda(activation2),\n", 197 | " tf.keras.layers.Dense(1, use_bias=False)\n", 198 | " ], name='decision_tree_{}'.format(tree_idx))(inputs)\n", 199 | " )\n", 200 | "log_preds = tf.keras.layers.Add()(decision_tree_layers)\n", 201 | "log_preds = tf.keras.layers.Lambda(lambda x: 
tf.clip_by_value(x, -32, 32))(log_preds)\n", 202 | "preds = tf.keras.layers.Activation('sigmoid')(log_preds)\n", 203 | "model = tf.keras.Model(inputs=inputs, outputs=preds)\n" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": 24, 209 | "metadata": { 210 | "colab": { 211 | "base_uri": "https://localhost:8080/", 212 | "height": 458 213 | }, 214 | "colab_type": "code", 215 | "id": "Xmo0UsYwXikR", 216 | "outputId": "f23f4c18-8b30-4882-aadb-f38973841fa1" 217 | }, 218 | "outputs": [ 219 | { 220 | "name": "stdout", 221 | "output_type": "stream", 222 | "text": [ 223 | "Model: \"model_1\"\n", 224 | "__________________________________________________________________________________________________\n", 225 | "Layer (type) Output Shape Param # Connected to \n", 226 | "==================================================================================================\n", 227 | "input_2 (InputLayer) [(None, 10)] 0 \n", 228 | "__________________________________________________________________________________________________\n", 229 | "decision_tree_0 (Sequential) (None, 1) 61 input_2[0][0] \n", 230 | "__________________________________________________________________________________________________\n", 231 | "decision_tree_1 (Sequential) (None, 1) 61 input_2[0][0] \n", 232 | "__________________________________________________________________________________________________\n", 233 | "decision_tree_2 (Sequential) (None, 1) 61 input_2[0][0] \n", 234 | "__________________________________________________________________________________________________\n", 235 | "add_1 (Add) (None, 1) 0 decision_tree_0[0][0] \n", 236 | " decision_tree_1[0][0] \n", 237 | " decision_tree_2[0][0] \n", 238 | "__________________________________________________________________________________________________\n", 239 | "lambda_13 (Lambda) (None, 1) 0 add_1[0][0] \n", 240 | "__________________________________________________________________________________________________\n", 241 | 
"activation_1 (Activation) (None, 1) 0 lambda_13[0][0] \n", 242 | "==================================================================================================\n", 243 | "Total params: 183\n", 244 | "Trainable params: 111\n", 245 | "Non-trainable params: 72\n", 246 | "__________________________________________________________________________________________________\n" 247 | ] 248 | } 249 | ], 250 | "source": [ 251 | "model.summary()" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": 25, 257 | "metadata": { 258 | "colab": { 259 | "base_uri": "https://localhost:8080/", 260 | "height": 533 261 | }, 262 | "colab_type": "code", 263 | "id": "_DB-0CGdahfS", 264 | "outputId": "efaf8006-a2a2-449b-977e-590457245575" 265 | }, 266 | "outputs": [ 267 | { 268 | "data": { 269 | "image/png": "iVBORw0KGgoAAAANSUhEUgAABRsAAAIECAYAAACZnPlwAAAABmJLR0QA/wD/AP+gvaeTAAAgAElE\nQVR4nOzdeVgUZ7o28LtYuxtpwBUiIQGJKHEfjQKiMSjjMrjEqEw0Z0i+8XJJBlGToEbjhibqDHCM\nMplEY+bEXAZRRxIVNUQJQZEsxOUw0UEIKm6A7DTI9n5/eOjQgkhD00XD/bsu/rDqraqnqoBH7q5F\nEkIIEBEREREREREREbVOjJncFRAREREREREREVHHwLCRiIiIiIiIiIiIDIJhIxERERERERERERkE\nw0YiIiIiIiIiIiIyCAu5CyAiIjJls2bNkrsEIiIyIC8vLyxbtkzuMoiIiEwWr2wkIiJqhQMHDiA7\nO1vuMoha7Ny5czh37pzcZZiU7OxsHDhwQO4yqA2cO3cOycnJcpdBRERk0nhlIxERUSstXboUs2fP\nlrsMohapuzo3JiZG5kpMx/79+zFnzhwesw6IV6sTERG1Hq9sJCIiIiIiIiIiIoNg2EhERERERERE\nREQGwbCRiIiIiIiIiIiIDIJhIxERERERERERERkEw0YiIiIiIiIiIiIyCIaNRERERNRqx44dg52d\nHb766iu5S2mXFi5cCEmStF/z5s1rMCY+Ph4rV67EwYMH4ebmph37yiuvNBjr7+8PW1tbmJub49ln\nn0VqaqoxdqPVamtrERERAW9v70eOSUpKgo+PD1QqFZycnBAaGor79+9r53/55ZfYsmULampqdJY7\nfPiwzjHu3r17m+0HERERPRrDRiIiIiJqNSGE3CW0e127dkVcXByuXLmC3bt368xbu3Yttm/fjlWr\nVmHmzJnIzMxEnz590K1bN+zduxdHjx7VGX/y5EnExMQgICAAaWlpGDZsmDF3pUXS09MxZswYLFu2\nDBqNptExaWlp8Pf3h5+fH3Jzc3Ho0CF88sknWLRokXbM1KlToVAo4Ofnh8LCQu30adOmITs7G4mJ\niZg8eXKb7w8RERE1jmEjEREREbXalClTUFRUhICAALlLQXl5eZNXzslFqVRi4sSJ6Nu3L6ytrbXT\n33//fXzxxRfYv38/bG1tdZbZvn07zMzMsGDBAhQVFRm7ZIO5cOECVqxYgUWLFmHIkCGPHLdx40Y4\nOjpi/fr1s
LGxgZeXF0JDQ/Hpp5/i8uXL2nFLlizB4MGDMXnyZFRXVwMAJElC79694evri2eeeabN\n94mIiIgax7CRiIiIiDqU3bt3IycnR+4ymuXq1atYs2YN1q9fD4VC0WC+t7c3QkJCcPPmTbz55psy\nVGgYgwcPxsGDBzF37lydoLW+6upqHD16FGPHjoUkSdrpkyZNghACsbGxOuPXrVuH8+fPIzIysk1r\nJyIiIv0wbCQiIiKiVklKSoKLiwskScKOHTsAAFFRUbCxsYFKpUJsbCwmTZoEtVoNZ2dn7Nu3T7vs\n9u3boVAo0LNnTyxcuBBOTk5QKBTw9vZGSkqKdlxwcDCsrKzg6Oionfb666/DxsYGkiQhLy8PABAS\nEoLly5cjIyMDkiTB3d0dAHD8+HGo1Wps2rTJGIek2bZv3w4hBKZOnfrIMWFhYejbty927dqF+Pj4\nJtcnhEB4eDj69+8Pa2trODg4YPr06TpXBTb33ABATU0N3n33Xbi4uECpVGLQoEGIjo5u3U4/QmZm\nJkpLS+Hi4qIzvU+fPgCAixcv6kx3cHDA2LFjERkZydv4iYiI2hGGjURERETUKqNHj8bZs2d1pi1e\nvBhLly5FeXk5bG1tER0djYyMDLi5uWH+/PmoqqoC8CBEDAoKgkajwZIlS5CVlYXU1FRUV1djwoQJ\nuHHjBoAHodzs2bN1trFz506sX79eZ1pkZCQCAgLQp08fCCFw9epVANC+TKS2trZNjkFLHT16FB4e\nHlCpVI8co1Qq8emnn8LMzAzz589HWVnZI8euW7cOK1euxDvvvIOcnBwkJibixo0b8PX1xd27dwE0\n/9wAwIoVK7B161ZERETg9u3bCAgIwMsvv4wff/zRcAfh/9y5cwcAGtxKrlAooFQqtfXXN3ToUNy8\neRMXLlwweD1ERETUMgwbiYiIiKhNeXt7Q61Wo0ePHggMDERZWRmuX7+uM8bCwkJ7NZ6npyeioqJQ\nUlKCPXv2GKSGKVOmoLi4GGvWrDHI+gyhrKwMv/76q/bKvaZ4eXlh6dKlyMrKwooVKxodU15ejvDw\ncLz44ouYN28e7OzsMHDgQHz44YfIy8vDRx991GCZps5NRUUFoqKiMGPGDMycORP29vZYvXo1LC0t\nDXZe6qt747S5uXmDeZaWligvL28wve7ZjJcuXTJ4PURERNQyDBuJiIiIyGisrKwAQOfqucYMHz4c\nKpVK5/bfjiYnJwdCiCavaqwvLCwMHh4e2LlzJ5KSkhrMT0tLQ2lpKYYPH64zfcSIEbCystK5Lb0x\nD5+bK1euQKPRYMCAAdoxSqUSjo6ObXJe6p5ZWffCl/oqKyuhVCobTK87do1d9UhERETyYNhIRERE\nRO2StbU1cnNz5S6jzVRUVADAI1+Y8jCFQoE9e/ZAkiS89tprDa70KywsBAB06dKlwbL29vYoKSnR\nq76627VXr14NSZK0X9euXYNGo9FrXc1R9zzO4uJinekajQYVFRVwcnJqsExdAFl3LImIiEh+DBuJ\niIiIqN2pqqpCYWEhnJ2d5S6lzdQFZXXPk2wOLy8vLFu2DOnp6di4caPOPHt7ewBoNFRsybHs0aMH\nACAiIgJCCJ2v5ORkvdbVHK6urrC1tcW1a9d0ptc9d3PQoEENlqmsrASARq96JCIiInkwbCQiIiKi\ndichIQFCCIwaNUo7zcLC4rG3X5uSnj17QpIkFBUV6bXcxo0b0a9fP/z888860wcMGIAuXbo0eHlL\nSkoKKisr8bvf/U6v7Tz55JNQKBQ4f/68Xsu1lIWFBSZPnozExESdF/nExcVBkqRG39hdd+x69epl\nlBqJiIjo8Rg2EhEREZHsamtrUVBQgOrqaly8eBEhISFwcXFBUFCQdoy7uzvy8/Nx+PBhVFVVITc3\nt8FVcADQtWtX3Lp1C1lZWSgpKUFVVRXi4uKgVquxadMmI+5V01QqFdzc3JC
dna3XcnW3Uz/8IhWF\nQoHly5fj0KFD2Lt3L4qLi3Hp0iUsWrQITk5OWLBggd7befXVV7Fv3z5ERUWhuLgYNTU1yM7Oxu3b\ntwEAgYGB6NWrF1JTU/Va96OsWbMGd+/exdq1a1FWVobk5GRs27YNQUFB8PDwaDC+7tgNHDjQINsn\nIiKi1mPYSEREREStsmPHDowYMQIAEBoaimnTpiEqKgoREREAHtz+mpmZiY8//hjLly8HAEycOBHp\n6enadVRUVGDgwIFQKpXw9fVF3759cfr0aZ3nGS5evBjjxo3DH//4R3h4eGDjxo3a22e9vLxw48YN\nAMCiRYvQs2dPeHp6YvLkycjPzzfKcWiJKVOmIC0tTef5i//617/g7u6OjIwMjBgxAn/5y18aLDdq\n1CgsW7aswfS1a9di8+bN2LBhA7p3746xY8fi6aefRkJCAmxsbABAr3MTGRmJpUuXYsuWLejWrRuc\nnJwQEhKCgoICAA9uY87JyUFsbGyT+3nu3DmMHj0aTzzxBFJSUnDhwgU4OTnBx8cHiYmJ2nHPPvss\nTpw4gZMnT6Jbt26YOXMmXnvtNfz9739vdL0//PADevfu3egt1kRERCQPSQgh5C6CiIjIVEmShOjo\naMyePVvuUohaZNasWQCAmJgY2WpYuHAhYmJicO/ePdlq0Mf+/fsxZ84c6PPf6IULF+LIkSMNrmK8\nevUq+vfvjz179mDevHmGLrXN1dbW4vnnn0dQUBBee+01o2773r17cHZ2RlhYmDYorRMSEoK9e/ci\nLy9Pr3W2h58HIiIiExfDKxuJiIiISHb6vCTFVJWXl+PEiRNIT0/XvtjE3d0dGzZswIYNG1BaWipz\nhfqpqanB4cOHUVJSgsDAQKNvf926dRgyZAiCg4MBAEII3Lp1C0lJSdqXyhAREZHxMWwkIiIiIjKC\n/Px8TJw4EX379tW5CnDlypWYNWsWAgMD9X5ZjJwSEhJw8OBBxMXFQaVSGXXb4eHhOH/+PI4dOwZL\nS0sAQGxsLHr37g1fX18cPXrUqPUQERHRbxg2EhERGdGxY8dgZ2eHr776Su5SWmXDhg3w9PSEWq2G\ntbU13N3d8fbbb7foyqxz586hf//+MDMzgyRJ6NWrF8LCwtqg6pY7ePAg3NzcIEkSJEmCo6OjSd7y\n2h6tWrUKe/bsQVFREVxdXXHgwAG5S2oTH374IYQQ2q+9e/fqzN+0aROCg4Px3nvvyVSh/vz8/PD5\n55/D0dHRqNuNjY3F/fv3kZCQAAcHB+306dOn6xxjfW+hJiIiIsOwkLsAIiKizqSjPCr51KlTeOON\nNxAYGAhLS0vExcVh3rx5uHTpEuLi4vRa16hRo/DLL79g4sSJOHHiBK5cuQJ7e/s2qrxlZs6ciZkz\nZ8Ld3R15eXm4c+eO3CV1GJs3b8bmzZvlLqNd8Pf3h7+/v9xltHvTpk3DtGnT5C6DiIiIHoFXNhIR\nERnRlClTUFRUhICAALlLQXl5Oby9vVu0bJcuXbBgwQJ07doVtra2mD17NmbMmIHjx49r3whsylpz\nbIiIiIiIOjNe2UhERNRJ7d69Gzk5OS1a9siRIw2mde/eHQCg0WhaVVd70JpjQ0RERETUmfHKRiIi\nIiNJSkqCi4sLJEnCjh07AABRUVGwsbGBSqVCbGwsJk2aBLVaDWdnZ+zbt0+77Pbt26FQKNCzZ08s\nXLgQTk5OUCgU8Pb2RkpKinZccHAwrKysdJ6h9vrrr8PGxgaSJGmfYRYSEoLly5cjIyMDkiTB3d29\n1ft38+ZNKJVKuLq6aqcdP34carUamzZt0nt9pn5svvvuO3h6esLOzg4KhQIDBw7EiRMnAAB//vOf\ntc9/7NOnD37++WcAwKuvvgqVSgU7Ozt8+eWXAB688ffdd9+Fi4sLlEolBg0ahOjoaADA1q1boVKp\nYGtri5ycHCxfvhy9e/fGlStXWlQzERE
REVFrMWwkIiIyktGjR+Ps2bM60xYvXoylS5eivLwctra2\niI6ORkZGBtzc3DB//nxUVVUBeBCUBQUFQaPRYMmSJcjKykJqaiqqq6sxYcIE7a3L27dvx+zZs3W2\nsXPnTqxfv15nWmRkJAICAtCnTx8IIXD16tVW7ZtGo8GpU6cwf/58WFlZaafX1NQAAGpra/Vep6kf\nm7t372LOnDnIysrCrVu30KVLF8ydOxcAsGvXLsycORPm5ub47rvvMHToUADAnj17MGPGDOzduxdT\np04FAKxYsQJbt25FREQEbt++jYCAALz88sv48ccf8fbbb2PZsmUoLS3F5s2b4erqilGjRnWYZ4MS\nERERkelh2EhERNROeHt7Q61Wo0ePHggMDERZWRmuX7+uM8bCwgL9+/eHtbU1PD09ERUVhZKSEuzZ\ns0emqh/YvHkznJycGrxFesqUKSguLsaaNWtatX5TPDYvvfQS1q5dCwcHB3Tt2hVTp07FvXv3kJub\nCwBYtGgRampqdOorLi7GDz/8gMmTJwMAKioqEBUVhRkzZmDmzJmwt7fH6tWrYWlp2WC/3n//fbzx\nxhs4ePAg+vXrZ7wdJSIiIiKqh89sJCIiaofqrg6su3rvUYYPHw6VSoXLly8bo6xGHTp0CPv378fJ\nkydha2vb5tszpWNTn6WlJYDfrvZ84YUX0LdvX3zyySdYtWoVJEnCF198gcDAQJibmwMArly5Ao1G\ngwEDBmjXo1Qq4ejoaND9OnDgACRJMtj6Ogses47ppZdekrsEIiIik8awkYiIyMRZW1trr5Yzti++\n+ALh4eFISEjAE088IUsNTZHz2Bw9ehTbtm1DWloaiouLG4SjkiRh4cKFWLZsGb755huMHz8e//M/\n/4PPP/9cO6asrAwAsHr1aqxevVpneScnJ4PVOmrUKCxdutRg6+vokpOTERkZqX12JnUcERERcpdA\nRERk8hg2EhERmbCqqioUFhbC2dnZ6Nv+4IMPcOLECZw6dQpdunQx+vYfx9jHJjExET/99BOWLl2K\n69evY8aMGXjxxRfxySef4IknnsAHH3yAt99+W2eZoKAgrFq1Crt27cKTTz4JtVqNp556Sju/R48e\nAB4EICEhIW1Wu7Ozc4PnWVLTIiMjecw6oJiYGLlLICIiMnkMG4mIiExYQkIChBAYNWqUdpqFhcVj\nbzFuDSEEVqxYgYKCAhw+fBgWFu3zvxPGPjY//fQTbGxsAACXLl1CVVUVFi9eDDc3NwCN33Lr4OCA\nOXPm4IsvvoCtrS3mz5+vM//JJ5+EQqHA+fPn26RmIiIiIiJD4wtiiIiITEhtbS0KCgpQXV2Nixcv\nIiQkBC4uLggKCtKOcXd3R35+Pg4fPoyqqirk5ubi2rVrDdbVtWtX3Lp1C1lZWSgpKWl2CPfvf/8b\nW7duxccffwxLS0tIkqTz9de//lU7Ni4uDmq1Gps2bWr1vj+OXMemqqoKd+/eRUJCgjZsdHFxAQDE\nx8ejoqIC6enpSElJaXT5RYsW4f79+zhy5AgCAgJ05ikUCrz66qvYt28foqKiUFxcjJqaGmRnZ+P2\n7dv6HiIiIiIiojbHsJGIiMhIduzYgREjRgAAQkNDMW3aNERFRWmfETZo0CBkZmbi448/xvLlywEA\nEydORHp6unYdFRUVGDhwIJRKJXx9fdG3b1+cPn0a1tbW2jGLFy/GuHHj8Mc//hEeHh7YuHEjlEol\nAMDLyws3btwA8CDk6tmzJzw9PTF58mTk5+c3az+EEK0/GPWkpKRgwIAB+PrrrwEA/fv3x+bNm9vV\nsdm9ezfc3d2RkZGBoqIinXDVysoKjo6O+PLLL6FSqQAAAwcORGhoKHbu3AknJye88847eP755wEA\no0eP1m4HAEaOHImhQ4fi1VdfbfQq0cjISCxduhRbtmxBt27d4OTkhJCQEBQUFGDr1q0IDw8HAPTt\n2xd
79+41yDkhIiIiImopSRj6LwYiIqJORJIkREdHG+XZbQsXLkRMTAzu3bvX5tsyNaZ+bKZMmYId\nO3bA1dXV6NueNWsWAD6rTh/79+/HnDlzDB68k/z480BERNRqMbyykYiIyITU1NTIXUK7ZUrHpv5t\n2RcvXoRCoZAlaCQiIiIiMjSGjURERITLly83ePZiY1+BgYFyl9ohhIaGIj09Hf/5z3/w6quvYuPG\njXKXRG1s4cKFOj9L8+bNazAmPj4eK1euxMGDB+Hm5qYd+8orrzQY6+/vD1tbW5ibm+PZZ59Famqq\nMXaj1WpraxEREQFvb+9HjklKSoKPjw9UKhWcnJwQGhqK+/fva+d/+eWX2LJlS4MPGA4fPqxzjLt3\n795m+0FERESPxrCRiIjIBKxatQp79uxBUVERXF1dceDAAYOuv1+/fhBCPPbriy++MOh2DaGtj01b\nUKlU6NevH8aPH49169bB09NT7pLICLp27Yq4uDhcuXIFu3fv1pm3du1abN++HatWrcLMmTORmZmJ\nPn36oFu3bti7dy+OHj2qM/7kyZOIiYlBQEAA0tLSMGzYMGPuSoukp6djzJgxWLZsGTQaTaNj0tLS\n4O/vDz8/P+Tm5uLQoUP45JNPsGjRIu2YqVOnQqFQwM/PD4WFhdrp06ZNQ3Z2NhITEzF58uQ23x8i\nIiJqHMNGIiIiE7B582bcv38fQgj8+uuveOmll+Quqd0wxWMTFhaGmpoaXL9+vcEbqDuj8vLyJq90\nM5VtPI5SqcTEiRPRt29fnRcXvf/++/jiiy+wf/9+2Nra6iyzfft2mJmZYcGCBSgqKjJ2yQZz4cIF\nrFixAosWLcKQIUMeOW7jxo1wdHTE+vXrYWNjAy8vL4SGhuLTTz/F5cuXteOWLFmCwYMHY/Lkyaiu\nrgbw4Bm6vXv3hq+vL5555pk23yciIiJqHMNGIiIiIpLV7t27kZOTY/LbaImrV69izZo1WL9+PRQK\nRYP53t7eCAkJwc2bN/Hmm2/KUKFhDB48GAcPHsTcuXN1gtb6qqurcfToUYwdOxaSJGmnT5o0CUII\nxMbG6oxft24dzp8/j8jIyDatnYiIiPTDsJGIiIiI9CKEQHh4OPr37w9ra2s4ODhg+vTpOleeBQcH\nw8rKCo6Ojtppr7/+OmxsbCBJEvLy8gAAISEhWL58OTIyMiBJEtzd3bF9+3YoFAr07NkTCxcuhJOT\nExQKBby9vZGSkmKQbQDA8ePHoVarsWnTpjY9Xk3Zvn07hBCYOnXqI8eEhYWhb9++2LVrF+Lj45tc\nX3POTVRUFGxsbKBSqRAbG4tJkyZBrVbD2dkZ+/bt01lfTU0N3n33Xbi4uECpVGLQoEGIjo5u3U4/\nQmZmJkpLS+Hi4qIzvU+fPgAevEypPgcHB4wdOxaRkZF8MzgREVE7wrCRiIiIiPSybt06rFy5Eu+8\n8w5ycnKQmJiIGzduwNfXF3fv3gXwIESbPXu2znI7d+7E+vXrdaZFRkYiICAAffr0gRACV69eRXBw\nMIKCgqDRaLBkyRJkZWUhNTUV1dXVmDBhAm7cuNHqbQC/vcG8trbWcAdHT0ePHoWHhwdUKtUjxyiV\nSnz66acwMzPD/PnzUVZW9sixzTk3ixcvxtKlS1FeXg5bW1tER0cjIyMDbm5umD9/vs7b0lesWIGt\nW7ciIiICt2/fRkBAAF5++WX8+OOPhjsI/+fOnTsA0OBWcoVCAaVSqa2/vqFDh+LmzZu4cOGCwesh\nIiKilmHYSERERETNVl5ejvDwcLz44ouYN28e7OzsMHDgQHz44YfIy8vDRx99ZLBtWVhYaK/Q8/T0\nRFRUFEpKSrBnzx6DrH/KlCkoLi7GmjVrDLI+fZWVleHXX3/VXrnXFC8vLyxduhRZWVlYsWJFo2Na\ncm68vb2hVqvRo0cPBAYGoqysDNevXwcAVFRUICoqCjNmzMDMmTNhb
2+P1atXw9LS0mDnoL66N06b\nm5s3mGdpaYny8vIG0+uezXjp0iWD10NEREQtw7CRiIiIiJotLS0NpaWlGD58uM70ESNGwMrKSuc2\nZ0MbPnw4VCqVzi3BpiwnJwdCiCavaqwvLCwMHh4e2LlzJ5KSkhrMb+25sbKyAgDtlY1XrlyBRqPB\ngAEDtGOUSiUcHR3b5BzUPbOy7oUv9VVWVkKpVDaYXnfsGrvqkYiIiOTBsJGIiIiImq2wsBAA0KVL\nlwbz7O3tUVJS0qbbt7a2Rm5ubptuw1gqKioA4JEvTHmYQqHAnj17IEkSXnvttQZX+hn63NTdrr16\n9WpIkqT9unbtGjQajV7rao66Z28WFxfrTNdoNKioqICTk1ODZeoCyLpjSURERPJj2EhEREREzWZv\nbw8AjQZXhYWFcHZ2brNtV1VVtfk2jKkuKKt7dmRzeHl5YdmyZUhPT8fGjRt15hn63PTo0QMAEBER\nASGEzldycrJe62oOV1dX2Nra4tq1azrT656xOWjQoAbLVFZWAkCjVz0SERGRPBg2EhEREVGzDRgw\nAF26dGnwgpCUlBRUVlbid7/7nXaahYWFzstGWishIQFCCIwaNarNtmFMPXv2hCRJKCoq0mu5jRs3\nol+/fvj55591putzbprjySefhEKhwPnz5/VarqUsLCwwefJkJCYm6ry0Jy4uDpIkNfrG7rpj16tX\nL6PUSERERI/HsJGIiIiImk2hUGD58uU4dOgQ9u7di+LiYly6dAmLFi2Ck5MTFixYoB3r7u6O/Px8\nHD58GFVVVcjNzW1w1RoAdO3aFbdu3UJWVhZKSkq04WFtbS0KCgpQXV2NixcvIiQkBC4uLggKCjLI\nNuLi4qBWq7Fp0ybDH6hmUKlUcHNzQ3Z2tl7L1d1O/fCLVPQ5N83dzquvvop9+/YhKioKxcXFqKmp\nQXZ2Nm7fvg0ACAwMRK9evZCamqrXuh9lzZo1uHv3LtauXYuysjIkJydj27ZtCAoKgoeHR4Pxdcdu\n4MCBBtk+ERERtR7DRiIiIiLSy9q1a7F582Zs2LAB3bt3x9ixY/H0008jISEBNjY22nGLFy/GuHHj\n8Mc//hEeHh7YuHGj9nZXLy8v3LhxAwCwaNEi9OzZE56enpg8eTLy8/MBPHgO38CBA6FUKuHr64u+\nffvi9OnTOs84bO025DZlyhSkpaXpPH/xX//6F9zd3ZGRkYERI0bgL3/5S4PlRo0ahWXLljWY3pxz\nExUVhYiICAAPbk3OzMzExx9/jOXLlwMAJk6ciPT0dABAZGQkli5dii1btqBbt25wcnJCSEgICgoK\nADy4jTknJwexsbFN7ue5c+cwevRoPPHEE0hJScGFCxfg5OQEHx8fJCYmasc9++yzOHHiBE6ePIlu\n3bph5syZeO211/D3v/+90fX+8MMP6N27d6O3WBMREZE8JCGEkLsIIiIiUyVJEqKjozF79my5SyFq\nkVmzZgEAYmJiZK5E18KFCxETE4N79+7JXUoD+/fvx5w5c6DPf6MXLlyII0eONLiK8erVq+jfvz/2\n7NmDefPmGbrUNldbW4vnn38eQUFBeO2114y67Xv37sHZ2RlhYWHaoLROSEgI9u7di7y8PL3W2V5/\nHoiIiExIDK9sJCIiIqJ2SZ8Xp5iC8vJynDhxAunp6doXm7i7u2PDhg3YsGEDSktLZa5QPzU1NTh8\n+DBKSkoQGBho9O2vW7cOQ4YMQXBwMABACIFbt24hKSlJ+1IZIiIiMj6GjURERERERpCfn4+JEyei\nb9++OlcBrly5ErNmzUJgYKDeL4uRU0JCAg4ePIi4uDioVCqjbjs8PBznz5/HsWPHYGlpCQCIjY1F\n79694evri6NHjxq1HiIiIvoNw0YiIiIialdWrVqFPXv2oKioCK6urjhw4IDcJbXahx9+CCGE9mvv\n3r068zdt2oTg4GC89957MlWoP
z8/P3z++edwdHQ06nZjY2Nx//59JCQkwMHBQTt9+vTpOsdY31uo\niYiIyDAs5C6AiIiIiKi+zZs3Y/PmzXKXYXT+/v7w9/eXu4x2b9q0aZg2bXYaroUAACAASURBVJrc\nZRAREdEj8MpGIiIiIiIiIiIiMgiGjURERERERERERGQQDBuJiIiIiIiIiIjIIBg2EhERERERERER\nkUHwBTFEREStlJycLHcJssnJyYGNjQ1sbGzkLoVaKDs7GwCwf/9+mSsxHXU/8x3xmN29exdqtRpK\npVLuUmSRnZ0NZ2dnucsgIiIyaZIQQshdBBERkamSJEnuEoiIyIBeeuklxMTEyF0GERGRqYrhlY1E\nRESt0NE/s7t9+zaSkpIQHx+PpKQk/Pvf/4aFhQUGDx6M8ePHY/z48Rg9ejQUCoXcpVIb2r9/P+bM\nmdPhv98JyM3Nxblz53DmzBnEx8cjNTUVZmZmGDJkCHx8fDB69GhMmDAB9vb2cpdKRERE7RSvbCQi\nIiKtO3fu4LvvvmsyXPTx8em0t1h2VgwbO6+cnBx8++23SEpKwpkzZxoNH/39/WFnZyd3qURERNQ+\nxDBsJCIi6sTu3r2LxMREbZDw008/6YSLPj4+GDt2LNRqtdylkowYNlKdh39n1A8f635njBkzhuEj\nERFR58WwkYiIqDNpTlDAcJEexrCRHqWx3ynm5ub8wIKIiKjzYthIRETUkfEWSDIEho3UXI97FIOP\njw+ef/552Nrayl0qERERtQ2GjURERB1JTk4OUlJS+HIHMiiGjdRSfA4sERFRp8OwkYiIyJTxzbFk\nDAwbyVDqv+E+Pj4emZmZfMM9ERFRx8KwkYiIyJQUFxfj+++/1/6h/vPPP0OSJHh4eGD06NHaP9Yd\nHBzkLpU6EIaN1FZu3bql/bDk66+/xq+//srwkYiIyLQxbCQiImrPSkpKkJKSohMuAkC/fv204aKf\nnx+6du0qc6XUkTFsJGOpHz6ePHkSWVlZUCqVGDZsmPZ3nq+vL6ytreUulYiIiBrHsJGIiKg9aSxc\nrK2thZubm/YqH4aLZGwMG0ku9cPHEydO4Nq1a1CpVBg6dCjDRyIiovaJYSMREZGcSktLce7cuSbD\nxRdeeAHdunWTu1TqxBg2UnuRmZmJpKQknDlzhuEjERFR+8SwkYiIyJjqh4tJSUn4/vvvUVVVxXCR\n2jWGjdRe1Q8fjx8/juvXr0OlUsHb21v7kqwxY8bAyspK7lKJiIg6C4aNREREbamsrAzJyclNhovj\nxo1D9+7d5S6V6JEYNpKpyMzM1P6+TUhIwI0bN2BjYwMvLy+Gj0RERMbBsJGIiMiQ6sLFuittEhMT\nUVlZqQ0XfXx84Ofnh969e8tdKlGzMWwkU1U/fDx9+jSys7O14WPd7+SRI0fC0tJS7lKJiIg6CoaN\nRERErdGccPGFF16As7Oz3KUStRjDRuoo6oePp06dws2bN9GlSxeMGjWK4SMREZFhMGwkIiLSh0aj\nwdmzZxsNF+tu0Zs0aRKefPJJuUslMhiGjdRR1YWP8fHxOH36NPLy8hg+EhERtQ7DRiIioqZoNBqk\npqbizJkziI+Px3fffYf79+/rhIsTJ06Ei4uL3KUStRmGjdRZ1A8fT506hXv37umEj+PHj8fQoUNh\nZmYmd6lERETtFcNGIiKi+srLy/HTTz81GS7+/ve/x1NPPSV3qURGw7CROqvGwkdbW1uMHDmS4SMR\nEVHjGDYSEVHnVlVVhYsXL2r/mExKSkJFRQWcnJwwevRojB8/Hv7+/nj66aflLpVINgwbiYDa2lr8\n8ssv2g+jvvnmG+Tn5zN8JCIi0sWwkYiIOpfq6mpcuHCB4SKRHhg2EjX0cPgYHx+PgoIC9OjRAyNH\njtT2lGHDhkGSJLnLJSIiMhaGjURE1LE1J1ycMGECXF1d5S6VqN1i2Ej0eDU1Nbh8+TLDRyIi6uw
Y\nNhIRUcfycLh45swZlJeX64SLPj4+ePbZZ+UulchkMGwk0l9NTQ3Onz+PpKQknDlzBl9//TUKCwvR\ns2dPPPfccwwfiYioo2LYSEREpq1+uJiUlITExEQUFxfD0dERvr6+DBeJDIBhI1HrNRU+jh07VvsS\nMoaPRERk4hg2EhGRaan7Y43hIpHxMGwkMryHw8eTJ0+iqKgIvXr1wpgxYxg+EhGRqWLYSERE7Rv/\nGCOSH8NGorbHD9OIiKiDYNhIRETtC28zI2p/GDYSGR8fE0JERCaKYSMREcmLD9Anav8YNhLJjy9A\nIyIiE8GwkYiIjKumpgaXL1/GmTNntH8wFRQUoEePHhg5ciTDRaJ2iGEjUfvzcPiYlJSEiooKnfBx\nwoQJcHV1lbtUIiLqXBg2EhFR26qtrcUvv/zCcJHIhDFsJGr/mhM++vv74+mnn5a7VCIi6tgYNhIR\nkeFlZmZq/9j55ptvkJ+fD1tbW4wcORLjx4/H+PHjMXToUJiZmcldKhE1A8NGItNTVVWFixcvMnwk\nIiJjY9hIREStVz9cPHXqFO7du8dwkagDYdhIZPrKy8vx008/ae80+O6773D//n24ublpX772+9//\nHk899ZTcpRIRkWlj2EhERPprLFzs0qULRo0axXCRqANi2EjU8Wg0GqSmpjYZPk6cOBEuLi5yl0pE\nRKaFYSMRET1e/XDx9OnTyMvL0wkXfXx8MHLkSFhaWspdKhG1AYaNRB2fRqPB2bNnkZSUhDNnziAx\nMRGVlZU64eOkSZPw5JNPyl0qERG1bwwbiYioobpwMSkpCadOncLNmzcZLhJ1YgwbiTqfsrIyJCcn\nNxo+1v1f4IUXXoCzs7PcpRIRUfvCsJGIiHTDxdOnTyM7Oxs2Njbw8vJiuEjUyWRnZ+NPf/oTampq\ntNMKCgrw66+/YtiwYTpjPTw88I9//MPYJRKRDJoTPvr5+aF3795yl0pERPJi2EhE1BllZmZq/1iI\ni4vDjRs3tOFi3a1SY8aMgZWVldylEpEM3N3dkZGR8dhxa9aswYYNG4xQERG1N3XhY92Hld9//z2q\nqqq04eP48eMxbtw4dO/eXe5SiYjIuBg2EhF1BvXDxePHj+P69etQqVTw9vZmuEhEDWzYsAFhYWGo\nqqpqclxaWho8PT2NVBURtWelpaU4d+5ck+HjCy+8gG7dusldKhERtS2GjUREHVH9cPHEiRO4du0a\nVCoVhg4ditGjR2P8+PHw9fWFtbW13KUSUTuUkZGBZ555pslnND777LP43//9XyNWRUSmpH74GB8f\nj59//hm1tbUMH4mIOj6GjUREHcGtW7dw5swZxMfHM1wkIoMYMmQILl682GjgaGlpiU2bNuGtt96S\noTIiMkUlJSVISUlpMnz08/ND165d5S6ViIhah2EjEZExFBUVwc7OzmDrqx8unjx5EllZWVAqlRg2\nbBjDRSIyiPDwcISGhqK6urrBPEmSkJmZiaefftr4hRFRh9BY+AgA/fr10/5fxpDhY0VFBSRJ4v+N\niIjaHsNGIqK2VFtbi507d+L999/H9evXYW5u3qL11A8Xv/76a/z666+wsLDA4MGDtVcDjB49GgqF\nwsB7QESd1e3bt+Hs7Iza2lqd6WZmZhg5ciTOnj0rU2VE1BE1Fj5KkgQPDw9t+Dh+/Hg4ODi0aP1x\ncXFYvnw5PvnkE4waNcrA1RMRUT0MG4mI2kp6ejr+9Kc/4dy5cxBC4IcffsDw4cObtezt27eRlJSk\n/Q93ZmYmw0UiMroxY8bgzJkzOoGjhYUFPvjgAyxcuFDGyoioo8vNzcW5c+e0H7ampqbCzMwMQ4YM\n0b7cbsKECbC3t2/W+kJDQ7Ft2zZIkoSQkBCEhYVBqVS28V4QEXVKDBuJiAyttrYWu3btwpIlS1BT\nU4OqqipYWlpi8+bNePPNNxtd5s6dO/juu++0b3D897//3SB
c9PHx4X+KicioPv74YyxcuFAnbDQ3\nN8etW7fQs2dPGSsjos4mJycHKSkpLQ4fhw0bpr1V28LCAk5OTvjnP/+JcePGGXM3iIg6A4aNRESG\nlJGRgT/96U9ITk7W+ePczMwM/v7+iIuLA/D4cNHHxwdjx46FWq2Wa1eIiFBQUIBevXqhqqoKwIOg\ncfz48Th+/LjMlRFRZ5eTk4Nvv/0WSUlJOHPmTKPho7+/P+zs7FBSUgIHBwfU1NRolzc3N0dtbS3+\n/Oc/Izw8HF26dJFxb4iIOhSGjUREhlBdXY2dO3ciNDQUtbW12j/M67O2tsa8efOQmJiI9PR0WFpa\n4rnnnsPzzz+P559/Ht7e3lCpVDJUT0T0aFOmTMGJEydQU1MDMzMz/POf/8S8efPkLouISMft27eR\nkJCg/frPf/4DS0tLjBgxAs7Ozti/f3+jy1laWqJXr1749NNP4efnZ+SqiYg6JIaNREStlZaWhlde\neQUXL17U+cS8MYMGDcIf/vAHjB07Fj4+PrCxsTFSlURELbNv3z7MnTsXQghYW1sjLy+PVwARUbt3\n69YtnD59GgkJCTh8+DCKi4tRWVnZ6Nj6Vzn+7W9/g62trZGrJSLqUBg2EhG1VHV1Nf72t79hzZo1\nEEKgurq6yfGWlpYICwvD22+/baQKiYhar6ysDN27d0dFRQVeeuklxMTEyF0SEZFehgwZggsXLjx2\nnKWlJRwcHLBr1y4EBAQYoTIiog4pxkzuCoiITNH58+cxZMgQvPPOO6iqqnps0Ag8CCfj4+ONUB0R\nkeHY2Nhg+vTpAMDbp4nI5BQVFeHSpUvNGltVVYW8vDxMnToV/+///T8UFRW1cXVERB1Tgysb9+/f\njzlz5shVDxERdXJtdcE9+xsREcmJ/Y2IiDqiRvpbjMWjBkdHR7dtNUSkt+TkZERGRvLnU09z5sxB\nSEgIvLy8Wr2u+/fvIycnB8XFxSgoKEBxcTGKiopQWFiIgoIC5Ofno6ioCKWlpTpvo5YkCWZmZqip\nqUFYWBieeeaZVtfS0dR9f7c1/vwQ6a+mpgbR0dF4+eWX22T97G8tY8j+Rm2H/U1en332GY4cOQJz\nc/MGz9a2tLSEWq2Gvb09HBwc0LVrV6jVatjZ2cHBwUE7r1evXjJVT6aO/a1l2N9MQ1P97ZFh4+zZ\ns9usICJqucjISP586mnOnDnw8vIy+nHLy8tDTk4OcnJycOvWLeTm5iInJwc+Pj54/vnnjVqLqTDG\nH2P8+SFqmRkzZkChULTZ+tnf9CdXfyP9sb/J586dOxg+fDh69eoFJycn9OjRA46OjnB0dIRKpZK7\nPOoE2N/0x/5mOvQOG4mIqHW6d++O7t27w9PTU+5SiIharS2DRiKithIcHCx3CUREnQ5fEENERERE\nREREREQGwbCRiIiIiIiIiIiIDIJhIxERERERERERERkEw0YiIiIiIiIiIiIyCIaNRJ3QsWPHYGdn\nh6+++kruUtq9+Ph4rFy5EgcPHoSbmxskSYIkSXjllVcajPX394etrS3Mzc3x7LPPIjU1VYaK9Vdb\nW4uIiAh4e3s/ckxSUhJ8fHygUqng5OSE0NBQ3L9/Xzv/yy+/xJYtW1BTU2OMkomIGsX+1nzsbw+w\nvxGRKWB/az72twfk7m8MG4k6ISGE3CWYhLVr12L79u1YtWoVZs6ciczMTPTp0wfdunXD3r17cfTo\nUZ3xJ0+eRExMDAICApCWloZhw4bJVHnzpaenY8yYMVi2bBk0Gk2jY9LS0uDv7w8/Pz/k5ubi0KFD\n+OSTT7Bo0SLtmKlTp0KhUMDPzw+FhYXGKp+ISAf7W/Owvz3A/kZEpoL9rXnY3x5oD/2NYSNRJzRl\nyhQUFRUhICBA7lJQXl7e5Ccycnn//ffxxRdfYP/+/bC1tdWZt337dpiZmWHBggUoKiqSqcLWu3Dh\nAlasWIFFixZhyJAhjxy
3ceNGODo6Yv369bCxsYGXlxdCQ0Px6aef4vLly9pxS5YsweDBgzF58mRU\nV1cbYxeIiHSwvz0e+9tv2N+IyFSwvz0e+9tv2kN/Y9hIRLLavXs3cnJy5C5Dx9WrV7FmzRqsX78e\nCoWiwXxvb2+EhITg5s2bePPNN2Wo0DAGDx6MgwcPYu7cubC2tm50THV1NY4ePYqxY8dCkiTt9EmT\nJkEIgdjYWJ3x69atw/nz5xEZGdmmtRMRtXfsb/JhfyMiajvsb/Ixpf7GsJGok0lKSoKLiwskScKO\nHTsAAFFRUbCxsYFKpUJsbCwmTZoEtVoNZ2dn7Nu3T7vs9u3boVAo0LNnTyxcuBBOTk5QKBTw9vZG\nSkqKdlxwcDCsrKzg6Oionfb666/DxsYGkiQhLy8PABASEoLly5cjIyMDkiTB3d0dAHD8+HGo1Wps\n2rTJGIekge3bt0MIgalTpz5yTFhYGPr27Ytdu3YhPj6+yfUJIRAeHo7+/fvD2toaDg4OmD59us6n\nSs09BwBQU1ODd999Fy4uLlAqlRg0aBCio6Nbt9OPkJmZidLSUri4uOhM79OnDwDg4sWLOtMdHBww\nduxYREZG8nYPIjIq9rfHY3/7DfsbEZkK9rfHY3/7TXvpbwwbiTqZ0aNH4+zZszrTFi9ejKVLl6K8\nvBy2traIjo5GRkYG3NzcMH/+fFRVVQF40ISCgoKg0WiwZMkSZGVlITU1FdXV1ZgwYQJu3LgB4MEv\n+9mzZ+tsY+fOnVi/fr3OtMjISAQEBKBPnz4QQuDq1asAoH1IbW1tbZscg8c5evQoPDw8oFKpHjlG\nqVTi008/hZmZGebPn4+ysrJHjl23bh1WrlyJd955Bzk5OUhMTMSNGzfg6+uLu3fvAmj+OQCAFStW\nYOvWrYiIiMDt27cREBCAl19+GT/++KPhDsL/uXPnDgA0uBVBoVBAqVRq669v6NChuHnzJi5cuGDw\neoiIHoX97fHY337D/kZEpoL97fHY337TXvobw0Yi0uHt7Q21Wo0ePXogMDAQZWVluH79us4YCwsL\n7ac8np6eiIqKQklJCfbs2WOQGqZMmYLi4mKsWbPGIOvTR1lZGX799VftJz9N8fLywtKlS5GVlYUV\nK1Y0Oqa8vBzh4eF48cUXMW/ePNjZ2WHgwIH48MMPkZeXh48++qjBMk2dg4qKCkRFRWHGjBmYOXMm\n7O3tsXr1alhaWhrs+NdX98Yyc3PzBvMsLS1RXl7eYPozzzwDALh06ZLB6yEiain2N/a3+tjfiKij\nYH9jf6uvvfQ3ho1E9EhWVlYAoPOpTGOGDx8OlUqlc1m5qcrJyYEQoslPxeoLCwuDh4cHdu7ciaSk\npAbz09LSUFpaiuHDh+tMHzFiBKysrHRuX2jMw+fgypUr0Gg0GDBggHaMUqmEo6Njmxz/umeeNPbA\n4MrKSiiVygbT645dY5+aERG1B+xvj8f+xv5GRKaH/e3x2N+M098YNhKRQVhbWyM3N1fuMlqtoqIC\nAB75wN2HKRQK7NmzB5Ik4bXXXmvwSVFhYSEAoEuXLg2Wtbe3R0lJiV711V3uv3r1akiSpP26du0a\nNBqNXutqjrrnthQXF+tM12g0qKiogJOTU4Nl6hpY3bEkIjJl7G/sb3XY34ioI2F/Y3+r0xb9jWEj\nEbVaVVUVCgsL4ezsLHcprVb3i7buuSPN4eXlhWXLliE9PR0bN27UmWdvbw8AjTallhyzHj16AAAi\nIiIghND5Sk5O1mtdzeHq6gpbW1tcu3ZNZ3rd81kGDRrUYJnKykoAaPRTMyIiU8L+xv5WH/sbEXUU\n7G/sb/W1RX9j2EhErZaQkAAhBEaNGqWdZmFh8djL99ujnj17QpIkFBUV6bXcxo0b0a9fP/z88886\n0wcMGIAuXbo0ePhvSkoKKisr8bvf/U6v7Tz55JNQKBQ4f/68Xsu1lIWFBSZPnozExESdB
z7HxcVB\nkqRG3/hWd+x69epllBqJiNoK+xv7W33sb0TUUbC/sb/V1xb9jWEjEemttrYWBQUFqK6uxsWLFxES\nEgIXFxcEBQVpx7i7uyM/Px+HDx9GVVUVcnNzG3y6AgBdu3bFrVu3kJWVhZKSElRVVSEuLg5qtRqb\nNm0y4l49oFKp4ObmhuzsbL2Wq7sc/+EH8SoUCixfvhyHDh3C3r17UVxcjEuXLmHRokVwcnLCggUL\n9N7Oq6++in379iEqKgrFxcWoqalBdnY2bt++DQAIDAxEr169kJqaqte6H2XNmjW4e/cu1q5di7Ky\nMiQnJ2Pbtm0ICgqCh4dHg/F1x27gwIEG2T4RkbGwvzXE/vYb9jciMlXsbw2xv/2mTfqbeEh0dLRo\nZDIRtQOG+Pn84IMPhKOjowAgVCqVmDp1qti5c6dQqVQCgHjmmWdERkaG+Oijj4RarRYAxFNPPSX+\n85//CCGEWLBggbC0tBS9e/cWFhYWQq1Wi+nTp4uMjAyd7dy7d0+MGzdOKBQK4erqKv7yl7+It956\nSwAQ7u7u4vr160IIIVJTU8VTTz0llEqlGD16tLhz5444duyYsLW1FWFhYa3a1zoARHR0dLPHBwcH\nC0tLS6HRaLTTDh06JPr06SMAiO7du4s33nij0WXfeustMW3aNJ1ptbW1Ytu2beKZZ54RlpaWwsHB\nQcyYMUNcuXJFO0afc3D//n0RGhoqXFxchIWFhejRo4eYOXOmSEtLE0IIMWPGDAFAvPvuu03uZ3Jy\nsvDx8RFOTk4CgAAgHB0dhbe3t/j22291xn777bfiueeeE9bW1sLJyUm89dZboqKiotH1TpkyRfTu\n3VvU1tY2uf2HtXX/YX8jar/Y31qG/a1x7G9E1F6wv7UM+1vjTKi/7WfYSGRC2sPP54IFC0TXrl1l\nrUFf+jar9PR0YWFhIT777LM2rKrt1NTUCF9fX7F7926jbzsvL08oFArx17/+Ve9l+ccYUefVHn4+\n2d/aP/Y3edZPRC3XHn4+2d/avw7Y3/bzNmoi0ps+D981Re7u7tiwYQM2bNiA0tJSucvRS01NDQ4f\nPoySkhIEBgYaffvr1q3DkCFDEBwcbPRtExG1Fvtb+8X+RkTUcuxv7VdH7W9tEjb++c9/hq2tLSRJ\nMuhDMI8dOwY7Ozt89dVXbTK+M0pKSoKPjw9UKhWcnJwQGhqK+/fvt3h9Fy5cQGBgIFxdXWFtbY3u\n3btj8ODBCAsLM2DV7Yu+32d//etftQ+x/fDDD9u4OmqplStXYtasWQgMDNT7YcNySkhIwMGDBxEX\nFweVSmXUbYeHh+P8+fM4duwYLC0tjbptY2F/My21tbWIiIiAt7d3q9fF/vZ47G+mgf1Nf+xvLcf+\nZlgbNmyAp6cn1Go1rK2t4e7ujrfffrtV4Qr72+Oxv5kG9jf9tWV/a5OwcdeuXfj4448Nvl4hRJuO\n72zS0tLg7+8PPz8/5Obm4tChQ/jkk0+waNGiFq3v0qVL8Pb2hqOjI06fPo2ioiKcPXsWEydOREJC\ngmGLb0f0/T578803cfbs2Taqpm2tWrUKe/bsQVFREVxdXXHgwAG5S2pTmzZtQnBwMN577z25S2k2\nPz8/fP7553B0dDTqdmNjY3H//n0kJCTAwcHBqNs2JvY305Geno4xY8Zg2bJl0Gg0rVoX+1vzsL+Z\nDva35mN/ax32N8M6deoU3njjDWRlZSEvLw+bN29GZGQkZs2a1aL1sb81D/ub6WB/a74272963HOt\nl3379gkA4ueff271ujoCjUYjvLy85C5Dx5w5c4Srq6vOQ0C3bdsmJEkSv/zyi97r+6//+i/xxBNP\nNJh+//598Yc//KFVtbYXhjqP6enpAoD4+9//rtdy7eGZH6YIej7zg+RhKs+0Yn/T1R772/nz58WL\nL74o9u7dK4YMGSIGDx7cqvWxvzUf+5txsb+ZBvY30
9Qe+9uUKVNEdXW1zrTZs2cLANqXd+iD/a35\n2N+Mi/3NNMjyzEZJktpq1SZp9+7dyMnJkbsMrerqahw9ehRjx47VOVeTJk2CEAKxsbF6r/PevXso\nKipCfn6+znQrKyuD3Qpx7do1lJeXG2RdLdHeziMRGR/7m672+Htx8ODBOHjwIObOnQtra+tWr4/9\njYg6A/Y3Xe3x9+KRI0dgbm6uM6179+4A0KKr+NnfiKitGCRsFEJg27Zt8PDwgLW1Nezs7PDWW281\nGFdTU4N3330XLi4uUCqVGDRoEKKjo3XGfPbZZxg+fDgUCgVsbGzw9NNPY+PGjUhKSoKLiwskScKO\nHTu047/99ls899xzUKlUUKvVGDhwIIqLix85XgiB8PBw9O/fH9bW1nBwcMD06dNx+fJl7ZioqCjY\n2NhApVIhNjYWkyZNglqthrOzM/bt26f38QkJCcHy5cuRkZEBSZLg7u6OrVu3QqVSwdbWFjk5OVi+\nfDl69+6NK1euPPY4Nec4Pk5mZiZKS0vh4uKiM71Pnz4AgIsXL2qnHT9+HGq1Gps2bWpynSNGjEBZ\nWRleeOEFnDlzpsmxj9uHuu+pvn37wsrKCvb29vD09ISrqyuuXLkCAAgODoaVlZXO5cavv/46bGxs\nIEkS8vLymrW95p7vxs7jo77PvvvuO3h6esLOzg4KhQIDBw7EiRMnmjwmRNT+sL81rT32N32wvz3A\n/kbU+bC/Nc2U+tvNmzehVCrh6uqqncb+9gD7G5GM9LgM8pHeeecdIUmS+Nvf/iYKCgqERqMRO3fu\nbHAZ/ptvvimsra3FgQMHREFBgVi1apUwMzMTP/zwgxBCiIiICAFAvPfee+LevXsiPz9f/OMf/xBz\n584VQghx48YNAUB88MEHQgghSktLhVqtFlu2bBHl5eXizp074sUXXxS5ubmNjhdCiHfffVdYWVmJ\nzz77TBQWFoqLFy+KYcOGie7du4s7d+7o7BMA8c0334iioiKRk5MjfH19hY2NjaisrNTr+AghxMyZ\nM0WfPn0aHDcAYsmSJeKDDz4QL774ovjll18ee5weN785vv32WwFAbNu2rcE8pVIp/Pz8tP8+cuSI\nsLW1FRs2bGhynRqNRgwfPlwAEACEp6en2LJli7h3716DsY/bh82bNwtJksTWrVtFfn6+0Gg0YseO\nHQ2+p+bOnSt69eqls+5t27YJANrvg+Zsr7nnu7Hz2Nj3WUxMjFi3bp3Iz88X9+7dE6NGjRLdunXT\nzudl+MYFXoZvEtrjbWbsb4/X3vrbw0aOHPnI26jZ39jfTB37m2lgMHW32QAAIABJREFUf2N/a4v+\nJoQQZWVlwtbWVgQHB+tMZ39jfzN17G+moanbqFsdNmo0GqFSqcSECRN0pj/8zI/y8nKhUqlEYGCg\nzrLW1tZi8eLForKyUtjb24tx48bprKe6ulpERkYKIRr+Uvjf//1fAUAcOXKk0doeHq/RaESXLl10\nahBCiO+//14A0PllXPfLq7y8XDutrgFfvXq12cenTlPNqv42HnecHje/uU6ePCkAiPDw8Abz1Gq1\n8Pb21mf3tCorK8V///d/i379+mmbVs+ePUVCQkKz97GsrEzY29uL8ePH66y7sefINKdZNeeYNfd8\nN7dZPWzz5s0CgMjJyRFCsFkZG5uVaWhvf4yxvzVPe+tvD2sqbNQH+1vj2N/kxf5mGtjf2N/aor/V\n1dO3b19RXFzc4nWwvzWO/U1e7G+moamw0QKtdPXqVWg0Gvj5+TU57sqVK9BoNBgwYIB2mlKphKOj\nIy5fvoyLFy+isLAQv//973WWMzc3x5IlSxpdp5ubG3r27Il58+ZhyZIlCAoKwtNPP/3IGtLS0lBa\nWorhw4frTB8xYgSsrKyQkpLS5D5YWVkBAKqqqpoc1xqPO06Pm99cCoUCwINnNz6ssrISSqWyRfVb\nWloiODgYwcHBS
ElJwfvvv4/Dhw9j1qxZuHLlChwcHB67D+np6SgsLMT48eNbVMPDWnrMDHm+614j\nX1NT0+p1AcD+/fsNsp7OJDk5We4S6DHa2zlifzMsY/W3tsL+1jj2N/m1t9+d1FB7O0fsb4YlV387\n9P/Zu/PwqMq7/+OfCVkmCQkJkJCYAAJB3JBFLRAWi2AUKSA7ilbl0iJaAUHLIiiyWC0tcEXh8aFS\nrFIhYRG0LNKIgGCktQHkwUJZZF/CFhJIgCz37w9/GRmzkMBJzkzyfl3X/ME599znO/dM8p18ODNn\n2TIlJydr7dq1CgkJue556G/Fo7/Zz9N+d6Ko0p6jGw4bjxw5IkmKiIgoddzFixclSRMmTNCECRPc\n9kVHRyszM1OSFBYWVuZjBwYGat26dRo7dqymTZumyZMna8CAAZo/f36xYVlGRoYkqWbNmkX2hYWF\nKSsrq8zHrijXWqdr7S+rwu/JKFz3QtnZ2bp06VK55ipJmzZt9Mknn+j555/X//zP/+jLL79Unz59\nrvkYjh8/Lunar6mysmrNymPlypWaPn26du7cqczMTMvf4AwcONDS+aqDWbNmadasWXaXAS9Cf7NW\nZfW3ykB/o795Evobyov+Zi07+tuiRYs0Y8YMrV+/XjfddNN1zVEc+hv9zZPQ37zbDV8gpvAMucuX\nL5c6rvAXz8yZM2WMcbulpqa6fkle/aWwZXHHHXfos88+07FjxzRmzBglJSXpj3/8Y7FjCxthcU0p\nIyNDsbGx5Tp2RbjWOl1rf1k1atRIISEhOnjwoNv2vXv3SpLuuuuuctfet2/fYs+UfOKJJyT9dIW0\naz2GwiuqFb65uFFWrVlZHTp0SL1791ZUVJS2bNmi8+fP6+2337b0GD9/HNxKv0lSUlKS7XVwK/1W\nkRcCuR70N2tVVn+rCPS3H9HfPO8m0d+84UZ/c0d/c99fXu+8844WLFigdevW3XDQSH/7Ef3N824S\n/c0bbqX1txsOG++88075+Phow4YNpY6rX7++nE6ntm3bVuz+m2++WbVr19batWvLfOxjx47p+++/\nl/TjL6Tf//73at26tWtbcbXWrFlT3377rdv2LVu26MqVK7r77rvLfOyKcq11utb+svL19dXDDz+s\njRs3qqCgwLV99erVcjgc6tmzZ7nnvHz5crFrX3j1scIA81qPIS4uTgEBAfrmm2/K9Diu9b9OVq1Z\nWe3YsUO5ubl6/vnn1bhxYzmdTjkcjko5NgDr0N+sVVn9rSLQ335EfwOqBvqbtSqrvxljNGbMGO3Y\nsUPLly8v9mzP8qK//Yj+BljvhsPGiIgI9e3bV0uWLNG8efOUmZmp7777TnPnznUb53Q69fTTT2vh\nwoWaM2eOMjMzlZ+fryNHjuj48eMKCAjQ+PHjtXHjRg0fPlxHjx5VQUGBsrKySmw+x44d03PPPadd\nu3bpypUr2rp1qw4ePKi2bdsWO97pdGr06NFatmyZFixYoMzMTO3YsUPDhg1TdHS0hg4deqPLUaLa\ntWvr2LFjOnDggLKyskr8BXutdbrW/vKYOHGiTp48qddff10XL15Uamqqpk+frqeeekrNmjVzjVu9\nerVCQ0M1bdq0a87Zu3dvJScnKyMjQ+fPn9eKFSs0duxY9erVy9WsrvUYwsLC9OSTT2rZsmWaO3eu\nsrKylJ2dXeQsTOnHxnb27FktX75cubm5OnXqVJFxVq5ZWZ7HBg0aSJJSUlJ06dIl7dmz55rfJwPA\n89DfysYT+1tZ0d9+Qn8Dqg/6W9l4Wn/7/vvv9Yc//EF//vOf5efnJ4fD4Xa7+uxQ+ttP6G+ATczP\nXM/VkrKysswzzzxj6tSpY2rWrGk6dOhgXnvtNSPJxMbGmu3btxtjjLl8+bIZM2aMadCggfH19TUR\nERGmb9++ZufOna653n33XdO8eXPjdDqN0+k0rVq1MrNnzzbvvPOOiYqKMpJMUFC
Q6dmzpzlw4ICJ\nj4834eHhpkaNGuamm24yr776qsnLyyt2vDHGFBQUmOnTp5umTZsaPz8/Ex4ebnr37m12797tqmH2\n7NkmKCjISDJNmzY1+/btM3PnzjWhoaFGkmnYsKH573//W641SktLMw0bNjSBgYGmQ4cOZtSoUSYw\nMNBIMvXr1zcfffSRa+y11qks61hWGzZsML/4xS9MQECAiY6ONq+88oq5dOmS25hVq1aZkJAQM3Xq\n1FLnWrt2rRk4cKBp0qSJCQgIMP7+/qZZs2Zm0qRJRea81mO4cOGC+c1vfmPq1q1rfH19Te3atV1X\nSLv6amZnzpwxnTt3Nk6n0zRq1Mi8+OKL5pVXXjGSTFxcnDl06NA1j1ee5/vnz+OECROKfZ2NGTPG\n1K5d24SFhZn+/fubd99910gyTZo0MSNHjjT16tUzkkxwcLDp06dPmZ8vrmZ2fcTVzLyCp12t0xj6\nW1l4Yn9LTU017du3N9HR0a4ra0ZFRZn4+HizYcMG1zj6G/3N29HfvAP9jf5mRX/bsWOHq6cVd5s+\nfbprLP2N/ubt6G/eobSrUTuM+f8fiP//kpOTNXDgQP1sM2C7pUuXql+/ftq6datatmxpdzm24Ofz\n+jgcDiUlJWnAgAF2l4JSVPTrm58feCr6Gz+f14v+5h3ob6iu6G/8fF4v+pt3KOX1vfiGP0YNVBar\nrwgGAIAnoL8BAKoi+htQfRE2Xqddu3YV+Z6M4m6DBg2q1jUBALyLJ/YST6wJAOBdPLGXeGJNAGAF\nwsbrdOutt5bpUuCLFi2q1jVZZe7cuXruueckSb169dLRo0dtrgjVXUpKisaNG6elS5eqcePGrjeD\nTzzxRJGxCQkJCgkJUY0aNXTHHXcoLS3NhorLr6CgQDNnzlR8fHyRfZ9++qnefvtt5efn21AZKpIn\n9hJPrMkq9Dd4Gvob/a2q8sRe4ok1WYX+Bk9Df6vc/kbYCK/wm9/8RhkZGTLG6ODBg4qJibG7JFRj\nr7/+uhITEzV+/Hj17dtX+/fvV5MmTVSnTh0tWLBAK1eudBu/du1aLV68WD169NDOnTvVunVrmyov\nuz179qhTp04aNWqUsrOzi+zv2bOnnE6nunTpooyMDBsqBKoG+hs8Cf2N/gZYhf4GT0J/q/z+RtgI\noFxycnKK/Z8SbzvG9Xrrrbe0aNEiJScnKyQkxG1fYmKifHx8NHToUJ0/f96mCm/c9u3bNXbsWA0b\nNqzUL/MeMWKEWrRooYcfflh5eXmVWCEAWI/+Rn8rRH8DUJXQ3+hvhSqzvxE2AiiXefPmKT093euP\ncT327t2riRMn6o033pDT6SyyPz4+XiNHjtTRo0f18ssv21ChNVq0aKGlS5dq8ODBCggIKHXspEmT\ntG3bNs2aNauSqgOAikF/o79djf4GoKqgv9HfrlZZ/Y2wEajijDGaMWOGbrvtNgUEBCg8PFyPPPKI\ndu3a5RozfPhw+fv7KyoqyrXthRdeUHBwsBwOh06fPi1JGjlypEaPHq19+/bJ4XAoLi5OiYmJcjqd\nioyM1HPPPafo6Gg5nU7Fx8dry5YtlhxDktasWaPQ0FBNmzatQterNImJiTLGqGfPniWOmTp1qm65\n5Ra9//77SklJKXW+sjw3c+bMUXBwsIKCgrRixQp169ZNoaGhio2N1cKFC93my8/P12uvvaYGDRoo\nMDBQd911l5KSkm7sQV9DeHi47rvvPs2aNUvGmAo9FgBcjf5mHfpbUfQ3AHahv1mH/lZUpfU38zNJ\nSUmmmM0APMD1/Hy+9tprxt/f33z00UcmIyPDfPfdd6Z169ambt265sSJE65xgwcPNvXq1XO77/Tp\n040kc+rUKde2vn37miZNmriNGzp0qAkODjbff/+9uXTpktm5c6e59957TUhIiDl06JAlx/j73/9u\nQkJCzOTJk8v1+I0xRpJJSkoq9/1+rnHjxub
2228vdl+TJk3MDz/8YIwx5uuvvzY+Pj7m5ptvNhcu\nXDDGGLN69WrTq1cvt/uU9bl59dVXjSTzxRdfmPPnz5v09HTTsWNHExwcbK5cueIa9/LLL5uAgACz\nZMkSc+7cOTN+/Hjj4+Nj/vWvf133Y27Tpo1p0aJFqWPGjRtnJJmtW7de93GMqfj+Q38DPBf9jf5G\nf/Pc+QFcP/ob/a2a9rdkzmwEqrCcnBzNmDFDffr00eOPP65atWqpefPmeu+993T69GnNnTvXsmP5\n+vq6/ofn9ttv15w5c5SVlaX58+dbMn/37t2VmZmpiRMnWjJfeV28eFE//PCDmjRpcs2x7dq100sv\nvaQDBw5o7NixxY65nucmPj5eoaGhioiI0KBBg3Tx4kUdOnRIknTp0iXNmTNHvXv3Vt++fRUWFqYJ\nEybIz8/PsuegJE2bNpUk7dixo0KPAwCF6G/Wob+VjP4GoLLR36xDfytZZfQ3wkagCtu5c6cuXLig\ne+65x237vffeK39/f7fT5K12zz33KCgoyO2Ucm+Wnp4uY4yCgoLKNH7q1Klq1qyZZs+erU2bNhXZ\nf6PPjb+/vyQpNzdXkrR7925lZ2frzjvvdI0JDAxUVFRUhT8HhWty8uTJCj0OABSiv1mH/lYy+huA\nykZ/sw79rWSV0d8IG4EqrPCS9jVr1iyyLywsTFlZWRV6/ICAAJ06dapCj1FZLl26JEnX/MLdQk6n\nU/Pnz5fD4dCQIUOUk5Pjtt/q5+bixYuSpAkTJsjhcLhuBw8eVHZ2drnmKq/AwEBJP60RAFQ0+pt1\n6G8lo78BqGz0N+vQ30pWGf2NsBGowsLCwiSp2F98GRkZio2NrbBj5+bmVvgxKlPhL+T8/Pwy36dd\nu3YaNWqU9uzZoylTprjts/q5iYiIkCTNnDlTxhi3W2pqarnmKq8rV65I+mmNAKCi0d+sQ38rGf0N\nQGWjv1mH/layyuhvhI1AFXbnnXeqZs2a+vbbb922b9myRVeuXNHdd9/t2ubr6+s6pdsK69evlzFG\nbdu2rbBjVKbIyEg5HA6dP3++XPebMmWKbr31Vm3dutVte3mem7KoX7++nE6ntm3bVq77WaFwTerV\nq1fpxwZQPdHfrEN/Kxn9DUBlo79Zh/5Wssrob4SNQBXmdDo1evRoLVu2TAsWLFBmZqZ27NihYcOG\nKTo6WkOHDnWNjYuL09mzZ7V8+XLl5ubq1KlTOnjwYJE5a9eurWPHjunAgQPKyspyNZ+CggKdO3dO\neXl5+u677zRy5Eg1aNBATz31lCXHWL16tUJDQzVt2jTrF6oMgoKC1LhxYx05cqRc9ys8Hb9GjRpF\ntpf1uSnrcZ5++mktXLhQc+bMUWZmpvLz83XkyBEdP35ckjRo0CDVq1dPaWlp5Zr7WgrXpHnz5pbO\nCwAlob9Zh/5WMvobgMpGf7MO/a1kldLfynHpagA2u56fz4KCAjN9+nTTtGlT4+fnZ8LDw03v3r3N\n7t273cadOXPGdO7c2TidTtOoUSPz4osvmldeecVIMnFxcebQoUPGGGPS0tJMw4YNTWBgoOnQoYM5\nceKEGTp0qPHz8zMxMTHG19fXhIaGmkceecTs27fPsmOsWrXKhISEmKlTp5Z73SSZpKSkct/v54YP\nH278/PxMdna2a9uyZctMkyZNjCRTt25d89vf/rbY+77yyiumV69ebtvK8tzMnj3bBAUFGUmmadOm\nZt++fWbu3LkmNDTUSDINGzY0//3vf40xxly+fNmMGTPGNGjQwPj6+pqIiAjTt29fs3PnTmOMMb17\n9zaSzGuvvVbq40xNTTXt27c30dHRRpKRZKKiokx8fLzZsGFDkfHdu3c3MTExpqCgoGwLWYKK7j/0\nN8Bz0d/ob/Q3z50fwPWjv9Hfqml/SyZsBLyIp/58Dh061NSuXdvuMkpkVbPas2eP8fX1NR999JEF\nVVW+/Px
807FjRzNv3jzL5jx9+rRxOp3mj3/84w3PxR9jQPXlqT+f9DfvQH/zzJ8fAJ7780l/8w5e\n3N+S+Rg1AEuU54t3vVVcXJwmT56syZMn68KFC3aXUy75+flavny5srKyNGjQIMvmnTRpklq2bKnh\nw4dbNicAeBL6m2ejvwHA9aG/eTZv72+EjQBQDuPGjVP//v01aNCgcn/ZsJ3Wr1+vpUuXavXq1QoK\nCrJkzhkzZmjbtm1atWqV/Pz8LJkTAGAP+ttP6G8AUHXQ335Smf2NsBHADRk/frzmz5+v8+fPq1Gj\nRlqyZIndJVW4adOmafjw4fr9739vdyll1qVLF/3tb39TVFSUJfOtWLFCly9f1vr16xUeHm7JnADg\nSehv3oH+BgDlQ3/zDt7e33wr/AgAqrQ333xTb775pt1lVLqEhAQlJCTYXYZtevXqpV69etldBgBU\nGPpb9UR/A1DV0d+qp8rub5zZCAAAAAAAAMAShI0AAAAAAAAALEHYCAAAAAAAAMAShI0AAAAAAAAA\nLFHiBWL69+9fmXUAKIMjR45I4ufzesycOVOLFy+2bL7Dhw8rPz9fERERCg4Otmze6qzw9V3R+PkB\nPA/97fpZ3d9gPfqbdzHG6Ny5czp16pSio6MVGhpqd0nwYvS360d/83yl9TeHMcZcvSE1NVUzZsyo\n8KIAwJulpaXp4MGDys/PV1BQkCIiIhQZGamIiAgFBQXZXZ5Xq6g3FfQ34PqdOHFCW7duVbdu3ewu\nBfBa9DfPZIxRRkaG0tPTdfr0aZ06dUp5eXkKCAhQ69atFRMTY3eJAODRiulvi4uEjQCAssnLy9P2\n7duVkpKilJQUbdq0SZcuXVJ0dLQ6dOigrl276oEHHlCjRo3sLhUAbkhycrIGDhwo3jYC8Hb5+fna\ntWuXNm/e7HoPd+7cOUVERKhNmzau93CtW7eWw+Gwu1wA8EaEjQBglbKEjwkJCbr55pvtLhUAyoWw\nEYC3Kigo0H/+8x9XuPjFF1/o7NmzCgkJUZs2bdS1a1d17dpVrVq1ko8PlzQAAAsQNgJARcnJydG/\n//1v15vbr776SpcvX1bjxo3Vvn17dejQQQ8++KAaNmxod6kAUCrCRgDeZP/+/a7//F23bp3OnDlD\nuAgAlYewEQAqS3Z2ttLS0koNHx966CE1aNDA7lIBwA1hIwBPRrgIAB6FsBEA7JKdna2vv/5amzZt\n0ubNm7Vx40ZduXLFLXzs1q2b6tevb3epAKo5wkYAnuTqcPHLL7/U6dOnVbNmTbVt25ZwEQDsR9gI\nAJ7i4sWLSk1NLTZ87Nq1q9q3b6/7779fsbGxdpcKoJohbARgp8JwcdOmTVq3bp2OHj3qFi62b99e\nbdq0kZ+fn92lAgAIGwHAcxE+AvAUhI0AKtPV4eKXX36pI0eOKDg4WO3atSNcBADPR9gIAN6iMHws\nfPP9z3/+U7m5ua7wsWvXrurcubPq1q1rd6kAqhjCRgAV6epwcf369Tp8+LArXCz8aplOnTrJ39/f\n7lIBANdG2AgA3urChQv65ptvCB8BVDjCRgBW2r9/v+uTG2vWrNGhQ4cIFwGg6iBsBICq4urwMSUl\nRVu3blVBQYFb+Hj//ferTp06dpcKwMsQNgK4EcWFi0FBQYqPjydcBICqh7ARAKqqrKwsbdmypdTw\nsUuXLqpdu7bdpQLwcISNAMrj2LFj2rx5s1JSUvT555/r4MGDCgoKUqtWrdShQwd17dpVHTt2VEBA\ngN2lAgCsR9gIANVFceGjJN16662uN/6EjwCKQ9gIoDRXh4tr167VgQMHFBgYqNatWxMuAkD1Q9gI\nANVVceGjw+FQs2bNXH8YdO3aVeHh4XaXCsBmhI0ArnZ1uPiPf/xDP/zwg3x9fdWiRQvX+4cOHTrI\n6XTaXSoAoPIRNgIAfnTq1Cl98803rj8e0tLS5OPjo5YtW7q+T+mBBx5QW
FiY3aUCqGSEjUD1dvz4\ncW3atMn1H5T79+8nXAQAlISwEQBQPMJHAIUIG4Hq5cSJE/rqq6+UkpKiTZs26fvvvy8SLrZv316B\ngYF2lwoA8DyEjQCAsklPT9eGDRtcV5MsLnxMSEhQrVq17C4VgMUIG4GqjXARAGAhwkYAwPU5efKk\nNm7cWGz4WPhHSadOnQgfgSqAsBGoWorr4TVq1HCFi+3bt9d9992n0NBQu0sFAHgfwkYAgDX4wwWo\nuggbAe/GfxACACoRYSMAoGLwkSyg6iBsBLwLX30CALARYSMAoHJcfSXLksJHrmQJeCbCRsCzcVE3\nAIAHIWwEANjj6vAxJSVF+/fvJ3wEPBRhI+BZyhIudu3aVeHh4XaXCgCofggbAQCe4dixY64/mv7x\nj3/ohx9+IHwEPARhI2CvrKwsbdmyxfUfdFu3bpXD4VCzZs1cwSLhIgDAQxA2AgA809Xh49q1a3Xg\nwAEFBgaqdevWrj+sOnbsqICAALtLBao8wkagchUXLkrSrbfe6uqBXbp0Ue3atW2uFACAIggbAQDe\n4erw8fPPP9fBgwcVFBSkVq1aET4CFYywEahYxYWLBQUFaty4seusRcJFAICXIGwEAHin/fv3u66y\nuWbNGh06dEhBQUGKj493fV9Vp06d5O/vb3epgNcjbASsdeHCBX3zzTelhov333+/6tSpY3epAACU\nF2EjAKBquDp8XL16tQ4fPqzg4GC1a9eO8BG4QYSNwI25OlzctGmT/vnPfyo3N9ctXOzcubPq1q1r\nd6kAANwowkYAQNW0f/9+1x9169evJ3wEbgBhI1A+Fy9eVGpqaonhYvv27dWlSxfFxMTYXSoAAFYj\nbAQAVA9Xh49ffvmljhw54gofC//wa9Omjfz8/OwuFfA4hI1A6QrDxcIz7Ddu3KgrV664hYv333+/\nYmNj7S4VAICKRtgIAKierg4f161bp6NHj6pmzZpq27Yt4SPwM4SNgLvs7Gx9/fXXpYaLnTt3Vv36\n9e0uFQCAykbYCACA9FP4mJKSoi+//FKnT592Cx+7du2qVq1aycfHx+5SgUpH2IjqLjs7W2lpadq8\nebNSUlL01Vdf6fLly2rcuLHrqzm6detGuAgAAGEjAADFuzp8XLdunc6cOaOQkBC1adOG8BHVDmEj\nqpuyhIsPPfSQGjRoYHepAAB4GsJGAADK4urw8YsvvtDZs2cJH1FtEDaiqsvJydG///3vIuFidHS0\nOnTooK5du+rBBx9Uw4YN7S4VAABPR9gIAEB5FRQU6D//+Y/rj1LCR1R1hI2oavLy8rR9+3bXfyJt\n2rRJly5dcgsXExISdPPNN9tdKgAA3oawEQCAG5Wfn69du3a5wseUlBSdO3dOERERatOmjesP19at\nW8vhcNhdLlCq3NxcXbhwwW3b8uXLNWTIEJ09e9Ztu8PhUFhYWGWWB1yXsoSLDzzwgBo1amR3qQAA\neDvCRgAArFZS+BgZGalf/OIXhI/waCdPnlRMTIzy8/OvObZz585at25dJVQFlM/Pw8XNmzcrJyfH\nLVzs2rWrGjdubHepAABUNYSNAABUtPz8fG3btk2bNm3S5s2b9Y9//EMZGRmKjIzUfffd57rYAOEj\nPMX999+vDRs2qKCgoMQxDodD//u//6tnn322EisDind1uLhp0yZt3LhRmZmZbuFi+/btdccdd9hd\nKgAAVR1hIwAAle3n4ePatWt1/vx51atXT506dSJ8hO3mz5+vZ555ptSw0dfXVydOnFCdOnUqsTLg\nRyWFi1FRUerYsSPhIgAA9iFsBADAboXhY+EfzV999VWx4ePdd99td6moJjIzMxUREaErV64Uu79G\njRrq1q2bPvvss0quDNVVWX9P8p80AADYjrARAABPwxk78ASPPPKIVq5cqby8vCL7fHx89PHHH2vg\nwIE2VIbqgDPAAQDwWoSNAAB4urJc6
MCq8PH48eM6dOiQ2rRpY0Hl8GZLlizRgAEDVNxbRafTqdOn\nTys4ONiGyuApjDH67LPP1LNnzxuei++2BQCgyiBsBADA21TkVVY//PBDPf300xo9erTeeOMNBQYG\nVsAjgDe4dOmS6tatq4sXL7pt9/PzU79+/fTxxx/bVBk8wd69e/Xkk09qy5YtOnfunEJCQsp1//z8\nfO3atUubN292/S47d+6cIiIi1KZNG9fvMsJFAAC8DmEjAADe7ufh46ZNm3Tp0iW38PGBBx5Qo0aN\nrjnXkCFD9Ne//lU+Pj6qX7++PvroI7Vv374SHgU80a9//WstWrRIubm5btv//ve/q3v37jZVBTsV\nFBQoMTFRY8eOVUFBgXJzc7Vy5Uo9/PDDpd6PcBEAgGqDsBEAgKqmLOFjQkKCbr755iL3jYmJ0bFj\nxyT9eBGQgoICPfPMM5oxY4Zq1qxZyY8EdluzZo26devmti00NFSnTp2Sv7+/TVXBLvv27dOTTz6p\n1NRU15XK/f39NXLkSL399ttuYwsKCvSf//zHFS5+8cUXOnsyC1KcAAAgAElEQVT2rEJCQtSmTRvX\nGditWrWSj4+PHQ8HAABUDMJGAACqupycHP373/92/dH/1Vdf6fLly27h44MPPiiHw6GGDRsWub+v\nr6+ioqL017/+Vffff78NjwB2ycvLU7169XT27FlJP36EesiQIXrvvfdsrgyVqaCgQO+//75GjBih\n/Pz8Ime6tmjRQtu2bdP+/ftd/8mxbt06nTlzhnARAIDqh7ARAIDqJjs7W5s3b9b69eu1fv16/etf\n/1Jubq6ioqKUnp7uOmPpalef5finP/2p3N/PBu/129/+VnPnznUFTBs2bFCnTp1srgqVZefOnXri\niSe0ffv2Yn83SJLD4VBYWJjOnTun8PBwderUSZ07d9Yvf/lLNW/enHARAIDqhbARAIDq7uLFi9q8\nebN+97vf6fvvvy9y1tLVfH19FRkZqQ8++EAPPPBAJVYJu2zevFkdOnSQJNWrV0/Hjh0jPKoG8vLy\n9Kc//UkTJ06UpFJ/L0jSM888o+eff14tWrTg9QEAQPW2mHcCAABUc8HBwUpISFB6evo1A4W8vDyd\nPHlSCQkJ6tevn+vjtai64uPjFRMTI+nHC8YQJFV9O3bs0D333KNXX31Vubm51/y94O/vr7CwMD4i\nDQAAJEm+dhcAAEBqaqoOHz5sdxnVWnp6uo4fP16msfn5+ZKkpUuX6osvvtCwYcPUsmXLiiwPNrv3\n3nt19OhR1alTR8nJyXaXgwqSl5enTz75RMuWLZMxRmX9ANSVK1eUnJyse++9t4IrxLXEx8crNjbW\n7jIAANUcH6MGANiuf//+WrJkid1lAADg1ZKSkjRgwAC7ywAAVG+LObMRAOAR+vXrp8WLF9tdRrX1\n61//Wh999JGkHy8GU7t2bUVGRio2NlbR0dGKiopSvXr1FBkZqejoaEVGRioyMlIRERGSpOTkZA0c\nOLDMZ0LhRw6Hw2vCgSVLlqhfv352l4FKkJubq/T0dJ08eVInTpxQenq6Tpw4oZMnTyo9PV2HDh3S\niRMndPr0aWVkZLju9+mnn6pHjx42Vl69ORwOu0sAAEASH6MGAACSRo4cqbFjxyoyMlJ169a1uxx4\nIILG6sPPz08xMTGu7+osTW5urk6dOqWTJ0+qdu3alVAdAADwdISNAABArVu3trsEAF7Iz89PN910\nk2666Sa7SwEAAB6Cy8UBAAAAAAAAsARhIwAAAAAAAABLEDYCAAAAAAAAsARhIwAAAAAAAABLEDYC\nAACPsWrVKtWqVUufffaZ3aV4pOeee04Oh8N1e/zxx4uMSUlJ0bhx47R06VI1btzYNfaJJ54oMjYh\nIUEhISGqUaOG7rjjDqWlpVXGw7hhBQUFmjlzpuLj44vs+/TTT/X2228rPz/fkmOxnqxneV3Pei5f\nv
tztZ7tu3bqVVS4AAJYjbAQAAB7DGGN3CR6vdu3aWr16tXbv3q158+a57Xv99deVmJio8ePHq2/f\nvtq/f7+aNGmiOnXqaMGCBVq5cqXb+LVr12rx4sXq0aOHdu7c6RVXJd+zZ486deqkUaNGKTs7u8j+\nnj17yul0qkuXLsrIyLihY7GerGd5Xe969urVS0eOHNHGjRv18MMPV2bJAABYjrARAAB4jO7du+v8\n+fPq0aOH3aUoJyen2DOT7BYYGKiHHnpIt9xyiwICAlzb33rrLS1atEjJyckKCQlxu09iYqJ8fHw0\ndOhQnT9/vrJLtsz27ds1duxYDRs2TC1btixx3IgRI9SiRQs9/PDDysvLu65jsZ4/YT3L5kbW0+Fw\nKCYmRh07dlTTpk0rq2QAACoEYSMAAEAx5s2bp/T0dLvLKJO9e/dq4sSJeuONN+R0Oovsj4+P18iR\nI3X06FG9/PLLNlRojRYtWmjp0qUaPHiwW9BanEmTJmnbtm2aNWtWuY/DehbFel5bZa0nAACejrAR\nAAB4hE2bNqlBgwZyOBx69913JUlz5sxRcHCwgoKCtGLFCnXr1k2hoaGKjY3VwoULXfdNTEyU0+lU\nZGSknnvuOUVHR8vpdCo+Pl5btmxxjRs+fLj8/f0VFRXl2vbCCy8oODhYDodDp0+fliSNHDlSo0eP\n1r59++RwOBQXFydJWrNmjUJDQzVt2rTKWJIyS0xMlDFGPXv2LHHM1KlTdcstt+j9999XSkpKqfMZ\nYzRjxgzddtttCggIUHh4uB555BHt2rXLNaasz40k5efn67XXXlODBg0UGBiou+66S0lJSTf2oK8h\nPDxc9913n2bNmlXuj+eznkWxnta6kfUEAMDTETYCAACP0KFDB3399ddu255//nm99NJLysnJUUhI\niJKSkrRv3z41btxYzz77rHJzcyX9GCI+9dRTys7O1ogRI3TgwAGlpaUpLy9PDzzwgA4fPizpx9Bj\nwIABbseYPXu23njjDbdts2bNUo8ePdSkSRMZY7R3715Jcl3UoaCgoELW4HqtXLlSzZo1U1BQUIlj\nAgMD9cEHH8jHx0fPPvusLl68WOLYSZMmady4cXr11VeVnp6ujRs36vDhw+rYsaNOnjwpqezPjSSN\nHTtWf/jDHzRz5kwdP35cPXr00GOPPaZvv/3WukUoRqtWrXT06FFt3769XPdjPYvHelrretcTAABP\nR9gIAAC8Qnx8vEJDQxUREaFBgwbp4sWLOnTokNsYX19f19lOt99+u+bMmaOsrCzNnz/fkhq6d++u\nzMxMTZw40ZL5rHDx4kX98MMPatKkyTXHtmvXTi+99JIOHDigsWPHFjsmJydHM2bMUJ8+ffT444+r\nVq1aat68ud577z2dPn1ac+fOLXKf0p6bS5cuac6cOerdu7f69u2rsLAwTZgwQX5+fpY9LyUp/O67\nHTt2lPk+rGfJWE9rXc96AgDgDQgbAQCA1/H395ckt7OTinPPPfcoKCjI7eOVVU16erqMMaWeNXa1\nqVOnqlmzZpo9e7Y2bdpUZP/OnTt14cIF3XPPPW7b7733Xvn7+7t9LL04P39udu/erezsbN15552u\nMYGBgYqKiqrw56VwTQrPdisL1rNkrKe1rmc9AQDwBoSNAACgSgsICNCpU6fsLqPCXLp0SZKueUGK\nQk6nU/Pnz5fD4dCQIUOUk5Pjtj8jI0OSVLNmzSL3DQsLU1ZWVrnqK/w47IQJE+RwOFy3gwcPKjs7\nu1xzlVdgYKCkn9aoLFjPkrGe1rqe9QQAwBsQNgIAgCorNzdXGRkZio2NtbuUClMYWBR+n2RZtGvX\nTqNGjdKePXs0ZcoUt31hYWGSVGxocz1rGRERIUmaOXOmjDFut9TU1HLNVV5XrlyR9NMalQXrWTLW\n01rXs54AAHgDwkYAAFBlrV+/XsYYtW3b1rXN19f3mh+/9iaRkZF
yOBw6f/58ue43ZcoU3Xrrrdq6\ndavb9jvvvFM1a9YscnGMLVu26MqVK7r77rvLdZz69evL6XRq27Zt5bqfFQrXpF69emW+D+tZMtbT\nWtezngAAeAPCRgAAUGUUFBTo3LlzysvL03fffaeRI0eqQYMGeuqpp1xj4uLidPbsWS1fvly5ubk6\ndeqUDh48WGSu2rVr69ixYzpw4ICysrKUm5ur1atXKzQ0VNOmTavER1W6oKAgNW7cWEeOHCnX/Qo/\nrlqjRo0i20ePHq1ly5ZpwYIFyszM1I4dOzRs2DBFR0dr6NCh5T7O008/rYULF2rOnDnKzMxUfn6+\njhw5ouPHj0uSBg0apHr16iktLa1cc19L4Zo0b968zMdhPUvGelbsegIAUGUYAABs1q9fP9OvXz+7\ny8ANSEpKMjf6tuKdd94xUVFRRpIJCgoyPXv2NLNnzzZBQUFGkmnatKnZt2+fmTt3rgkNDTWSTMOG\nDc1///tfY4wxQ4cONX5+fiYmJsb4+vqa0NBQ88gjj5h9+/a5HefMmTOmc+fOxul0mkaNGpkXX3zR\nvPLKK0aSiYuLM4cOHTLGGJOWlmYaNmxoAgMDTYcOHcyJEyfMqlWrTEhIiJk6deoNPdZCkkxSUlKZ\nxw8dOtTExMQU2T58+HDj5+dnsrOzXduWLVtmmjRpYiSZunXrmt/+9rfFzvnKK6+YXr16uW0rKCgw\n06dPN02bNjV+fn4mPDzc9O7d2+zevds1pjzPzeXLl82YMWNMgwYNjK+vr4mIiDB9+/Y1O3fuNMYY\n07t3byPJvPbaa6U+/tTUVNO+fXsTHR1tJBlJJioqysTHx5sNGzYUGd+9e3cTExNjCgoKynUc1pP1\ntGM9C40YMcLUqVOn1GMVp7y/TwAAqCDJhI0AANsRNno/K8LGGzV06FBTu3ZtW2soL6vCxj179hhf\nX1/z0UcfWVlepcnPzzcdO3Y08+bNs2zO06dPG6fTaf74xz+W+zisZ1GsZ8WvZyHCRgCAl0vmY9QA\nAKDKKM9FKLxVTk6OPv/8c+3Zs8d1gYm4uDhNnjxZkydP1oULF2yusHzy8/O1fPlyZWVladCgQZbN\nO2nSJLVs2VLDhw8v93FYz6JYz4pdT2OMjh07pk2bNmnv3r2WHQcAADsQNgIAqoVnnnlGISEhcjgc\npV4IoKzjrqWgoEAzZ85UfHz8dc/xcx9//LEcDke556zsx46KdfbsWT300EO65ZZbNGTIENf2cePG\nqX///ho0aFC5L8Zhp/Xr12vp0qVavXq1goKCLJlzxowZ2rZtm1atWiU/P7/rOg7r+RPWs+LXc8WK\nFYqJiVHHjh21cuVKS44DAIBdHMYYY3cRAIDqrX///pKkxYsXV+hxFi1apEcffVRbt25Vy5Ytb3hc\nSfbs2aOnn35amzdvVosWLSwL7n71q19p165d2rdvn/bs2aO4uLgy37eiH3tycrIGDhwou95WjB8/\nXn/605905coV3XzzzZo+fbr69etnSy3l4XA4lJSUpAEDBlg259q1a7Vu3Tq99dZbls3pTVasWKHv\nv/9ev/vd74pcXOR6sJ6sp5WsXs+rVcTvEwAArsNiX7srAACgKtm+fbsmT56sYcOG6eLFi5aFb2fO\nnNH333+vKVOm6PHHH9eHH36oyZMnWzJ3VfDmm2/qzTfftLsMj5CQkKCEhAS7y7BNr1691KtXL8vm\nYz1ZTytZvZ4AAHgiPkYNAKg2HA6HpeOK06JFCy1dulSDBw9WQEDAdc/zc8nJyerevbt69uwpp9Op\njz76qFxBZmU8dgAAAAAgbAQAeKWvvvpKt99+u2rVqiWn06nmzZvr888/d+03xmj69Olq1qyZAgIC\nVKtWLb3yyitF5inrOKutWbNGoaGhmjZtWpnGf/zxx+rTp49CQkKUkJCgAwcO6Kuvvip2rKc/dgAA\nAABVF2EjAMArnTx5UgMHDtS
BAwd07Ngx1axZU4MHD3btnzhxosaMGaOhQ4fq5MmTOnHihMaOHVtk\nnrKOs1rhVZMLCgquOfbQoUPavXu3OnXqJOmn77j88MMPix3v6Y8dAAAAQNXFdzYCALxSv3793C4A\n0rNnT40fP16nTp1SzZo1NXPmTHXt2lWjRo1yjaldu7bbHDk5OWUaVxG6d++uzMzMMo39+OOP9atf\n/cp1MYGePXsqICBAixcv1jvvvKPAwEDX2LI+JjsfOwAAAICqi7ARAFAl+Pn5SfrxjMG9e/cqOztb\nXbp0KfU+ZR1nt48//tjt4iehoaFKSEjQZ599phUrVmjQoEGufXY/9sKzLlF2M2fOrPArsQMAAACV\nhbARAOCVVq5cqenTp2vnzp3KzMxUbm6ua9+RI0ckSREREaXOUdZxdvq///s/7dixQz169Ch2/4cf\nfugWNlalxw4AAADA+xA2AgC8zqFDh9S7d2/16dNHf/nLX3TTTTfpnXfe0e9+9ztJktPplCRdvny5\n1HnKOs5Of/vb3/Too4/q448/dtt+7tw5xcTEaO3atTpx4oSioqIk2f/YOUOvfBwOh1566SUNGDDA\n7lIAeDmHw2F3CQAASOICMQAAL7Rjxw7l5ubq+eefV+PGjeV0Ot3+yLrzzjvl4+OjDRs2lDpPWcfZ\nxRijRYsW6YUXXiiyLzw8XP3791d+fr5bEFlVHjsAAAAA70TYCADwOg0aNJAkpaSk6NKlS9qzZ4+2\nbNni2h8REaG+fftqyZIlmjdvnjIzM/Xdd99p7ty5bvOUdVxFWL16tUJDQzVt2rQSx3z99dcKDQ1V\n+/bti90/bNgwSe5XpfaGxw4AAACg6iJsBAB4nebNm2vMmDGaPXu2oqOj9eqrr+qXv/ylJKlDhw46\nfPiw/vKXv+jpp5/WmDFjFBMToxdeeEEdO3aUJPXo0UPfffedJJV5XFl988036tChg2666SZt2bJF\n27dvV3R0tNq3b6+NGzeWeZ5nnnlGDz74oL7//nu1bNlSW7dudds/depU9enTR5K0fft2xcbGas6c\nOeV6TFY/dgAAAABwGGOM3UUAAKq3wisY831/3is5OVkDBw4UbyvKx+FwKCkpie9sBHDD+H0CAPAQ\nizmzEQAAAAAAAIAlCBsBACjFrl275HA4rnkbNGiQ3aUCqAApKSkaN26cli5dqsaNG7t+5p944oki\nYxMSEhQSEqIaNWrojjvuUFpamg0Vl19BQYFmzpyp+Pj4Ivs+/fRTvf3228rPz7ehMgAA4I0IGwEA\nKMWtt94qY8w1b4sWLbK7VAAWe/3115WYmKjx48erb9++2r9/v5o0aaI6depowYIFWrlypdv4tWvX\navHixerRo4d27typ1q1b21R52e3Zs0edOnXSqFGjlJ2dXWR/z5495XQ61aVLF2VkZNhQIQAA8DaE\njQAAoErIyckp9swsbzsGPMNbb72lRYsWKTk5WSEhIW77EhMT5ePjo6FDh+r8+fM2VXjjtm/frrFj\nx2rYsGFq2bJlieNGjBihFi1a6OGHH1ZeXl4lVggAALwRYSMAAKgS5s2bp/T0dK8/Buy3d+9eTZw4\nUW+88YacTmeR/fHx8Ro5cqSOHj2ql19+2YYKrdGiRQstXbpUgwcPVkBAQKljJ02apG3btmnWrFmV\nVB0AAPBWhI0AAMAWxhjNmDFDt912mwICAhQeHq5HHnlEu3btco0ZPny4/P39FRUV5dr2wgsvKDg4\nWA6HQ6dPn5YkjRw5UqNHj9a+ffvkcDgUFxenxMREOZ1ORUZG6rnnnlN0dLScTqfi4+O1ZcsWS44h\nSWvWrFFoaKimTZtWoeuFypOYmChjjHr27FnimKlTp+qWW27R+++/r5SUlFLnK8trfc6cOQoODlZQ\nUJBWrFihbt26KTQ0VLGxsVq4cKHbfPn5+XrttdfUoEEDBQYG6q677lJSUtKNPehrCA8P13333
adZ\ns2Zx1XkAAFAqwkYAAGCLSZMmady4cXr11VeVnp6ujRs36vDhw+rYsaNOnjwp6cfQZ8CAAW73mz17\ntt544w23bbNmzVKPHj3UpEkTGWO0d+9eDR8+XE899ZSys7M1YsQIHThwQGlpacrLy9MDDzygw4cP\n3/AxJLkunFFQUGDd4sBWK1euVLNmzRQUFFTimMDAQH3wwQfy8fHRs88+q4sXL5Y4tiyv9eeff14v\nvfSScnJyFBISoqSkJO3bt0+NGzfWs88+q9zcXNd8Y8eO1R/+8AfNnDlTx48fV48ePfTYY4/p22+/\ntW4RitGqVSsdPXpU27dvr9DjAAAA70bYCAAAKl1OTo5mzJihPn366PHHH1etWrXUvHlzvffeezp9\n+rTmzp1r2bF8fX1dZ5TdfvvtmjNnjrKysjR//nxL5u/evbsyMzM1ceJES+aDvS5evKgffvhBTZo0\nuebYdu3a6aWXXtKBAwc0duzYYsdcz2s9Pj5eoaGhioiI0KBBg3Tx4kUdOnRIknTp0iXNmTNHvXv3\nVt++fRUWFqYJEybIz8/Pstd0SZo2bSpJ2rFjR4UeBwAAeDfCRgAAUOl27typCxcu6J577nHbfu+9\n98rf39/tY85Wu+eeexQUFOT2EVagUHp6uowxpZ7VeLWpU6eqWbNmmj17tjZt2lRk/42+1v39/SXJ\ndWbj7t27lZ2drTvvvNM1JjAwUFFRURX+mi5ck8KzMQEAAIpD2AgAACpdRkaGJKlmzZpF9oWFhSkr\nK6tCjx8QEKBTp05V6DHgnS5duiRJ17xgSiGn06n58+fL4XBoyJAhysnJcdtv9Wu98OPaEyZMkMPh\ncN0OHjyo7Ozscs1VXoGBgZJ+WiMAAIDiEDYCAIBKFxYWJknFBi0ZGRmKjY2tsGPn5uZW+DHgvQoD\ntcLv4iyLdu3aadSoUdqzZ4+mTJnits/q13pERIQkaebMmTLGuN1SU1PLNVd5XblyRdJPawQAAFAc\nwkYAAFDp7rzzTtWsWbPIBS22bNmiK1eu6O6773Zt8/X1dbs4xo1av369jDFq27ZthR0D3isyMlIO\nh0Pnz58v1/2mTJmiW2+9VVu3bnXbXp7XelnUr19fTqdT27ZtK9f9rFC4JvXq1av0YwMAAO9B2AgA\nACqd0+nU6NGjtWzZMi1YsECZmZnasWOHhg0bpujoaA0dOtQ1Ni4uTmfPntXy5cuVm5urU6dO6eDB\ng0XmrF27to4dO6YDBw4oKyvLFR4WFBTo3LlzysvL03fffaeRI0eqQYMGeuqppyw5xurVqxUaGqpp\n06ZZv1CodEFBQWrcuLGOHDlSrvsVfpy6Ro0aRbaX9bVe1uM8/fTTWrhwoebMmaPMzEzl5+fryJEj\nOn78uCRp0KBBqlevntLS0so197UUrknz5s0tnRcAAFQthI0AAMAWr7/+ut58801NnjxZdevW1X33\n3aebb75Z69evV3BwsGvc888/r86dO+vRRx9Vs2bNNGXKFNfHONu1a6fDhw9LkoYNG6bIyEjdfvvt\nevjhh3X27FlJP36/XPPmzRUYGKiOHTvqlltu0Zdffun2nXw3egxULd27d9fOnTvdvn/xk08+UVxc\nnPbt26d7771XL774YpH7tW3bVqNGjSqyvSyv9Tlz5mjmzJmSpLvuukv79+/Xn//8Z40ePVqS9NBD\nD2nPnj2SpFmzZumll17S22+/rTp16ig6OlojR47UuXPnJP34cef09HStWLGi1Mf5zTffqEOHDrrp\nppu0ZcsWbd++XdHR0Wrfvr02btxYZPy//vUvxcTE6K677irLMgIAgGrKYYwxdhcBAKje+vfvL0la\nvHixzZXgeiUnJ2vgwIHytLcVzz33nBYvXqwzZ87YXUqxHA6HkpKSNGDAALtLwVX27t2r2267TfPn\nz9fjjz9udznlVlBQoF/+8pd66qmnNGTIEEvmPHPmjGJjY
zV16lRXAArPwu8TAICHWMyZjQAAoEor\nz4U+AOnHj9VPnjxZkydP1oULF+wup1zy8/O1fPlyZWVladCgQZbNO2nSJLVs2VLDhw+3bE4AAFA1\nETYCAAAAPzNu3Dj1799fgwYNKvfFYuy0fv16LV26VKtXr1ZQUJAlc86YMUPbtm3TqlWr5OfnZ8mc\nAACg6iJsBAAAVdL48eM1f/58nT9/Xo0aNdKSJUvsLgleZtq0aRo+fLh+//vf211KmXXp0kV/+9vf\nFBUVZcl8K1as0OXLl7V+/XqFh4dbMicAAKjafO0uAAAAoCK8+eabevPNN+0uA14uISFBCQkJdpdh\nm169eqlXr152lwEAALwIZzYCAAAAAAAAsARhIwAAAAAAAABLEDYCAAAAAAAAsARhIwAAAAAAAABL\nEDYCAAAAAAAAsARXowYAeIQlS5bI4XDYXQZuEM9h+Q0cOFADBw60uwwAAADAEg5jjLG7CABA9Zaa\nmqrDhw/bXQaAEqSmpmrWrFlKSkqyuxQApYiPj1dsbKzdZQAAqrfFhI0AAAAoVXJysgYOHCjeNgIA\nAOAaFvOdjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAs\nQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAA\nAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAA\nwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgI\nAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAA\nAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBKEjQAAAAAAAAAsQdgIAAAAAAAAwBK+\ndhcAAAAAz3Hq1Cl98sknbtu+/fZbSdLcuXPdtoeEhOjRRx+ttNoAAADg+RzGGGN3EQAAAPAMly9f\nVmRkpC5cuKAaNWpIkgrfLjocDte43NxcPfnkk/rggw/sKBMAAACeaTEfowYAAIBLQECA+vXrJ19f\nX+Xm5io3N1d5eXnKy8tz/Ts3N1eS9Nhjj9lcLQAAADwNYSMAAADcPPbYY7py5UqpY8LCwnT//fdX\nUkUAAADwFoSNAAAAcNO5c2dFRESUuN/Pz0+PP/64fH35+m8AAAC4I2wEAACAGx8fHw0ePFh+fn7F\n7s/NzeXCMAAAACgWYSMAAACKePTRR13fzfhzN910k9q1a1fJFQEAAMAbEDYCAACgiF/84hdq2LBh\nke3+/v568skn3a5MDQAAABQibAQAAECxnnjiiSIfpb5y5QofoQYAAECJCBsBAABQrMGDBxf5KHVc\nXJyaN29uU0UAAADwdISNAAAAKNatt96q22+/3fWRaT8/Pz399NM2VwUAAABPRtgIAACAEv36179W\njRo1JEl5eXl8hBoAAAClImwEAABAiR599FHl5+dLklq3bq1GjRrZXBEAAAA8GWEjAAAAStSgQQO1\nadNGkvTkk0/aXA0AAAA8na/dBQAAUNXNmDFDqampdpcBXLfLly/L4XBo7dq12rhxo93lANdt1KhR\nateund1lAABQpXFmIwAAFSw1NVXffPON3WUA1y02NpyGRIMAACAASURBVFb16tWT0+mUJB05ckRL\nliyxuSrvs2TJEh05csTuMqqtJUuW6PDhw3aXAQBAlceZjQAAVIK2bdtq8eLFdpcBXLe9e/cqLi5O\nkpScnKyBAwfymi4nh8Ohl156SQMGDLC7lGqp8KrqAACgYnFmIwAAAK6pMGgEAAAASkPYCAAAAAAA\nAMAShI0AAAAAAAAAL
EHYCAAAAAAAAMAShI0AAAAAAAAALEHYCAAAAFusWrVKtWrV0meffWZ3KR4v\nJSVF48aN09KlS9W4cWM5HA45HA498cQTRcYmJCQoJCRENWrU0B133KG0tDQbKi6/goICzZw5U/Hx\n8UX2ffrpp3r77beVn59vQ2UAAKA8CBsBAABgC2OM3SV4hddff12JiYkaP368+vbtq/3796tJkyaq\nU6eOFixYoJUrV7qNX7t2rRYvXqwePXpo586dat26tU2Vl92ePXvUqVMnjRo1StnZ2UX29+zZU06n\nU126dFFGRoYNFQIAgLIibAQAAIAtunfvrvPnz6tHjx52l6KcnJxiz6iz21tvvaVFixYpOTlZISEh\nbvsSExPl4+OjoUOH6vz58zZVeOO2b9+usWPHatiwYWrZsmWJ40aMGKEWLVro4YcfVl5eXiVWCAAA\nyoOwEQAAANXevHnzlJ6ebncZbvbu3auJEyfqjTfekNPpLLI/Pj5eI0eO1NGjR/Xyyy/bUKE1WrRo\noaVLl2rw4MEKCAgodeykSZO0bds2zZo1q5KqAwAA5UXYCAAAgEq3adMmNWjQQA6HQ++++64kac6c\nOQoODlZQUJBWrFihbt26KTQ0VLGxsVq4cKHrvomJiXI6nYqMjNRzzz2n6OhoOZ1OxcfHa8uWLa5x\nw4cPl7+/v6KiolzbXnjhBQUHB8vhcOj06dOSpJEjR2r06NHat2+fHA6H4uLiJElr1qxRaGiopk2b\nVhlLUkRiYqKMMerZs2eJY6ZOnapbbrlF77//vlJSUkqdzxijGTNm6LbbblNAQIDCw8P1yCOPaNeu\nXa4xZX0OJCk/P1//r707D6rqzPM//rkqcFkFjQoJGgPELaIm6q8Etzh2nEQHjQuC0STGbkcxFWRJ\nT8BoXBCjyYxQttIZkwyZbqMiarS71HTKmhBjxdBJo2jTpQMoikvEjUVBZTm/Pyxu9x1UuHrhQny/\nqs4fOec5z/O9zxOrqE+dc5733ntPPXr0kKurqwYMGKCMjIyH+9GN8PHx0ejRo5Wamspr+AAAtFKE\njQAAAGhxI0aM0HfffWd1bsGCBYqNjVVVVZU8PT2VkZGhwsJCBQQEaO7cuaqurpZ0J0ScPXu2Kisr\ntXDhQhUVFSknJ0c1NTV64YUXVFxcLOlOWDd9+nSrMTZs2KDly5dbnUtNTVVYWJgCAwNlGIYKCgok\nybIZSV1dXbPMQWP27Nmj3r17y83N7Z5tXF1d9dlnn6ldu3aaO3eubty4cc+2y5YtU2Jiot59912V\nlJTowIEDKi4u1siRI3Xx4kVJTV8DSUpISNAHH3yglJQUXbhwQWFhYXrllVf0448/2m8S7uLZZ5/V\nuXPnlJub26zjAACAB0PYCAAAgFYnNDRUXl5e6tKliyIjI3Xjxg2dOXPGqk2HDh0sT+n169dPaWlp\nqqioUHp6ul1qmDBhgsrLy7VkyRK79GeLGzdu6NSpUwoMDGy0bUhIiGJjY1VUVKSEhIS7tqmqqtLa\ntWs1ZcoUzZo1Sx07dlRwcLA++ugjXb58WRs3bmxwz/3W4ObNm0pLS9PkyZM1depUeXt7a/HixXJy\ncrLb/N/L008/LUk6duxYs44DAAAeDGEjAAAAWjVnZ2dJsnqq7m6GDBkiNzc3q9eC26qSkhIZhnHf\npxr/0cqVK9W7d29t2LBBBw8ebHA9Ly9P169f15AhQ6zODx06VM7Ozlavn9/N/12DEydOqLKyUv37\n97e0cXV1la+vb7PPf/2c1D+NCQAAWhfCRgAAAPxsuLi46NKlS44u46HdvHlTkhrdMKWe2WxWenq6\nTCaT5syZo6qqKqvrpaWlkiQPD48G93p7e6uiosKm+upf1168eLFMJpPlOH36tCorK23qy1aurq6S\n/j5HAACgdSFsBAAAwM9CdXW1SktL5e/v7+hSHlp9oFb/3cimCAkJUVxcnPLz85WUlGR
1zdvbW5Lu\nGio+yJx16dJFkpSSkiLDMKyOQ4cO2dSXrW7fvi3p73MEAABaF8JGAAAA/CxkZWXJMAwNGzbMcq5D\nhw6Nvn7dGnXt2lUmk0llZWU23ZeUlKQ+ffro8OHDVuf79+8vDw+PBpu3ZGdn6/bt2xo8eLBN43Tv\n3l1ms1lHjhyx6T57qJ+Tbt26tfjYAACgcYSNAAAAaJPq6up07do11dTU6OjRo4qJiVGPHj00e/Zs\nS5ugoCBdvXpVu3btUnV1tS5duqTTp0836KtTp046f/68ioqKVFFRoerqau3bt09eXl5KTk5uwV91\nh5ubmwICAnT27Fmb7qt/nbp9+/YNzsfHx2vnzp3atGmTysvLdezYMUVFRcnPz0/z5s2zeZw33nhD\nW7ZsUVpamsrLy1VbW6uzZ8/qwoULkqTIyEh169ZNOTk5NvXdmPo5CQ4Otmu/AADAPggbAQAA0OLW\nr1+voUOHSpLeeecdTZo0SWlpaUpJSZEkDRgwQCdPntTHH3+s+Ph4SdKLL76o/Px8Sx83b95UcHCw\nXF1dNXLkSPXq1Utff/211XcOFyxYoDFjxmjGjBnq3bu3kpKSLK/fhoSEqLi4WJIUFRWlrl27ql+/\nfho/fryuXr3aIvNwPxMmTFBeXp7V9xe/+OILBQUFqbCwUEOHDtVbb73V4L5hw4YpLi6uwfmlS5dq\n1apVWrFihR577DGNHj1aPXv2VFZWltzd3SXJpjVITU1VbGys1qxZo86dO8vPz08xMTG6du2apDuv\nO5eUlGj37t33/Z3ff/+9RowYoccff1zZ2dnKzc2Vn5+fhg8frgMHDjRo/8MPP+iJJ57QgAEDmjKN\nAACghZkMwzAcXQQAAD9n4eHhkqTMzEwHVwLYx7Zt2xQRESFH/hk5f/58ZWZm6sqVKw6rwVYmk0kZ\nGRmaPn16k9oXFBSob9++Sk9P16xZs5q5Ovurq6vT888/r9mzZ2vOnDl26fPKlSvy9/fXypUrLQFo\nU9k6/wAA4IFk8mQjAAAA2iRbNk9pi4KCgrRixQqtWLFC169fd3Q5NqmtrdWuXbtUUVGhyMhIu/W7\nbNkyDRo0SNHR0XbrEwAA2BdhIwAAANBKJSYmKjw8XJGRkTZvFuNIWVlZ2rFjh/bt2yc3Nze79Ll2\n7VodOXJEe/fulZOTk136BAAA9kfYCABAK/Pv//7vlp1oP/roo2Yfb+jQoWrfvr0GDRpk872/+tWv\n5OnpKZPJ1Ky70tbV1SklJUWhoaF3vb5mzRr16dNHrq6ucnd3V58+fbRkyRKVl5fbPNaOHTsUEBAg\nk8kkk8kkX19fh73C2hbWxhEWLVqk9PR0lZWV6amnntL27dsdXVKzSk5OVnR0tN5//31Hl9JkY8eO\n1eeffy5fX1+79Ld7927dunVLWVlZ8vHxsUufAACgeRA2AgDQyrz99tv67rvvWmy8H374QWPGjHmg\nez/55BN9/PHHdq7IWn5+vkaNGqW4uDhVVlbetc23336ruXPn6syZM7p48aKSkpK0Zs0aTZs2zebx\npk6dqpMnTyowMFAdO3bUTz/9pE2bNj3sz3ggrX1tHGXVqlW6deuWDMPQqVOnHmid25px48Zp9erV\nji7DYSZNmqTExMQGu2wDAIDWp4OjCwAAAK2DyWRydAkN5ObmasWKFYqKitKNGzfuuSGJs7Oz3nzz\nTZnNZkl3NuXJzMxUZmamLly4ID8/v5Ys2+5a49oAAAAAd8OTjQAAQJIe+BtozRmEDRw4UDt27NDM\nmTPl4uJyz3Y7d+60BI31nnjiCUlqcxtr3E1rXBsAAADgbggbAQBoI7799lv169dPHTt2lNlsVnBw\nsP70pz9JklJTU+Xu7q527dpp8ODB6tatm5ycnOTu7q7nnntOI0eOVPfu3WU2m+Xt7a1/+7d/a9B/\nQUGB+vTpI3d3d7m6umrkyJE6ePCgVRvDMPThhx+
qd+/ecnFxUceOHfXrX//aplpbSn5+vry9vfXk\nk09azn355Zfy8vJScnKyXcdibQAAAIA7CBsBAGgjLl68qIiICBUVFen8+fPy8PDQzJkzJUkxMTH6\n9a9/LcMw9Nvf/lanTp3STz/9pFGjRunw4cNKTEzU4cOHdfXqVb3++uv68MMPlZuba9W/j4+Pvvzy\nS5WVlenHH39UdXW1XnjhBeXn51vaLFmyRO+8847mzZunixcv6qefflJCQoJNtTan6upqnTt3TuvX\nr9f+/fv1m9/8Rs7OzpbrtbW1ku5sOGNPrA0AAABwB2EjAABtxLRp07R06VL5+PioU6dOmjhxoq5c\nuaJLly5ZtevXr5/c3NzUuXNnzZgxQ5LUo0cPPfbYY3Jzc7PsrHz8+HGr+zw9PdWzZ0916NBBzzzz\njD7++GPdvHlTGzdulCRVVVUpJSVFv/jFLxQXFydvb2+5urqqU6dOD1yrvXXv3l3+/v5atmyZPvjg\nA0VERFhdnzBhgsrLy7VkyRK7jsvaAAAAAHcQNgIA0EbVf8ev/mm9u6l/qq+mpqbBfdXV1fftPzg4\nWB07dtTRo0cl3XmVt7KyUmPHjm2WWu2huLhYJSUl2rx5s/77v/9bzz77rEpKSpp1zLt5VNbGZDJx\n2HBIUkREhMPreFQPAADQMtiNGgCANmLPnj368MMPlZeXp/Ly8kYDKXtwcnKyjHP27FlJUpcuXRq9\nzxG1Snfq7dKli8aNG6ennnpKvXr10qpVq5Samtqs4z6qa5ORkfHA9z6KIiIiFBMTo5CQEEeX8kj6\nv086AwCA5kHYCABAG3DmzBlNnjxZU6ZM0X/913/p8ccf129+85u7biZiLzU1Nbp69ap69OghSZbd\nnm/dutXqar2boKAgtW/fXnl5eXbv+8CBA/rLX/6i2NjYR3ptpk+f/kD3PaoiIiIUEhLCvDkIYSMA\nAC2D16gBAGgDjh07purqai1YsEABAQEym83N/lrg119/rbq6Oj333HOSpP79+6tdu3b65ptvWlWt\nV65c0SuvvNLgfH5+vmpra9W9e3e7j/mXv/xF7u7uklgbAAAA4B8RNgIA0AbUP8G2f/9+3bx5U/n5\n+crOzrbrGLdv31ZZWZlqamqUk5Oj6OhoPfnkk5o9e7akO6/oTp06Vdu3b9enn36q8vJyHT161LJJ\nSUvW+o/c3d311Vdf6X/+538srwUfPnxYr7/+utzd3RUXF2dpu2/fPnl5eSk5OfmBxqqurtbFixeV\nlZVlCRtZGwAAAOAfGAAAoFlNmzbNmDZtWpPb/8d//IfRrVs3Q5Lh7u5uTJkyxTAMw3jnnXeMTp06\nGd7e3kZ4eLixfv16Q5IRGBhoxMfHG25uboYko2fPnsa3335rrF692ujYsaMhyejWrZvx+eefG1u3\nbrX07ePjY2zZssUwDMNIT083xowZY3Tt2tXo0KGD0blzZ2PGjBnG6dOnrWqrqKgwfvWrXxmdO3c2\nPDw8jBEjRhjvvfeeIcnw9/c3cnNzG631zJkzTZ6LQ4cOGcOHDzf8/PwMSYYkw9fX1wgNDTW++eYb\nS7uJEycaTz31lOHh4WG4uLgYgYGBRmRkpHHs2DGr/vbu3Wt4enoaK1euvOeYO3fuNAIDAy3j3evY\nuXOn5Z5HbW0yMjIM/oy0nSQjIyPD0WU8sph/AABaxDaTYRhGSwWbAAA8isLDwyVJmZmZDq4EsI9t\n27YpIiJC/BlpG5PJpIyMDL7Z6CDMPwAALSKT16gBAAAAAAAA2AVhIwAAaDHHjx+XyWRq9IiMjHR0\nqUCrsn//fiUmJmrHjh0KCAiw/Ft59dVXG7QdN26cPD091b59ez3zzDPKyclxQMW2q6urU0pKikJD\nQxtc+8Mf/qA1a9aotrbWAZUBAABbEDYCAIAW06dPHxmG0eixdetWR5cKtBpLly7VunXrtGjRIk2d\nOlUnT55UYGC
gOnfurE2bNmnPnj1W7b/66itlZmYqLCxMeXl5ll3LW7P8/HyNGjVKcXFxqqysbHB9\n4sSJMpvNGjt2rEpLSx1QIQAAaCrCRgAAALQ5VVVVd30Crq2N0ZjVq1dr69at2rZtmzw9Pa2urVu3\nTu3atdO8efNUVlbmoAofXm5urhISEhQVFaVBgwbds93ChQs1cOBAjR8/XjU1NS1YIQAAsAVhIwAA\nANqcTz/9VCUlJW1+jPspKCjQkiVLtHz5cpnN5gbXQ0NDFRMTo3Pnzuntt992QIX2MXDgQO3YsUMz\nZ86Ui4vLfdsuW7ZMR44cUWpqagtVBwAAbEXYCAAAgGZnGIbWrl2rvn37ysXFRT4+Pnr55Zd1/Phx\nS5vo6Gg5OzvL19fXcu7NN9+Uu7u7TCaTLl++LEmKiYlRfHy8CgsLZTKZFBQUpHXr1slsNqtr166a\nP3++/Pz8ZDabFRoaquzsbLuMIUlffvmlvLy8lJyc3KzzJd15ctEwDE2cOPGebVauXKlevXrpk08+\n0f79++/bX1PWIC0tTe7u7nJzc9Pu3bv10ksvycvLS/7+/tqyZYtVf7W1tXrvvffUo0cPubq6asCA\nAcrIyHi4H90IHx8fjR49WqmpqeyGDgBAK0XYCAAAgGa3bNkyJSYm6t1331VJSYkOHDig4uJijRw5\nUhcvXpR0J1ybPn261X0bNmzQ8uXLrc6lpqYqLCxMgYGBMgxDBQUFio6O1uzZs1VZWamFCxeqqKhI\nOTk5qqmp0QsvvKDi4uKHHkOSZYOSuro6+03OPezZs0e9e/eWm5vbPdu4urrqs88+U7t27TR37lzd\nuHHjnm2bsgYLFixQbGysqqqq5OnpqYyMDBUWFiogIEBz585VdXW1pb+EhAR98MEHSklJ0YULFxQW\nFqZXXnlFP/74o/0m4S6effZZnTt3Trm5uc06DgAAeDCEjQAAAGhWVVVVWrt2raZMmaJZs2apY8eO\nCg4O1kcffaTLly9r48aNdhurQ4cOlif3+vXrp7S0NFVUVCg9Pd0u/U+YMEHl5eVasmSJXfq7lxs3\nbujUqVMKDAxstG1ISIhiY2NVVFSkhISEu7Z5kDUIDQ2Vl5eXunTposjISN24cUNnzpyRJN28eVNp\naWmaPHmypk6dKm9vby1evFhOTk52m+t7efrppyVJx44da9ZxAADAgyFsBAAAQLPKy8vT9evXNWTI\nEKvzQ4cOlbOzs9VrzvY2ZMgQubm5Wb0q3BaUlJTIMIz7PtX4j1auXKnevXtrw4YNOnjwYIPrD7sG\nzs7OkmR5svHEiROqrKxU//79LW1cXV3l6+vb7HNdPyf1T2MCAIDWhbARAAAAzaq0tFSS5OHh0eCa\nt7e3KioqmnV8FxcXXbp0qVnHsLebN29KUqMbptQzm81KT0+XyWTSnDlzVFVVZXXd3mtQ/7r24sWL\nZTKZLMfp06dVWVlpU1+2cnV1lfT3OQIAAK0LYSMAAACalbe3tyTdNdAqLS2Vv79/s41dXV3d7GM0\nh/pArf4bkU0REhKiuLg45efnKykpyeqavdegS5cukqSUlBQZhmF1HDp0yKa+bHX79m1Jf58jAADQ\nuhA2AgAAoFn1799fHh4eDTYOyc7O1u3btzV48GDLuQ4dOlhtQvKwsrKyZBiGhg0b1mxjNIeuXbvK\nZDKprKzMpvuSkpLUp08fHT582Oq8LWvQFN27d5fZbNaRI0dsus8e6uekW7duLT42AABoHGEjAAAA\nmpXZbFZ8fLx27typTZs2qby8XMeOHVNUVJT8/Pw0b948S9ugoCBdvXpVu3btUnV1tS5duqTTp083\n6LNTp046f/68ioqKVFFRYQkP6+rqdO3aNdXU1Ojo0aOKiYlRjx49NHv2bLuMsW/fPnl5eSk5Odn+\nE/UP3NzcFBAQoLNnz9p0X/3r1O3bt29wvqlr0NRx3njjDW3ZskVpaWkqLy9Xb
W2tzp49qwsXLkiS\nIiMj1a1bN+Xk5NjUd2Pq5yQ4ONiu/QIAAPsgbAQAAECzW7p0qVatWqUVK1boscce0+jRo9WzZ09l\nZWXJ3d3d0m7BggUaM2aMZsyYod69eyspKcnyumxISIiKi4slSVFRUeratav69eun8ePH6+rVq5Lu\nfMcvODhYrq6uGjlypHr16qWvv/7a6tuHDztGS5kwYYLy8vKsvr/4xRdfKCgoSIWFhRo6dKjeeuut\nBvcNGzZMcXFxDc43ZQ3S0tKUkpIiSRowYIBOnjypjz/+WPHx8ZKkF198Ufn5+ZKk1NRUxcbGas2a\nNercubP8/PwUExOja9euSbrzunNJSYl2795939/5/fffa8SIEXr88ceVnZ2t3Nxc+fn5afjw4Tpw\n4ECD9j/88IOeeOIJDRgwoCnTCAAAWpjJMAzD0UUAAPBzFh4eLknKzMx0cCWAfWzbtk0RERFqbX9G\nzp8/X5mZmbpy5YqjS7krk8mkjIwMTZ8+vUntCwoK1LdvX6Wnp2vWrFnNXJ391dXV6fnnn9fs2bM1\nZ84cu/R55coV+fv7a+XKlZYAtKlsnX8AAPBAMnmyEQAAAD8btmyo0toFBQVpxYoVWrFiha5fv+7o\ncmxSW1urXbt2qaKiQpGRkXbrd9myZRo0aJCio6Pt1icAALAvwkYAAACglUpMTFR4eLgiIyNt3izG\nkbKysrRjxw7t27dPbm5udulz7dq1OnLkiPbu3SsnJye79AkAAOyPsBEAAABt3qJFi5Senq6ysjI9\n9dRT2r59u6NLspvk5GRFR0fr/fffd3QpTTZ27Fh9/vnn8vX1tUt/u3fv1q1bt5SVlSUfHx+79AkA\nAJpHB0cXAAAAADysVatWadWqVY4uo9mMGzdO48aNc3QZDjNp0iRNmjTJ0WUAAIAm4MlGAAAAAAAA\nAHZB2AgAAAAAAADALggbAQAAAAAAANgFYSMAAAAAAAAAu2CDGAAAWsDZs2e1bds2R5cB2MWhQ4ck\nif+nH0D93AEAAPxcmQzDMBxdBAAAP2fh4eHavn27o8sAgEdeRkaGpk+f7ugyAAD4OcskbAQAAMB9\nbdu2TREREeLPRgAAADQik282AgAAAAAAALALwkYAAAAAAAAAdkHYCAAAAAAAAMAuCBsBAAAAAAAA\n2AVhIwAAAAAAAAC7IGwEAAAAAAAAYBeEjQAAAAAAAADsgrARAAAAAAAAgF0QNgIAAAAAAACwC8JG\nAAAAAAAAAHZB2AgAAAAAAADALggbAQAAAAAAANgFYSMAAAAAAAAAuyBsBAAAAAAAAGAXhI0AAAAA\nAAAA7IKwEQAAAAAAAIBdEDYCAAAAAAAAsAvCRgAAAAAAAAB2QdgIAAAAAAAAwC4IGwEAAAAAAADY\nBWEjAAAAAAAAALsgbAQAAAAAAABgF4SNAAAAAAAAAOyCsBEAAAAAAACAXRA2AgAAAAAAALALwkYA\nAAAAAAAAdkHYCAAAAAAAAMAuCBsBAAAAAAAA2AVhIwAAAAAAAAC7IGwEAAAAAAAAYBeEjQAAAAAA\nAADsgrARAAAAAAAAgF0QNgIAAAAAAACwiw6OLgAAAACtx9mzZ/X666+rtrbWcu7atWvy9PTU888/\nb9W2d+/e+s///M8WrhAAAACtGWEjAAAALPz9/XX69GkVFhY2uPbNN99Y/feoUaNaqiwAAAC0EbxG\nDQAAACuvvfaanJycGm0XGRnZAtUAAACgLSFsBAAAgJWZM2eqpqbmvm2eeeYZ9evXr4UqAgAAQFtB\n2AgAAAArgYGBGjBggEwm012vOzk56fXXX2/hqgAAANAWEDYCAACggddee03t27e/67WamhqFh4e3\ncEUAAABoCwgbAQAA0MCMGTNUV1fX4Hy7du00bNgw9ezZs+WLAgAAQKtH2AgAAIAG/Pz8NHz4cLVr\nZ/3nYrt27fTaa685qCoAAAC0doSNAAAAu
KtXX321wTnDMDRlyhQHVAMAAIC2gLARAAAAdzVt2jSr\n7za2b99ev/jFL9S1a1cHVgUAAIDWjLARAAAAd+Xj46MXXnjBEjgahqFZs2Y5uCoAAAC0ZoSNAAAA\nuKdZs2ZZNopxcnLSyy+/7OCKAAAA0JoRNgIAAOCeJk6cKBcXF0lSWFiYPDw8HFwRAAAAWjPCRgAA\nANyTu7u75WlGXqEGAABAY0yGYRiOLgIAgLYkPDxc27dvd3QZAIBmlpGRoenTpzu6DAAA2pLMDo6u\nAACAtmjYsGGKjY11dBlAi6itrVVGRoZeeeWVB7r/0KFDSk1NVUZGhp0r+3mLiIhQTEyMQkJCHF3K\nIykiIsLRJQAA0CYRNgIA8AD8/f152gWPlMmTJ8tsNj/w/ampqfybsVFERIRCQkKYNwchbAQA4MHw\nzUYAAAA06mGCRgAAADw6CBsBAAAAAAAA2AVhIwAAAAAAAAC7IGwEAAAAAAAAYBeEjQAAAAAAAADs\ngrARAAAAbcLevXvVsWNH/fGPf3R0Ka3e/v37lZiYqB07diggIEAmk0kmk0mvvvpqg7bjxo2Tp6en\n2rdvr2eeeUY5OTkOqNh2dXV1SklJUWhoaINrf/jDH7RmzRrV1tY6oDIAAB5thI0AAABoEwzDcHQJ\nbcLSpUu1bt06LVq0SFOnTtXJkycVGBiozp07a9OmTdqzZ49V+6+++kqZmZkKCwtTXl6ennvuOQdV\n3nT5+fkaNWqU4uLiVFlZ2eD6xIkTZTabNXbsWJWWljqgQgAAHl2EjQAAAGgTJkyYoLKyMoWFhTm6\nFFVVVd31iTpHW716tbZu3apt27bJ09PT6tq6devUrl07zZs3T2VlZQ6q8OHl5uYqISFBUVFRGjRo\n0D3bLVy4UAMHDtT48eNVU1PTghUCAPBoI2wEAAAAbPTpp5+qpKTE0WVYKSgo0JIlS7R8+XKZzeYG\n10NDQxUTE6Nz587p7bffdkCF9jFw4EDt2LFDICEcRAAAGO9JREFUM2fOlIuLy33bLlu2TEeOHFFq\namoLVQcAAAgbAQAA0OodPHhQPXr0kMlk0vr16yVJaWlpcnd3l5ubm3bv3q2XXnpJXl5e8vf315Yt\nWyz3rlu3TmazWV27dtX8+fPl5+cns9ms0NBQZWdnW9pFR0fL2dlZvr6+lnNvvvmm3N3dZTKZdPny\nZUlSTEyM4uPjVVhYKJPJpKCgIEnSl19+KS8vLyUnJ7fElDSwbt06GYahiRMn3rPNypUr1atXL33y\nySfav3//ffszDENr165V37595eLiIh8fH7388ss6fvy4pU1T10CSamtr9d5776lHjx5ydXXVgAED\nlJGR8XA/uhE+Pj4aPXq0UlNTeQ0fAIAWQtgIAACAVm/EiBH67rvvrM4tWLBAsbGxqqqqkqenpzIy\nMlRYWKiAgADNnTtX1dXVku6EiLNnz1ZlZaUWLlyooqIi5eTkqKamRi+88IKKi4sl3Qnrpk+fbjXG\nhg0btHz5cqtzqampCgsLU2BgoAzDUEFBgSRZNiOpq6trljlozJ49e9S7d2+5ubnds42rq6s+++wz\ntWvXTnPnztWNGzfu2XbZsmVKTEzUu+++q5KSEh04cEDFxcUaOXKkLl68KKnpayBJCQkJ+uCDD5SS\nkqILFy4oLCxMr7zyin788Uf7TcJdPPvsszp37pxyc3ObdRwAAHAHYSMAAADavNDQUHl5ealLly6K\njIzUjRs3dObMGas2HTp0sDyl169fP6WlpamiokLp6el2qWHChAkqLy/XkiVL7NKfLW7cuKFTp04p\nMDCw0bYhISGKjY1VUVGREhIS7tqmqqpKa9eu1ZQpUzRr1ix17NhRwcHB+uijj3T58mVt3LixwT33\nW4ObN28qLS1NkydP1tSpU+Xt7a3FixfLycnJbvN/L08//bQk6dixY806DgAAuIOwEQAAAD8rzs7O\nkmT1V
N3dDBkyRG5ublavBbdVJSUlMgzjvk81/qOVK1eqd+/e2rBhgw4ePNjgel5enq5fv64hQ4ZY\nnR86dKicnZ2tXj+/m/+7BidOnFBlZaX69+9vaePq6ipfX99mn//6Oal/GhMAADQvwkYAAAA8slxc\nXHTp0iVHl/HQbt68KUmNbphSz2w2Kz09XSaTSXPmzFFVVZXV9dLSUkmSh4dHg3u9vb1VUVFhU331\nr2svXrxYJpPJcpw+fVqVlZU29WUrV1dXSX+fIwAA0LwIGwEAAPBIqq6uVmlpqfz9/R1dykOrD9Tq\nvxvZFCEhIYqLi1N+fr6SkpKsrnl7e0vSXUPFB5mzLl26SJJSUlJkGIbVcejQIZv6stXt27cl/X2O\nAABA8yJsBAAAwCMpKytLhmFo2LBhlnMdOnRo9PXr1qhr164ymUwqKyuz6b6kpCT16dNHhw8ftjrf\nv39/eXh4NNi8JTs7W7dv39bgwYNtGqd79+4ym806cuSITffZQ/2cdOvWrcXHBgDgUUTYCAAAgEdC\nXV2drl27ppqaGh09elQxMTHq0aOHZs+ebWkTFBSkq1evateuXaqurtalS5d0+vTpBn116tRJ58+f\nV1FRkSoqKlRdXa19+/bJy8tLycnJLfir7nBzc1NAQIDOnj1r0331r1O3b9++wfn4+Hjt3LlTmzZt\nUnl5uY4dO6aoqCj5+flp3rx5No/zxhtvaMuWLUpLS1N5eblqa2t19uxZXbhwQZIUGRmpbt26KScn\nx6a+G1M/J8HBwXbtFwAA3B1hIwAAAFq99evXa+jQoZKkd955R5MmTVJaWppSUlIkSQMGDNDJkyf1\n8ccfKz4+XpL04osvKj8/39LHzZs3FRwcLFdXV40cOVK9evXS119/bfWdwwULFmjMmDGaMWOGevfu\nraSkJMvrtyEhISouLpYkRUVFqWvXrurXr5/Gjx+vq1evtsg83M+ECROUl5dn9f3FL774QkFBQSos\nLNTQoUP11ltvNbhv2LBhiouLa3B+6dKlWrVqlVasWKHHHntMo0ePVs+ePZWVlSV3d3dJsmkNUlNT\nFRsbqzVr1qhz587y8/NTTEyMrl27JunO684lJSXavXv3fX/n999/rxEjRujxxx9Xdna2cnNz5efn\np+HDh+vAgQMN2v/www964oknNGDAgKZMIwAAeEgmwzAMRxcBAEBbEh4eLknKzMx0cCVA27Bt2zZF\nRETIkX92zp8/X5mZmbpy5YrDarCVyWRSRkaGpk+f3qT2BQUF6tu3r9LT0zVr1qxmrs7+6urq9Pzz\nz2v27NmaM2eOXfq8cuWK/P39tXLlSksA2lS2zj8AAJAkZfJkIwAAAB4Jtmye0hYFBQVpxYoVWrFi\nha5fv+7ocmxSW1urXbt2qaKiQpGRkXbrd9myZRo0aJCio6Pt1icAALg/wkYAANqovXv3qmPHjvrj\nH//4sxyvKerq6pSSkqLQ0FC79bl582aZTCa79lmPNUNzS0xMVHh4uCIjI23eLMaRsrKytGPHDu3b\nt09ubm526XPt2rU6cuSI9u7dKycnJ7v0CQAAGkfYCABAG9XSr6S2ti+v5Ofna9SoUYqLi1NlZaXd\n+t28ebMCAwN16NAhFRQU2K1fiTVzlEWLFik9PV1lZWV66qmntH37dkeX1KySk5MVHR2t999/39Gl\nNNnYsWP1+eefy9fX1y797d69W7du3VJWVpZ8fHzs0icAAGgawkYAANqAqqqqBk/aTZgwQWVlZQoL\nC2vz49kqNzdXCQkJioqK0qBBg+zW75UrV/S3v/1Ny5cvlyT97ne/e+C+WLPWY9WqVbp165YMw9Cp\nU6c0bdo0R5fU7MaNG6fVq1c7ugyHmTRpkhITExvssg0AAJofYSMAAG3Ap59+qpKSkp/teLYaOHCg\nduzYoZkzZ1rtJPywtm3bpgkTJmjixIkym836/e9//8BPB7JmAAAAeBQ
RNgIA0AK+/fZb9evXTx07\ndpTZbFZwcLD+9Kc/WbX5/e9/ryFDhshsNsvd3V09e/ZUUlKSYmJiFB8fr8LCQplMJgUFBengwYPq\n0aOHTCaT1q9fL0nq27evTCaT2rVrp8GDB1teLf63f/s3y7ifffZZo/U0dTzpzmu6a9euVd++feXi\n4iIfHx+9/PLLOn78uKVNWlqa3N3d5ebmpt27d+ull16Sl5eX/P39tWXLluacdn355Zfy8vJScnJy\nk9pv3rxZU6ZMkaenp8aNG6eioiJ9++2392zPmgEAAADWCBsBAGgBFy9eVEREhIqKinT+/Hl5eHho\n5syZluupqal67bXXNG3aNJ0/f15nz57VokWLdOLECaWmpiosLEyBgYEyDEMFBQUaMWKEvvvuO6sx\n/vrXv6pnz57q3r27/vznP1s2Wfjggw/0y1/+UqtXr9bs2bMbraep40l3dnpNTEzUu+++q5KSEh04\ncEDFxcUaOXKkLl68KElasGCBYmNjVVVVJU9PT2VkZKiwsFABAQGaO3euqqurm2PKJf199+G6urpG\n2545c0YnTpzQqFGjJEnh4eGS7v0qNWsGAAAANETYCABAC5g2bZqWLl0qHx8fderUSRMnTtSVK1d0\n6dIlVVdXa/ny5RozZowSEhLUqVMn+fj46Je//KWGDh3a5DHat2+vhQsX6syZM9q5c6flfGVlpXbs\n2KE5c+Y0qZ6mqqqq0tq1azVlyhTNmjVLHTt2VHBwsD766CNdvnxZGzdubHBPaGiovLy81KVLF0VG\nRurGjRs6c+ZMk8e01YQJE1ReXq4lS5Y02nbz5s36l3/5F8s33iZOnCgXFxdlZmaqqqrKqi1r1nxr\nBgAAgLatg6MLAADgUeTk5CTpzpN3R48eVWlpqf75n//Zqk19EGWLX/3qV1q2bJlSU1MtT+Zt2rRJ\nL7/8sry8vJpUT1Pl5eXp+vXrGjJkiNX5oUOHytnZWdnZ2fe939nZWZJazVNymzdv1qpVqyz/7eXl\npXHjxumPf/yjdu/ercjISMs11uzB1mzbtm0PdN+j7NChQ44uAQAAwCaEjQAAtIA9e/boww8/VF5e\nnsrLy63CmvLyckmSt7f3Q4/j4eGhf/3Xf9WHH36oP//5z/p//+//6be//a22b9/e5HqaqrS01DLm\n/+Xt7a2KiooH+xEO8Ne//lXHjh27567Nv/vd76zCRtbswURERDRr/z9HqampSk1NdXQZAAAATcZr\n1AAANLMzZ85o8uTJ8vX1VXZ2tsrKyrRmzRrL9ccff1ySdPnyZbuMFx0dLScnJ6WkpOjAgQPq3r27\nAgMDm1xPU9UHbXcLqEpLS+Xv7//gP6KFff7555oxY4YMw7A6rl69KldXV3311Vf66aefLO1Zswfz\nf+eX4/6HJGVkZDi8jkf1AAAAD4awEQCAZnbs2DFVV1drwYIFCggIkNlslslkslzv2bOnOnXqpK++\n+sou4/n7+2v69Onavn27lixZopiYGJvqaar+/fvLw8NDP/74o9X57Oxs3b59W4MHD36o39FSDMPQ\n1q1b9eabbza45uPjo/DwcNXW1mrz5s2W86wZAAAAcHeEjQAANLMePXpIkvbv36+bN28qPz/f6tt4\nLi4uWrRokQ4cOKDo6GidO3dOdXV1qqio0N/+9jdJUqdOnXT+/HkVFRWpoqKi0Vdo4+PjVVNTo2vX\nrumf/umfbKqnqeOZzWbFx8dr586d2rRpk8rLy3Xs2DFFRUXJz89P8+bNs32y7Gzfvn3y8vJScnLy\nPdt899138vLy0vDhw+96PSoqSpL1rtSsGQAAAHAPBgAAsMm0adOMadOm2XTPO++8Y3Tq1Mnw9vY2\nwsPDjfXr1xuSjMDAQOPMmTOGYRjG+vXrjeDgYMNsNhtms9l49tlnjQ0bNhiGYRg5OTnGk08+abi6\nuhojRowwFi9ebPj6+hqSDDc3N2P
ixIkNxhwzZozxySefPFA9TR2vrq7O+PDDD42nn37acHJyMnx8\nfIzJkycbJ06csIy1YcMGw83NzZBkPP3000ZhYaGxceNGw8vLy5BkPPnkk8b//u//2jSfhw4dMoYP\nH274+fkZkgxJhq+vrxEaGmp88803lnZ79+41PD09jZUrV961n1/+8peGu7u70aFDB2PgwIFGTk6O\n1fWkpCSrMZ544gnLmhgGa9ZUGRkZBn922k6SkZGR4egyHlnMPwAAD2SbyTD4IAkAALao3zE4MzPT\nwZUAbcO2bdsUERHBd/BsZDKZlJGRoenTpzu6lEcS8w8AwAPJ5DVqAAAAAAAAAHZB2AgAABzu+PHj\nMplMjR6RkZGOLhUAAADAfRA2AgAAh+vTp48Mw2j02Lp1q6NLBdqE/fv3KzExUTt27FBAQIAlsH/1\n1VcbtB03bpw8PT3Vvn17PfPMM8rJyXFAxbarq6tTSkqKQkNDG1z7wx/+oDVr1qi2ttYBlQEA8Ggj\nbAQAAAB+RpYuXap169Zp0aJFmjp1qk6ePKnAwEB17txZmzZt0p49e6zaf/XVV8rMzFRYWJjy8vL0\n3HPPOajypsvPz9eoUaMUFxenysrKBtcnTpwos9mssWPHqrS01AEVAgDw6CJsBAAAwM9eVVXVXZ+A\na2tjNGb16tXaunWrtm3bJk9PT6tr69atU7t27TRv3jyVlZU5qMKHl5ubq4SEBEVFRWnQoEH3bLdw\n4UINHDhQ48ePV01NTQtWCADAo42wEQAAAD97n376qUpKStr8GPdTUFCgJUuWaPny5TKbzQ2uh4aG\nKiYmRufOndPbb7/tgArtY+DAgdqxY4dmzpwpFxeX+7ZdtmyZjhw5otTU1BaqDgAAEDYCAACg1TEM\nQ2vXrlXfvn3l4uIiHx8fvfzyyzp+/LilTXR0tJydneXr62s59+abb8rd3V0mk0mXL1+WJMXExCg+\nPl6FhYUymUwKCgrSunXrZDab1bVrV82fP19+fn4ym80KDQ1Vdna2XcaQpC+//FJeXl5KTk5u1vmS\n7jy5aBiGJk6ceM82K1euVK9evfTJJ59o//799+2vKWuQlpYmd3d3ubm5affu3XrppZfk5eUlf39/\nbdmyxaq/2tpavffee+rRo4dcXV01YMAAZWRkPNyPboSPj49Gjx6t1NRUGYbRrGMBAIA7CBsBAADQ\n6ixbtkyJiYl69913VVJSogMHDqi4uFgjR47UxYsXJd0J16ZPn25134YNG7R8+XKrc6mpqQoLC1Ng\nYKAMw1BBQYGio6M1e/ZsVVZWauHChSoqKlJOTo5qamr0wgsvqLi4+KHHkGTZoKSurs5+k3MPe/bs\nUe/eveXm5nbPNq6urvrss8/Url07zZ07Vzdu3Lhn26aswYIFCxQbG6uqqip5enoqIyNDhYWFCggI\n0Ny5c1VdXW3pLyEhQR988IFSUlJ04cIFhYWF6ZVXXtGPP/5ov0m4i2effVbnzp1Tbm5us44DAADu\nIGwEAABAq1JVVaW1a9dqypQpmjVrljp27Kjg4GB99NFHunz5sjZu3Gi3sTp06GB5cq9fv35KS0tT\nRUWF0tPT7dL/hAkTVF5eriVLltilv3u5ceOGTp06pcDAwEbbhoSEKDY2VkVFRUpISLhrmwdZg9DQ\nUHl5ealLly6KjIzUjRs3dObMGUnSzZs3lZaWpsmTJ2vq1Kny9vbW4sWL5eTkZLe5vpenn35aknTs\n2LFmHQcAANxB2AgAAIBWJS8vT9evX9eQIUOszg8dOlTOzs5Wrznb25AhQ+Tm5mb1qnBbUFJSIsMw\n7vtU4z9auXKlevfurQ0bNujgwYMNrj/sGjg7O0uS5cnGEydOqLKyUv3797e0cXV1la+vb7PPdf2c\n1D+NCQAAmhdhIwAAAFqV0tJSSZKHh0eDa97e3qqoqGjW8V1cXHTp0qVmHcPebt68KUmNbphSz2w2\
nKz09XSaTSXPmzFFVVZXVdXuvQf3r2osXL5bJZLIcp0+fVmVlpU192crV1VXS3+cIAAA0L8JGAAAA\ntCre3t6SdNdAq7S0VP7+/s02dnV1dbOP0RzqA7X6b0Q2RUhIiOLi4pSfn6+kpCSra/Zegy5dukiS\nUlJSZBiG1XHo0CGb+rLV7du3Jf19jgAAQPMibAQAAECr0r9/f3l4eDTYOCQ7O1u3b9/W4MGDLec6\ndOhgtQnJw8rKypJhGBo2bFizjdEcunbtKpPJpLKyMpvuS0pKUp8+fXT48GGr87asQVN0795dZrNZ\nR44csek+e6ifk27durX42AAAPIoIGwEAANCqmM1mxcfHa+fOndq0aZPKy8t17NgxRUVFyc/PT/Pm\nzbO0DQoK0tWrV7Vr1y5VV1fr0qVLOn36dIM+O3XqpPPnz6uoqEgVFRWW8LCurk7Xrl1TTU2Njh49\nqpiYGPXo0UOzZ8+2yxj79u2Tl5eXkpOT7T9R/8DNzU0BAQE6e/asTffVv07dvn37BuebugZNHeeN\nN97Qli1blJaWpvLyctXW1urs2bO6cOGCJCkyMlLdunVTTk6OTX03pn5OgoOD7dovAAC4O8JGAAAA\ntDpLly7VqlWrtGLFCj322GMaPXq0evbsqaysLLm7u1vaLViwQGPGjNGMGTPUu3dvJSUlWV6XDQkJ\nUXFxsSQpKipKXbt2Vb9+/TR+/HhdvXpV0p3v+AUHB8vV1VUjR45Ur1699PXXX1t9+/Bhx2gpEyZM\nUF5entX3F7/44gsFBQWpsLBQQ4cO1VtvvdXgvmHDhikuLq7B+aasQVpamlJSUiRJAwYM0MmTJ/Xx\nxx8rPj5ekvTiiy8qPz9fkpSamqrY2FitWbNGnTt3lp+fn2JiYnTt2jVJd153Likp0e7du+/7O7//\n/nuNGDFCjz/+uLKzs5Wbmys/Pz8NHz5cBw4caND+hx9+0BNPPKEBAwY0ZRoBAMBDMhmGYTi6CAAA\n2pLw8HBJUmZmpoMrAdqGbdu2KSIiQq3tz8758+crMzNTV65ccXQpd2UymZSRkaHp06c3qX1BQYH6\n9u2r9PR0zZo1q5mrs7+6ujo9//zzmj17tubMmWOXPq9cuSJ/f3+tXLnSEoA2la3zDwAAJEmZPNkI\nAACAR5YtG6q0dkFBQVqxYoVWrFih69evO7ocm9TW1mrXrl2qqKhQZGSk3fpdtmyZBg0apOjoaLv1\nCQAA7o+wEQAAAPiZSExMVHh4uCIjI23eLMaRsrKytGPHDu3bt09ubm526XPt2rU6cuSI9u7dKycn\nJ7v0CQAAGkfYCAAAgEfOokWLlJ6errKyMj311FPavn27o0uym+TkZEVHR+v99993dClNNnbsWH3+\n+efy9fW1S3+7d+/WrVu3lJWVJR8fH7v0CQAAmqaDowsAAAAAWtqqVau0atUqR5fRbMaNG6dx48Y5\nugyHmTRpkiZNmuToMgAAeCTxZCMAAAAAAAAAuyBsBAAAAAAAAGAXhI0AAAAAAAAA7IKwEQAAAAAA\nAIBdsEEMAAAP4Pvvv1d4eLijywDahLNnz0oS/2YeQEpKijIzMx1dBgAAQJMRNgIAYKOQkBBHlwC0\nKf7+/po2bZqjy2hzmDPHmjZtmrp37+7oMgAAaHNMhmEYji4CAAAAAAAAQJuXyTcbAQAAAAAAANgF\nYSMAAAAAAAAAuyBsBAAAAAAAAGAXhI0AAAAAAAAA7OL/AwCxGWOzFC3sAAAAAElFTkSuQmCC\n", 270 | "text/plain": [ 271 | "" 272 | ] 273 | }, 274 | "execution_count": 25, 275 | "metadata": { 276 | "tags": [] 277 | }, 278 | "output_type": "execute_result" 279 | } 280 | ], 281 | "source": [ 282 | 
"tf.keras.utils.plot_model(model, 'boosted_tree.png', show_shapes=True)\n" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": null, 288 | "metadata": { 289 | "colab": {}, 290 | "colab_type": "code", 291 | "id": "wsAR2JFxX4gl" 292 | }, 293 | "outputs": [], 294 | "source": [ 295 | "tree_pred = []\n", 296 | "num_unpack = 3\n", 297 | "for idx in range(0, len(weights), num_unpack):\n", 298 | " param = weights[idx:idx+num_unpack]\n", 299 | " other_idx = idx//num_unpack\n", 300 | " coef, inter, leaf = param\n", 301 | " sparse = sparse_info[other_idx]\n", 302 | " route = routes_list[other_idx]\n", 303 | " #model.layers[other_idx+1].layers[0].set_weights([coef, inter, sparse])\n", 304 | " model.layers[other_idx+1].layers[0].set_weights([coef*sparse, inter])\n", 305 | " model.layers[other_idx+1].layers[2].set_weights([route.T])\n", 306 | " # we didn't transfer leaves...\n", 307 | " model.layers[other_idx+1].layers[4].set_weights([leaf[:, np.newaxis]])\n" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": 31, 313 | "metadata": { 314 | "colab": { 315 | "base_uri": "https://localhost:8080/", 316 | "height": 35 317 | }, 318 | "colab_type": "code", 319 | "id": "B3pd4QUbgf7X", 320 | "outputId": "90906e7d-b562-4728-9649-795bbc8d3c94" 321 | }, 322 | "outputs": [ 323 | { 324 | "data": { 325 | "text/plain": [ 326 | "" 327 | ] 328 | }, 329 | "execution_count": 31, 330 | "metadata": { 331 | "tags": [] 332 | }, 333 | "output_type": "execute_result" 334 | } 335 | ], 336 | "source": [ 337 | "# let's train and see what happens\n", 338 | "model.compile(loss='binary_crossentropy',\n", 339 | " optimizer=tf.keras.optimizers.Adam(),\n", 340 | " metrics=['accuracy'])\n", 341 | "model.fit(X, y, epochs=1000, verbose=0)" 342 | ] 343 | }, 344 | { 345 | "cell_type": "code", 346 | "execution_count": 37, 347 | "metadata": { 348 | "colab": { 349 | "base_uri": "https://localhost:8080/", 350 | "height": 35 351 | }, 352 | "colab_type": "code", 353 | "id": 
"awW-laqXgehz", 354 | "outputId": "e92bad9d-ff97-4e6b-c50c-f34867a8a6c3" 355 | }, 356 | "outputs": [ 357 | { 358 | "data": { 359 | "text/plain": [ 360 | "0.78" 361 | ] 362 | }, 363 | "execution_count": 37, 364 | "metadata": { 365 | "tags": [] 366 | }, 367 | "output_type": "execute_result" 368 | } 369 | ], 370 | "source": [ 371 | "accuracy_score(y, np.round(model.predict(X)))" 372 | ] 373 | }, 374 | { 375 | "cell_type": "code", 376 | "execution_count": 38, 377 | "metadata": { 378 | "colab": { 379 | "base_uri": "https://localhost:8080/", 380 | "height": 35 381 | }, 382 | "colab_type": "code", 383 | "id": "8-4E88miZCij", 384 | "outputId": "b649f10e-9bb4-4e11-de54-5e02c9224e42" 385 | }, 386 | "outputs": [ 387 | { 388 | "data": { 389 | "text/plain": [ 390 | "0.81" 391 | ] 392 | }, 393 | "execution_count": 38, 394 | "metadata": { 395 | "tags": [] 396 | }, 397 | "output_type": "execute_result" 398 | } 399 | ], 400 | "source": [ 401 | "accuracy_score(y, lgb_model.predict(X))\n" 402 | ] 403 | } 404 | ], 405 | "metadata": { 406 | "colab": { 407 | "collapsed_sections": [], 408 | "name": "treegrad_binary.ipynb", 409 | "provenance": [], 410 | "version": "0.3.2" 411 | }, 412 | "kernelspec": { 413 | "display_name": "Python [default]", 414 | "language": "python", 415 | "name": "python3" 416 | }, 417 | "language_info": { 418 | "codemirror_mode": { 419 | "name": "ipython", 420 | "version": 3 421 | }, 422 | "file_extension": ".py", 423 | "mimetype": "text/x-python", 424 | "name": "python", 425 | "nbconvert_exporter": "python", 426 | "pygments_lexer": "ipython3", 427 | "version": "3.6.5" 428 | } 429 | }, 430 | "nbformat": 4, 431 | "nbformat_minor": 2 432 | } 433 | --------------------------------------------------------------------------------