├── examples ├── slda ├── modules │ ├── __init__.py │ └── helpers.py ├── KL_thresh_rtm.ipynb ├── KL_thresh_blslda.ipynb └── KL_thresh_lda.ipynb ├── slda ├── tests │ ├── __init__.py │ └── test_slda.py ├── __init__.py ├── topic_models.py └── _topic_models.pyx ├── setup.cfg ├── environment.yml ├── .gitignore ├── LICENSE ├── .travis.yml ├── setup.py └── README.md /examples/slda: -------------------------------------------------------------------------------- 1 | ../slda -------------------------------------------------------------------------------- /slda/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/modules/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | -------------------------------------------------------------------------------- /slda/__init__.py: -------------------------------------------------------------------------------- 1 | from .topic_models import (LDA, SLDA, BLSLDA, GRTM) 2 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: slda 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - cython 6 | - jupyter 7 | - matplotlib 8 | - numpy 9 | - pypolyagamma 10 | - pytest 11 | - python=3 12 | - scikit-learn 13 | - scipy 14 | - seaborn 15 | - cythongsl 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 
5 | # C extensions 6 | *.so 7 | *.c 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *.cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | 60 | *.ipynb_checkpoints 61 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2016 Savvysherpa, Inc. 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | 6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Config file for automatic testing at travis-ci.org 2 | 3 | language: python 4 | 5 | env: 6 | - USE_CYTHON=True 7 | 8 | before_install: 9 | # conda instructions from http://conda.pydata.org/docs/travis.html 10 | - sudo apt-get update 11 | # We do this conditionally because it saves us some downloading if the 12 | # version is the same. 13 | - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then 14 | wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; 15 | else 16 | wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; 17 | fi 18 | - bash miniconda.sh -b -p $HOME/miniconda 19 | - export PATH="$HOME/miniconda/bin:$PATH" 20 | - hash -r 21 | - conda config --set always_yes yes --set changeps1 no 22 | - conda update -q conda 23 | # Useful for debugging any issues with conda 24 | - conda info -a 25 | install: 26 | - conda env create 27 | - source activate slda 28 | - python setup.py build_ext --inplace 29 | 30 | # command to run tests, e.g. 
python setup.py test 31 | script: py.test slda 32 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from setuptools import setup 4 | from setuptools.extension import Extension 5 | 6 | try: 7 | import numpy as np 8 | import cython_gsl 9 | except ImportError: 10 | print("Please install numpy and cythongsl.") 11 | 12 | # Dealing with Cython 13 | USE_CYTHON = os.environ.get('USE_CYTHON', False) 14 | ext = '.pyx' if USE_CYTHON else '.c' 15 | 16 | extensions = [ 17 | Extension('slda._topic_models', ['slda/_topic_models' + ext], 18 | libraries=cython_gsl.get_libraries(), 19 | library_dirs=[cython_gsl.get_library_dir()], 20 | include_dirs=[np.get_include(), cython_gsl.get_include()],), 21 | ] 22 | 23 | if USE_CYTHON: 24 | from Cython.Build import cythonize 25 | extensions = cythonize(extensions) 26 | 27 | setup( 28 | name='slda', 29 | version='0.1.6', 30 | description='''Cython implementations of Gibbs sampling for latent 31 | Dirichlet allocation and its supervised variants''', 32 | author='Berton Earnshaw, Mimi Felicilda', 33 | author_email='bearnshaw@savvysherpa.com, lfelicilda@savvysherpa.com', 34 | url='https://github.com/Savvysherpa/slda', 35 | license="MIT", 36 | packages=['slda'], 37 | ext_modules=extensions, 38 | install_requires=[ 39 | 'Cython >= 0.20.1', 40 | 'cythongsl', 41 | 'numpy', 42 | 'pypolyagamma', 43 | 'pytest', 44 | 'scikit-learn', 45 | 'scipy', 46 | ], 47 | classifiers=[ 48 | 'Intended Audience :: Science/Research', 49 | 'Programming Language :: Python',], 50 | keywords=['lda', 'slda', 'supervised', 'latent', 'Dirichlet', 'allocation'], 51 | platforms='ALL', 52 | ) 53 | -------------------------------------------------------------------------------- /examples/modules/helpers.py: -------------------------------------------------------------------------------- 1 | from math import ceil, sqrt 2 | from 
itertools import product 3 | from numpy.linalg import norm 4 | from scipy.stats import entropy 5 | 6 | 7 | def plot_images(mpl_plt, images, image_shape, layout=None, titles=None, 8 | imshow_kwargs={'cmap': 'gray', 'interpolation': 'nearest'}, 9 | **kwargs): 10 | """ 11 | Plots sequence of images. 12 | 13 | Parameters 14 | ---------- 15 | mpl_plt : matplotlib.plot 16 | A matplotlib plot handler. 17 | 18 | images : array-like 19 | An array of images, all of the same shape. 20 | 21 | image_shape : 2-tuple 22 | The 2-dimensional shape of each image. 23 | 24 | layout : 2-tuple (optional) 25 | Tuple of form (n_rows, n_cols) describing layout of images. 26 | If None, automatically calculates the most-square layout. 27 | 28 | titles : array-like (optional) 29 | An array of titles. Should be same length as images. 30 | 31 | cmap : matplotlib.colormap (optional) 32 | A colormap for the images. Default is 'gray'. 33 | 34 | kwargs : key-value pairs (optional) 35 | Keyword arguments to be passed to mpl_plt. 36 | 37 | Returns 38 | ------- 39 | fig : matplotlib.plot.Figure 40 | Matplotlib figure. 41 | 42 | Notes 43 | ----- 44 | This function should show up in zeku at some point, but zeku needs to be 45 | ported to python 3 first. 
46 | """ 47 | n = len(images) 48 | if layout: 49 | n_rows, n_cols = layout 50 | else: 51 | n_rows = int(ceil(sqrt(n))) 52 | if n <= n_rows * (n_rows - 1): 53 | n_cols = n_rows - 1 54 | else: 55 | n_cols = n_rows 56 | titles = titles or range(n) 57 | if len(titles) != n: 58 | raise ValueError('titles should be the same length as images') 59 | fig, axes = mpl_plt.subplots(n_rows, n_cols, **kwargs) 60 | if n_rows == 1: 61 | pairs = range(n_cols) 62 | elif n_cols == 1: 63 | pairs = range(n_rows) 64 | else: 65 | pairs = product(range(n_rows), range(n_cols)) 66 | pairs = list(pairs)[:n] 67 | for title, image, pair in zip(titles, images, pairs): 68 | axes[pair].imshow(image.reshape(*image_shape), **imshow_kwargs) 69 | axes[pair].set_title('%s' % title) 70 | 71 | 72 | def JSD(P, Q): 73 | M = 0.5 * (P + Q) 74 | return 0.5 * (entropy(P, M) + entropy(Q, M)) 75 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # slda 2 | This repository contains [Cython](http://cython.org/) implementations of [Gibbs 3 | sampling](https://en.wikipedia.org/wiki/Gibbs_sampling) for [latent Dirichlet 4 | allocation](https://en.wikipedia.org/wiki/Latent_Dirichlet_allocation) and 5 | various supervised LDAs: 6 | 7 | - supervised LDA (linear regression) 8 | - binary logistic supervised LDA (logistic regression) 9 | - binary logistic hierarchical supervised LDA (trees) 10 | - generalized relational topic models (graphs) 11 | 12 | [![Build Status](https://travis-ci.org/Savvysherpa/slda.png)](https://travis-ci.org/Savvysherpa/slda) 13 | 14 | ## Installation 15 | 16 | ### The easy way 17 | Use the conda-forge version [here](https://github.com/conda-forge/slda-feedstock). 18 | 19 | ### The hard way... 20 | (Kept for posterity's sake.) 
21 | 22 | ### Dependencies 23 | 24 | #### GNU Scientific Library 25 | This module depends on [GSL](http://www.gnu.org/software/gsl/), please install 26 | it. For macosx users using [homebrew](http://brew.sh/), this is as simple as 27 | ```bash 28 | $ brew install gsl 29 | ``` 30 | 31 | #### pypolyagamma-3 and gcc 32 | This package depends on [pypolyagamma-3](https://github.com/Savvysherpa/pypolyagamma), 33 | which is a bit of a pain because `pypolyagamma-3` requires a C/C++ compiler with 34 | [OpenMP](http://openmp.org/) support. Unfortunately for macosx users, Apple's native 35 | compiler, clang, does not ship with that support, so you need to install and 36 | use one that does. For macosx users using [homebrew](http://brew.sh/), 37 | this is as simple as: 38 | ```bash 39 | $ brew install gcc --without-multilib 40 | ``` 41 | This will install a version of `gcc` with OpenMP support. However, Apple makes 42 | things worse by aliasing gcc to point to clang! So you need to explicitly tell 43 | the shell which gcc compiler to use. As of the writing of this README, brew 44 | installs major version 6 of gcc, and as a result will create a binary called 45 | gcc-6 in your path. So export the following to your shell 46 | ```bash 47 | $ export CC=gcc-6 CXX=g++-6 48 | ``` 49 | or you can prefix the commands below with `CC=gcc-6 CXX=g++-6`. 50 | 51 | As a result of this export, it may turn out that your shell cannot find the 52 | libraries associated with gcc. If this is the case, specify the path to your gcc 53 | library in the environment variable `DYLD_LIBRARY_PATH`. 
For example, if 54 | you used `brew` to install gcc as above, then this is probably the right thing 55 | to do: 56 | ```bash 57 | $ export DYLD_LIBRARY_PATH=/usr/local/Cellar/gcc/6.1.0/lib/gcc/6/ 58 | ``` 59 | 60 | ### Instructions 61 | 62 | #### Conda environment 63 | 64 | First create the conda environment by running 65 | ```bash 66 | $ conda env create 67 | ``` 68 | This will install a conda environment called `slda`, defined in 69 | `environment.yml`, that contains all the dependencies. Activate it by running 70 | ```bash 71 | $ source activate slda 72 | ``` 73 | Next we need to compile the C code in this repository. To do this, run 74 | ```bash 75 | $ python setup.py build_ext --inplace 76 | ``` 77 | 78 | #### pip install slda 79 | 80 | If you want slda installed in your environment, run: 81 | ```bash 82 | $ pip install . 83 | ``` 84 | 85 | ## Tests 86 | 87 | To run the tests, run 88 | ```bash 89 | $ py.test slda 90 | ``` 91 | This may take as long as 15 minutes, so be patient. 92 | 93 | ## License 94 | 95 | This code is open source under the MIT license. 
96 | 97 | Many thanks to [Allen Riddell](https://github.com/ariddell) and his [LDA 98 | library](https://github.com/ariddell/lda) for inspiration (and code :) 99 | -------------------------------------------------------------------------------- /slda/tests/test_slda.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | from itertools import product 4 | from scipy.stats import entropy as KL_divergence 5 | from sklearn.cross_validation import StratifiedKFold 6 | from ..topic_models import LDA, SLDA, BLSLDA, GRTM 7 | 8 | 9 | def gen_topics(rows): 10 | topics = [] 11 | topic_base = np.concatenate((np.ones((1, rows)) * (1/rows), 12 | np.zeros((rows-1, rows))), axis=0).ravel() 13 | for i in range(rows): 14 | topics.append(np.roll(topic_base, i * rows)) 15 | topic_base = np.concatenate((np.ones((rows, 1)) * (1/rows), 16 | np.zeros((rows, rows-1))), axis=1).ravel() 17 | for i in range(rows): 18 | topics.append(np.roll(topic_base, i)) 19 | return np.array(topics) 20 | 21 | 22 | def gen_thetas(alpha, D): 23 | return np.random.dirichlet(alpha, size=D) 24 | 25 | 26 | def gen_doc(seed, K, N, thetas, V, topics, D): 27 | topic_assignments = np.array([np.random.choice(range(K), size=N, p=theta) 28 | for theta in thetas]) 29 | word_assignments = \ 30 | np.array([[np.random.choice(range(V), size=1, 31 | p=topics[topic_assignments[d, n]])[0] 32 | for n in range(N)] for d in range(D)]) 33 | return np.array([np.histogram(word_assignments[d], bins=V, 34 | range=(0, V - 1))[0] for d in range(D)]) 35 | 36 | 37 | def language(document_size): 38 | # Generate topics 39 | # We assume a vocabulary of 'rows'^2 terms, and create 'rows'*2 "topics", 40 | # where each topic assigns exactly 'rows' consecutive terms equal 41 | # probability. 
42 | rows = 3 43 | V = rows * rows 44 | K = rows * 2 45 | N = K * K 46 | D = document_size 47 | seed = 42 48 | topics = gen_topics(rows) 49 | 50 | # Generate documents from topics 51 | # We generate D documents from these V topics by sampling D topic 52 | # distributions, one for each document, from a Dirichlet distribution with 53 | # parameter α=(1,…,1) 54 | alpha = np.ones(K) 55 | np.random.seed(seed) 56 | thetas = gen_thetas(alpha, D) 57 | doc_term_matrix = gen_doc(seed, K, N, thetas, V, topics, D) 58 | return {'V': V, 'K': K, 'D': D, 'seed': seed, 'alpha': alpha, 59 | 'topics': topics, 'thetas': thetas, 60 | 'doc_term_matrix': doc_term_matrix, 'n_report_iters': 100} 61 | 62 | 63 | def assert_probablity_distribution(results): 64 | assert (results >= 0).all() 65 | assert results.sum(axis=1).all() 66 | 67 | 68 | def check_KL_divergence(topics, results, thresh): 69 | for res in results: 70 | minimized_KL = 1 71 | for topic in topics: 72 | KL = KL_divergence(topic, res) 73 | if KL < minimized_KL: 74 | minimized_KL = KL 75 | print(minimized_KL) 76 | assert minimized_KL < thresh 77 | 78 | 79 | def test_lda(): 80 | l = language(10000) 81 | n_iter = 2000 82 | KL_thresh = 0.001 83 | 84 | np.random.seed(l['seed']) 85 | _beta = np.repeat(0.01, l['V']) 86 | lda = LDA(l['K'], l['alpha'], _beta, n_iter, seed=l['seed'], 87 | n_report_iter=l['n_report_iters']) 88 | lda.fit(l['doc_term_matrix']) 89 | 90 | assert_probablity_distribution(lda.phi) 91 | check_KL_divergence(l['topics'], lda.phi, KL_thresh) 92 | 93 | 94 | def test_slda(): 95 | l = language(10000) 96 | n_iter = 2000 97 | KL_thresh = 0.001 98 | 99 | nu2 = l['K'] 100 | sigma2 = 1 101 | np.random.seed(l['seed']) 102 | eta = np.random.normal(scale=nu2, size=l['K']) 103 | y = [np.dot(eta, l['thetas'][i]) for i in range(l['D'])] + \ 104 | np.random.normal(scale=sigma2, size=l['D']) 105 | _beta = np.repeat(0.01, l['V']) 106 | _mu = 0 107 | slda = SLDA(l['K'], l['alpha'], _beta, _mu, nu2, sigma2, n_iter, 108 | seed=l['seed'], 
n_report_iter=l['n_report_iters']) 109 | slda.fit(l['doc_term_matrix'], y) 110 | 111 | assert_probablity_distribution(slda.phi) 112 | check_KL_divergence(l['topics'], slda.phi, KL_thresh) 113 | 114 | 115 | def test_blslda(): 116 | l = language(10000) 117 | n_iter = 1500 118 | KL_thresh = 0.03 119 | 120 | mu = 0. 121 | nu2 = 1. 122 | np.random.seed(l['seed']) 123 | eta = np.random.normal(loc=mu, scale=nu2, size=l['K']) 124 | zeta = np.array([np.dot(eta, l['thetas'][i]) for i in range(l['D'])]) 125 | y = (zeta >= 0).astype(int) 126 | _beta = np.repeat(0.01, l['V']) 127 | _b = 7.25 128 | blslda = BLSLDA(l['K'], l['alpha'], _beta, mu, nu2, _b, n_iter, 129 | seed=l['seed'], 130 | n_report_iter=l['n_report_iters']) 131 | blslda.fit(l['doc_term_matrix'], y) 132 | 133 | assert_probablity_distribution(blslda.phi) 134 | check_KL_divergence(l['topics'], blslda.phi, KL_thresh) 135 | 136 | 137 | def test_grtm(): 138 | l = language(1000) 139 | n_iter = 1000 140 | KL_thresh = 0.3 141 | 142 | mu = 0. 143 | nu2 = 1. 144 | np.random.seed(l['seed']) 145 | H = np.random.normal(loc=mu, scale=nu2, size=(l['K'], l['K'])) 146 | zeta = pd.DataFrame([(i, j, np.dot(np.dot(l['thetas'][i], H), 147 | l['thetas'][j])) 148 | for i, j in product(range(l['D']), repeat=2)], 149 | columns=('tail', 'head', 'zeta')) 150 | zeta['y'] = (zeta.zeta >= 0).astype(int) 151 | y = zeta[['tail', 'head', 'y']].values 152 | skf = StratifiedKFold(y[:, 2], n_folds=100) 153 | _, train_idx = next(iter(skf)) 154 | _K = l['K'] 155 | _alpha = l['alpha'][:_K] 156 | _beta = np.repeat(0.01, l['V']) 157 | _b = 1. 
158 | grtm = GRTM(_K, _alpha, _beta, mu, nu2, _b, n_iter, seed=l['seed'], 159 | n_report_iter=l['n_report_iters']) 160 | grtm.fit(l['doc_term_matrix'], y[train_idx]) 161 | 162 | assert_probablity_distribution(grtm.phi) 163 | check_KL_divergence(l['topics'], grtm.phi, KL_thresh) 164 | -------------------------------------------------------------------------------- /examples/KL_thresh_rtm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# This notebook is used to decide on a tolerable level of corruptableness.\n", 12 | "%matplotlib inline\n", 13 | "\n", 14 | "import numpy as np\n", 15 | "from scipy.stats import entropy as KL_divergence\n", 16 | "import pandas as pd\n", 17 | "import matplotlib.pyplot as plt\n", 18 | "\n", 19 | "from slda.topic_models import RTM\n", 20 | "from modules.helpers import plot_images" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 2, 26 | "metadata": { 27 | "collapsed": false 28 | }, 29 | "outputs": [ 30 | { 31 | "ename": "AttributeError", 32 | "evalue": "'NoneType' object has no attribute 'getnnz'", 33 | "output_type": "error", 34 | "traceback": [ 35 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 36 | "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", 37 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[0mrtm\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mRTM\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_K\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_alpha\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_beta\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_mu\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_sigma2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_nu\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mn_iter\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mseed\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m42\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 52\u001b[0;31m \u001b[0mrtm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdoc_term_matrix\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 53\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgrtm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mphi\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 38 | "\u001b[0;32m/Users/mattburbidge/github/ss/lda-cython/lda_cython/topic_models.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, X, y)\u001b[0m\n\u001b[1;32m 466\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_topics\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_docs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_terms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_tokens\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 467\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0malpha\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbeta\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmu\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msigma2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnu\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 468\u001b[0;31m doc_lookup, term_lookup, self.adjacency_matrix, self.seed)\n\u001b[0m\u001b[1;32m 469\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 470\u001b[0m \u001b[0;32mclass\u001b[0m 
\u001b[0mBLHSLDA\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mTopicModelBase\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 39 | "\u001b[0;32m/Users/mattburbidge/github/ss/lda-cython/lda_cython/_topic_models.pyx\u001b[0m in \u001b[0;36mlda_cython._topic_models.gibbs_sampler_rtm (lda_cython/_topic_models.c:14586)\u001b[0;34m()\u001b[0m\n", 40 | "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'getnnz'" 41 | ] 42 | } 43 | ], 44 | "source": [ 45 | " # Generate topics\n", 46 | "# We assume a vocabulary of 'rows'^2 terms, and create 'rows'*2 \"topics\",\n", 47 | "# where each topic assigns exactly 'rows' consecutive terms equal probability.\n", 48 | "rows = 3\n", 49 | "V = rows * rows\n", 50 | "K = rows * 2\n", 51 | "N = K * K\n", 52 | "D = 10000\n", 53 | "seed = 42\n", 54 | "topics = []\n", 55 | "topic_base = np.concatenate((np.ones((1, rows)) * (1/rows),\n", 56 | " np.zeros((rows-1, rows))), axis=0).ravel()\n", 57 | "for i in range(rows):\n", 58 | " topics.append(np.roll(topic_base, i * rows))\n", 59 | "topic_base = np.concatenate((np.ones((rows, 1)) * (1/rows),\n", 60 | " np.zeros((rows, rows-1))), axis=1).ravel()\n", 61 | "for i in range(rows):\n", 62 | " topics.append(np.roll(topic_base, i))\n", 63 | "topics = np.array(topics)\n", 64 | "\n", 65 | "# Generate documents from topics\n", 66 | "# We generate D documents from these V topics by sampling D topic\n", 67 | "# distributions, one for each document, from a Dirichlet distribution with\n", 68 | "# parameter α=(1,…,1)\n", 69 | "alpha = np.ones(K)\n", 70 | "np.random.seed(seed)\n", 71 | "thetas = np.random.dirichlet(alpha, size=D)\n", 72 | "topic_assignments = np.array([np.random.choice(range(K), size=N, p=theta)\n", 73 | " for theta in thetas])\n", 74 | "word_assignments = np.array([[np.random.choice(range(V), size=1,\n", 75 | " p=topics[topic_assignments[d, n]])[0]\n", 76 | " for n in range(N)] for d in range(D)])\n", 77 | "doc_term_matrix = 
np.array([np.histogram(word_assignments[d], bins=V,\n", 78 | " range=(0, V - 1))[0] for d in range(D)])\n", 79 | "\n", 80 | "# choose parameter values\n", 81 | "mu = 0.\n", 82 | "sigma2 = 1.\n", 83 | "nu = 1.\n", 84 | "np.random.seed(14)\n", 85 | "\n", 86 | "# Estimate parameters\n", 87 | "_K = K\n", 88 | "_alpha = alpha[:_K]\n", 89 | "_beta = np.repeat(0.01, V)\n", 90 | "_mu = mu\n", 91 | "_sigma2 = sigma2\n", 92 | "_nu = nu\n", 93 | "n_iter = 1000\n", 94 | "rtm = RTM(_K, _alpha, _beta, _mu, _sigma2, _nu, n_iter, seed=42)\n", 95 | "\n", 96 | "rtm.fit(doc_term_matrix)\n", 97 | "results = grtm.phi" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": { 104 | "collapsed": false 105 | }, 106 | "outputs": [], 107 | "source": [ 108 | "for res in results:\n", 109 | " minimized_KL = 1\n", 110 | " for topic in topics:\n", 111 | " KL = KL_divergence(topic, res)\n", 112 | " if KL < minimized_KL:\n", 113 | " minimized_KL = KL\n", 114 | " print(minimized_KL)" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "metadata": { 121 | "collapsed": true 122 | }, 123 | "outputs": [], 124 | "source": [ 125 | "plot_images(plt, results, (rows, rows), (2, rows))\n", 126 | "plt.figure()\n", 127 | "plt.plot(rtm.loglikelihoods)\n", 128 | "plt.figure()\n", 129 | "plt.plot(np.diff(rtm.loglikelihoods)[-100:])" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": { 136 | "collapsed": true 137 | }, 138 | "outputs": [], 139 | "source": [] 140 | } 141 | ], 142 | "metadata": { 143 | "kernelspec": { 144 | "display_name": "Python 3", 145 | "language": "python", 146 | "name": "python3" 147 | }, 148 | "language_info": { 149 | "codemirror_mode": { 150 | "name": "ipython", 151 | "version": 3 152 | }, 153 | "file_extension": ".py", 154 | "mimetype": "text/x-python", 155 | "name": "python", 156 | "nbconvert_exporter": "python", 157 | "pygments_lexer": "ipython3", 158 | 
"version": "3.5.1" 159 | }, 160 | "widgets": { 161 | "state": {}, 162 | "version": "1.1.2" 163 | } 164 | }, 165 | "nbformat": 4, 166 | "nbformat_minor": 0 167 | } 168 | -------------------------------------------------------------------------------- /slda/topic_models.py: -------------------------------------------------------------------------------- 1 | """ 2 | Topic models using Gibbs sampling. 3 | 4 | Draws from Allen Riddell's LDA library https://github.com/ariddell/lda 5 | """ 6 | 7 | import numpy as np 8 | from sklearn.base import BaseEstimator, TransformerMixin 9 | from scipy.sparse import issparse 10 | from ._topic_models import (gibbs_sampler_lda, gibbs_sampler_slda, 11 | gibbs_sampler_blslda, gibbs_sampler_grtm, 12 | gibbs_sampler_rtm, gibbs_sampler_blhslda, 13 | iterated_pseudo_counts) 14 | 15 | 16 | class TopicModelBase(BaseEstimator, TransformerMixin): 17 | """ 18 | Base class for topic models. 19 | """ 20 | n_topics = None 21 | alpha = None 22 | beta = None 23 | theta = None 24 | phi = None 25 | loglikelihoods = None 26 | 27 | def __init__(self): 28 | raise NotImplementedError 29 | 30 | def _create_lookups(self, X): 31 | """ 32 | Create document and term lookups for all tokens. 33 | """ 34 | docs, terms = np.nonzero(X) 35 | if issparse(X): 36 | x = np.array(X[docs, terms])[0] 37 | else: 38 | x = X[docs, terms] 39 | doc_lookup = np.ascontiguousarray(np.repeat(docs, x), dtype=np.intc) 40 | term_lookup = np.ascontiguousarray(np.repeat(terms, x), dtype=np.intc) 41 | return doc_lookup, term_lookup 42 | 43 | def fit(self): 44 | """ 45 | Estimate the topic distributions per document (theta) and term 46 | distributions per topic (phi). 47 | 48 | Parameters 49 | ---------- 50 | X : array-like, shape = (n_docs, n_terms) 51 | The document-term matrix 52 | """ 53 | 54 | raise NotImplementedError 55 | 56 | def fit_transform(self, X): 57 | """ 58 | Estimate the topic distributions per document (theta) and term 59 | distributions per topic (phi), then return theta. 
60 | 61 | Parameters 62 | ---------- 63 | X : array-like, shape = (n_docs, n_terms) 64 | The document-term matrix 65 | 66 | Returns 67 | _______ 68 | theta : numpy array, shape = (n_docs, n_topics) 69 | The topic distribution of each document 70 | """ 71 | 72 | self.fit(X) 73 | return self.theta 74 | 75 | def transform(self, X, max_iter=20, tol=1e-16): 76 | """ 77 | Estimate the topic distributions of new documents given the fit model. 78 | """ 79 | 80 | if self.phi is None: 81 | raise RuntimeError('self.phi is None, which means the model has ' + 82 | 'not been fit yet. Please fit the model first.') 83 | n_docs, n_topics = X.shape 84 | doc_lookup, term_lookup = self._create_lookups(X) 85 | return iterated_pseudo_counts(doc_lookup, term_lookup, n_docs, 86 | self.alpha, self.beta, self.phi, 87 | max_iter, tol) 88 | 89 | 90 | class LDA(TopicModelBase): 91 | """ 92 | Latent Dirichlet allocation, using collapsed Gibbs sampling implemented in 93 | Cython. 94 | 95 | Parameters 96 | ---------- 97 | n_topics : int 98 | Number of topics 99 | 100 | alpha : array-like, shape = (n_topics,) 101 | Dirichlet distribution parameter for each document's topic 102 | distribution. 103 | 104 | beta : array-like, shape = (n_terms,) 105 | Dirichlet distribution parameter for each topic's term distribution. 106 | 107 | n_iter : int, default=500 108 | Number of iterations of Gibbs sampler 109 | 110 | n_report_iter : int, default=10 111 | Number of iterations of Gibbs sampler between progress reports. 
112 | 113 | random_state : int, optional 114 | Seed for random number generator 115 | """ 116 | 117 | def __init__(self, n_topics, alpha, beta, n_iter=500, n_report_iter=10, 118 | seed=None): 119 | self.n_topics = n_topics 120 | self.alpha = np.ascontiguousarray(alpha, dtype=np.float64) 121 | self.beta = np.ascontiguousarray(beta, dtype=np.float64) 122 | self.n_iter = n_iter 123 | self.n_report_iter = n_report_iter 124 | self.seed = seed 125 | 126 | def fit(self, X): 127 | """ 128 | Estimate the topic distributions per document (theta) and term 129 | distributions per topic (phi). 130 | 131 | Parameters 132 | ---------- 133 | X : array-like, shape = (n_docs, n_terms) 134 | The document-term matrix 135 | """ 136 | 137 | self.doc_term_matrix = X 138 | self.n_docs, self.n_terms = X.shape 139 | self.n_tokens = X.sum() 140 | doc_lookup, term_lookup = self._create_lookups(X) 141 | # iterate 142 | self.theta, self.phi, self.loglikelihoods = gibbs_sampler_lda( 143 | self.n_iter, self.n_report_iter, 144 | self.n_topics, self.n_docs, self.n_terms, self.n_tokens, 145 | self.alpha, self.beta, doc_lookup, term_lookup, self.seed) 146 | 147 | 148 | class SLDA(TopicModelBase): 149 | """ 150 | Supervised (regression) latent Dirichlet allocation, using collapsed Gibbs 151 | sampling implemented in Cython. 152 | 153 | Parameters 154 | ---------- 155 | n_topics : int 156 | Number of topics 157 | 158 | alpha : array-like, shape = (n_topics,) 159 | Dirichlet distribution parameter for each document's topic 160 | distribution. 161 | 162 | beta : array-like, shape = (n_terms,) 163 | Dirichlet distribution parameter for each topic's term distribution. 164 | 165 | mu : float 166 | Mean of regression coefficients (eta). 167 | 168 | nu2 : float 169 | Variance of regression coefficients (eta). 170 | 171 | sigma2 : float 172 | Variance of response (y). 
173 | 174 | n_iter : int, default=500 175 | Number of iterations of Gibbs sampler 176 | 177 | n_report_iter : int, default=10 178 | Number of iterations of Gibbs sampler between progress reports. 179 | 180 | random_state : int, optional 181 | Seed for random number generator 182 | """ 183 | 184 | def __init__(self, n_topics, alpha, beta, mu, nu2, sigma2, n_iter=500, 185 | n_report_iter=10, seed=None): 186 | self.n_topics = n_topics 187 | self.alpha = alpha 188 | self.beta = beta 189 | self.mu = mu 190 | self.nu2 = nu2 191 | self.sigma2 = sigma2 192 | self.n_iter = n_iter 193 | self.n_report_iter = n_report_iter 194 | self.seed = seed 195 | 196 | def fit(self, X, y): 197 | """ 198 | Estimate the topic distributions per document (theta), term 199 | distributions per topic (phi), and regression coefficients (eta). 200 | 201 | Parameters 202 | ---------- 203 | X : array-like, shape = (n_docs, n_terms) 204 | The document-term matrix. 205 | 206 | y : array-like, shape = (n_docs,) 207 | Response values for each document. 208 | """ 209 | 210 | self.doc_term_matrix = X 211 | self.n_docs, self.n_terms = X.shape 212 | self.n_tokens = X.sum() 213 | doc_lookup, term_lookup = self._create_lookups(X) 214 | # iterate 215 | self.theta, self.phi, self.eta, self.loglikelihoods = gibbs_sampler_slda( 216 | self.n_iter, self.n_report_iter, 217 | self.n_topics, self.n_docs, self.n_terms, self.n_tokens, 218 | self.alpha, self.beta, self.mu, self.nu2, self.sigma2, 219 | doc_lookup, term_lookup, 220 | np.ascontiguousarray(y, dtype=np.float64), self.seed) 221 | 222 | 223 | class BLSLDA(TopicModelBase): 224 | """ 225 | Binary logistic supervised latent Dirichlet allocation, using collapsed 226 | Gibbs sampling implemented in Cython. 227 | 228 | Parameters 229 | ---------- 230 | n_topics : int 231 | Number of topics 232 | 233 | alpha : array-like, shape = (n_topics,) 234 | Dirichlet distribution parameter for each document's topic 235 | distribution. 
class BLSLDA(TopicModelBase):
    """
    Binary logistic supervised latent Dirichlet allocation, using collapsed
    Gibbs sampling implemented in Cython.

    Parameters
    ----------
    n_topics : int
        Number of topics

    alpha : array-like, shape = (n_topics,)
        Dirichlet distribution parameter for each document's topic
        distribution.

    beta : array-like, shape = (n_terms,)
        Dirichlet distribution parameter for each topic's term distribution.

    mu : float
        Mean of regression coefficients (eta).

    nu2 : float
        Variance of regression coefficients (eta).

    b : float
        Regularization parameter.

    n_iter : int, default=500
        Number of iterations of Gibbs sampler

    n_report_iter : int, default=10
        Number of iterations of Gibbs sampler between progress reports.

    seed : int, optional
        Seed for random number generator.
    """

    def __init__(self, n_topics, alpha, beta, mu, nu2, b, n_iter=500,
                 n_report_iter=10, seed=None):
        self.n_topics = n_topics
        # C-contiguous float64 so the cython sampler's typed memoryviews
        # accept list/tuple inputs (consistent with LDA.__init__)
        self.alpha = np.ascontiguousarray(alpha, dtype=np.float64)
        self.beta = np.ascontiguousarray(beta, dtype=np.float64)
        self.mu = mu
        self.nu2 = nu2
        self.b = b
        self.n_iter = n_iter
        self.n_report_iter = n_report_iter
        self.seed = seed

    def fit(self, X, y):
        """
        Estimate the topic distributions per document (theta), term
        distributions per topic (phi), and regression coefficients (eta).

        Parameters
        ----------
        X : array-like, shape = (n_docs, n_terms)
            The document-term matrix.

        y : array-like, shape = (n_docs,)
            Response values for each document.
        """

        self.doc_term_matrix = X
        self.n_docs, self.n_terms = X.shape
        self.n_tokens = X.sum()
        doc_lookup, term_lookup = self._create_lookups(X)
        # iterate
        self.theta, self.phi, self.eta, self.loglikelihoods = gibbs_sampler_blslda(
            self.n_iter, self.n_report_iter,
            self.n_topics, self.n_docs, self.n_terms, self.n_tokens,
            self.alpha, self.beta, self.mu, self.nu2, self.b,
            doc_lookup, term_lookup,
            np.ascontiguousarray(y, dtype=np.float64), self.seed)
class GRTM(TopicModelBase):
    """
    Generalized relational topic models, using collapsed Gibbs sampling
    implemented in Cython.

    Parameters
    ----------
    n_topics : int
        Number of topics

    alpha : array-like, shape = (n_topics,)
        Dirichlet distribution parameter for each document's topic
        distribution.

    beta : array-like, shape = (n_terms,)
        Dirichlet distribution parameter for each topic's term distribution.

    mu : float
        Mean of regression coefficients (eta).

    nu2 : float
        Variance of regression coefficients (eta).

    b : float
        Regularization parameter.

    n_iter : int, default=500
        Number of iterations of Gibbs sampler

    n_report_iter : int, default=10
        Number of iterations of Gibbs sampler between progress reports.

    seed : int, optional
        Seed for random number generator.
    """

    def __init__(self, n_topics, alpha, beta, mu, nu2, b, n_iter=500,
                 n_report_iter=10, seed=None):
        self.n_topics = n_topics
        # C-contiguous float64 so the cython sampler's typed memoryviews
        # accept list/tuple inputs (consistent with LDA.__init__)
        self.alpha = np.ascontiguousarray(alpha, dtype=np.float64)
        self.beta = np.ascontiguousarray(beta, dtype=np.float64)
        self.mu = mu
        self.nu2 = nu2
        self.b = b
        self.n_iter = n_iter
        self.n_report_iter = n_report_iter
        self.seed = seed

    def _create_edges(self, y, order='tail'):
        """
        Build CSR-style lookup arrays for the edges incident to each
        document.

        Sorts the edge record array ``y`` in place by the given end of the
        edge (``'tail'`` or ``'head'``), then returns ``(docs, edges)``
        where ``docs[d]:docs[d + 1]`` slices ``edges`` to give the original
        edge indices touching document ``d``.
        """
        y.sort(order=order)
        _docs, _counts = np.unique(y[order], return_counts=True)
        # documents with no incident edges get a zero count
        counts = np.zeros(self.n_docs)
        counts[_docs] = _counts
        docs = np.ascontiguousarray(
            np.concatenate(([0], np.cumsum(counts))), dtype=np.intc)
        edges = np.ascontiguousarray(y['index'].flatten(), dtype=np.intc)
        return docs, edges

    def fit(self, X, y):
        """
        Estimate the topic distributions per document (theta), term
        distributions per topic (phi), and regression coefficients (eta).

        Parameters
        ----------
        X : array-like, shape = (n_docs, n_terms)
            The document-term matrix.

        y : array-like, shape = (n_edges, 3)
            Each entry of y is an ordered triple (d_1, d_2, y_(d_1, d_2)),
            where d_1 and d_2 are documents and y_(d_1, d_2) is an indicator
            of a directed edge from d_1 to d_2.
        """

        self.doc_term_matrix = X
        self.n_docs, self.n_terms = X.shape
        self.n_tokens = X.sum()
        self.n_edges = y.shape[0]
        doc_lookup, term_lookup = self._create_lookups(X)
        # edge info: prepend the original edge index as a column
        y = np.ascontiguousarray(np.column_stack((range(self.n_edges), y)))
        # we use a view here so that we can sort in-place using named columns
        y_rec = y.view(dtype=list(zip(('index', 'tail', 'head', 'data'),
                                      4 * [y.dtype])))
        edge_tail = np.ascontiguousarray(y_rec['tail'].flatten(),
                                         dtype=np.intc)
        edge_head = np.ascontiguousarray(y_rec['head'].flatten(),
                                         dtype=np.intc)
        edge_data = np.ascontiguousarray(y_rec['data'].flatten(),
                                         dtype=np.float64)
        out_docs, out_edges = self._create_edges(y_rec, order='tail')
        in_docs, in_edges = self._create_edges(y_rec, order='head')
        # iterate
        self.theta, self.phi, self.H, self.loglikelihoods = gibbs_sampler_grtm(
            self.n_iter, self.n_report_iter, self.n_topics, self.n_docs,
            self.n_terms, self.n_tokens, self.n_edges, self.alpha, self.beta,
            self.mu, self.nu2, self.b, doc_lookup, term_lookup, out_docs,
            out_edges, in_docs, in_edges, edge_tail, edge_head, edge_data,
            self.seed)
class RTM(TopicModelBase):
    """
    Relational topic models, using collapsed Gibbs sampling implemented in
    Cython.

    Parameters
    ----------
    n_topics : int
        Number of topics

    alpha : array-like, shape = (n_topics,)
        Dirichlet distribution parameter for each document's topic
        distribution.

    beta : array-like, shape = (n_terms,)
        Dirichlet distribution parameter for each topic's term distribution.

    mu : float
        Mean of the prior on the regression coefficients (eta).
        (NOTE(review): inferred from the -(eta - mu)^2 / (2 * sigma2) term
        in the RTM log likelihood — confirm against the sampler.)

    sigma2 : float
        Variance of the prior on the regression coefficients (eta).
        (NOTE(review): inferred as above — confirm against the sampler.)

    nu : float
        Scale applied to the eta * bold_z term in the log likelihood —
        TODO confirm intended semantics.

    n_iter : int, default=500
        Number of iterations of Gibbs sampler

    n_report_iter : int, default=10
        Number of iterations of Gibbs sampler between progress reports.

    seed : int, optional
        Seed for random number generator.
    """

    def __init__(self, n_topics, alpha, beta, mu, sigma2, nu, n_iter=500,
                 n_report_iter=10, seed=None):
        self.n_topics = n_topics
        # C-contiguous float64 so the cython sampler's typed memoryviews
        # accept list/tuple inputs (consistent with LDA.__init__)
        self.alpha = np.ascontiguousarray(alpha, dtype=np.float64)
        self.beta = np.ascontiguousarray(beta, dtype=np.float64)
        self.mu = mu
        self.sigma2 = sigma2
        self.nu = nu
        self.n_iter = n_iter
        self.n_report_iter = n_report_iter
        self.seed = seed

    def fit(self, X, y=None):
        """
        Estimate the topic distributions per document (theta), the term
        distributions per topic (phi), and the regression coefficients (eta).

        Parameters
        ----------
        X : array-like, shape = (n_docs, n_terms)
            The document-term matrix

        y : array-like, shape = (n_docs, n_docs)
            The adjacency matrix of the document network
        """

        self.doc_term_matrix = X
        self.n_docs, self.n_terms = X.shape
        self.n_tokens = X.sum()
        doc_lookup, term_lookup = self._create_lookups(X)
        self.adjacency_matrix = y
        # iterate
        self.theta, self.phi, self.eta, self.loglikelihoods = gibbs_sampler_rtm(
            self.n_iter, self.n_report_iter,
            self.n_topics, self.n_docs, self.n_terms, self.n_tokens,
            self.alpha, self.beta, self.mu, self.sigma2, self.nu,
            doc_lookup, term_lookup, self.adjacency_matrix, self.seed)
class BLHSLDA(TopicModelBase):
    """
    Binary logistic hierarchical supervised latent Dirichlet allocation,
    using collapsed Gibbs sampling implemented in Cython.

    Parameters
    ----------
    n_topics : int
        Number of topics

    alpha : array-like, shape = (n_topics,)
        Dirichlet distribution parameter for each document's topic
        distribution.

    beta : array-like, shape = (n_terms,)
        Dirichlet distribution parameter for each topic's term distribution.

    mu : float
        Mean of regression coefficients (eta).

    nu2 : float
        Variance of regression coefficients (eta).

    b : float
        Regularization parameter.

    n_iter : int, default=500
        Number of iterations of Gibbs sampler

    n_report_iter : int, default=10
        Number of iterations of Gibbs sampler between progress reports.

    seed : int, optional
        Seed for random number generator.
    """

    def __init__(self, n_topics, alpha, beta, mu, nu2, b, n_iter=500,
                 n_report_iter=10, seed=None):
        self.n_topics = n_topics
        # C-contiguous float64 so the cython sampler's typed memoryviews
        # accept list/tuple inputs (consistent with LDA.__init__)
        self.alpha = np.ascontiguousarray(alpha, dtype=np.float64)
        self.beta = np.ascontiguousarray(beta, dtype=np.float64)
        self.mu = mu
        self.nu2 = nu2
        self.b = b
        self.n_iter = n_iter
        self.n_report_iter = n_report_iter
        self.seed = seed

    def fit(self, X, y, hier):
        """
        Estimate the topic distributions per document (theta), term
        distributions per topic (phi), and regression coefficients (eta).

        Parameters
        ----------
        X : array-like, shape = (n_docs, n_terms)
            The document-term matrix.

        y : array-like, shape = (n_docs, n_labels)
            Response values for each document for each labels.

        hier : 1D array-like, size = n_labels
            The index of the list corresponds to the current label
            and the value of the indexed position is the parent of the label.
            Set -1 as the root.
        """

        self.doc_term_matrix = X
        self.n_docs, self.n_terms = X.shape
        self.n_tokens = X.sum()
        doc_lookup, term_lookup = self._create_lookups(X)

        # iterate
        self.theta, self.phi, self.eta, self.loglikelihoods = gibbs_sampler_blhslda(
            self.n_iter, self.n_report_iter,
            self.n_topics, self.n_docs, self.n_terms, self.n_tokens,
            self.alpha, self.beta, self.mu, self.nu2, self.b, doc_lookup,
            term_lookup, np.ascontiguousarray(y, dtype=np.intc),
            np.ascontiguousarray(hier, dtype=np.intc), self.seed)
#cython: language_level=3
#cython: boundscheck=False
#cython: wraparound=False
#cython: cdivision=True

"""
The heavy-lifting is here in cython.

Draws from Allen Riddell's LDA library https://github.com/ariddell/lda
"""

from datetime import (datetime, timedelta)
import numpy as np
from libc.math cimport fabs
from cython.operator cimport (preincrement, predecrement)
from cython_gsl cimport (gsl_sf_lngamma as lngamma, gsl_sf_exp as exp,
                         gsl_sf_log as ln, gsl_rng, gsl_rng_mt19937,
                         gsl_rng_alloc, gsl_rng_set,
                         gsl_rng_uniform, gsl_rng_uniform_int,
                         gsl_ran_gaussian as gaussian)
from pypolyagamma import PyPolyaGamma


# we choose this number since it is a large prime
cdef unsigned int n_rands = 1000003
# module-level GSL Mersenne-Twister generator shared by all samplers
cdef gsl_rng *r = gsl_rng_alloc(gsl_rng_mt19937)


cdef double[:] create_rands(unsigned int n_rands, seed=None):
    """
    Create array of uniformly random numbers on the interval [0, 1).

    If ``seed`` is given, the shared GSL generator is re-seeded first.
    """

    cdef:
        int i
        double[::1] rands = np.empty(n_rands, dtype=np.float64, order='C')
    if seed is not None:
        gsl_rng_set(r, seed)
    for i in range(n_rands):
        rands[i] = gsl_rng_uniform(r)
    return rands


cdef int[:] create_topic_lookup(unsigned int n_tokens, unsigned int n_topics,
                                seed=None):
    """
    Assign every token a topic drawn uniformly at random from
    {0, ..., n_topics - 1}.

    (Fixed: the previous docstring was copy-pasted from ``create_rands``
    and wrongly claimed this returns uniform numbers on [0, 1).)
    """

    cdef:
        int i
        int[::1] topic_lookup = np.empty(n_tokens, dtype=np.intc, order='C')
    if seed is not None:
        gsl_rng_set(r, seed)
    for i in range(n_tokens):
        topic_lookup[i] = gsl_rng_uniform_int(r, n_topics)
    return topic_lookup


cdef int searchsorted(double[:] a, double v):
    """
    Find indices where elements should be inserted to maintain order.

    Find the indices into a sorted array a such that, if the corresponding
    elements in v were inserted before the indices, the order of a would be
    preserved.

    Like numpy.searchsorted
    (http://docs.scipy.org/doc/numpy/reference/generated/numpy.searchsorted.html).
    """

    cdef:
        int imin = 0
        int imax = a.shape[0]
        int imid
    # standard binary search for the leftmost insertion point
    while imin < imax:
        imid = imin + ((imax - imin) >> 1)
        if v > a[imid]:
            imin = imid + 1
        else:
            imax = imid
    return imin
cdef double loglikelihood_lda(int[:, :] nzw, int[:, :] ndz, int[:] nz,
                              double[:] alpha, double[:] beta, double sum_beta,
                              double lBeta_alpha, double lBeta_beta,
                              double lGamma_sum_alpha_nd):
    """
    Log likelihood calculation for LDA.

    This is an exact calculation.
    """

    # declare w with the other loop indices (previously it relied on
    # cython's type inference)
    cdef int k, d, w
    cdef int n_docs = ndz.shape[0]
    cdef int n_topics = ndz.shape[1]
    cdef int n_terms = nzw.shape[1]
    cdef double ll = 0
    # calculate log p(w|z)
    ll += n_topics * lBeta_beta
    for k in range(n_topics):
        ll -= lngamma(sum_beta + nz[k])
        for w in range(n_terms):
            ll += lngamma(beta[w] + nzw[k, w])
    # calculate log p(z)
    ll += n_docs * lBeta_alpha
    ll -= lGamma_sum_alpha_nd
    for d in range(n_docs):
        for k in range(n_topics):
            ll += lngamma(alpha[k] + ndz[d, k])
    return ll


cdef double loglikelihood_slda(int[:, :] nzw, int[:, :] ndz, int[:] nz,
                               double[:] alpha, double[:] beta, double sum_beta,
                               double mu, double nu2, double sigma2,
                               double[:] eta, double[:] y, double[:, :] Z):
    """
    Log likelihood calculation for supervised LDA.

    This is not an exact calculation (constants not included).
    """

    cdef int k, d, w
    cdef int n_docs = ndz.shape[0]
    cdef int n_topics = ndz.shape[1]
    cdef int n_terms = nzw.shape[1]
    cdef double ll = 0.
    cdef double eta_z = 0.
    # calculate log p(w|z) and log p(eta)
    for k in range(n_topics):
        ll -= lngamma(sum_beta + nz[k])
        ll -= (eta[k] - mu) * (eta[k] - mu) / 2 / nu2
        for w in range(n_terms):
            ll += lngamma(beta[w] + nzw[k, w])
    # calculate log p(z) and log p(y|eta)
    for d in range(n_docs):
        eta_z = 0.
        for k in range(n_topics):
            eta_z += eta[k] * Z[k, d]
            ll += lngamma(alpha[k] + ndz[d, k])
        ll -= (y[d] - eta_z) * (y[d] - eta_z) / 2 / sigma2
    return ll
cdef double loglikelihood_blslda(int[:, :] nzw, int[:, :] ndz, int[:] nz,
                                 double[:] alpha, double[:] beta, double sum_beta,
                                 double mu, double nu2, double b,
                                 double[:] eta, double[:] y, double[:, :] Z):
    """
    Log likelihood calculation for binary logistic supervised LDA.

    This is not an exact calculation (constants not included).
    """

    # declare w with the other loop indices (previously it relied on
    # cython's type inference)
    cdef int k, d, w
    cdef int n_docs = ndz.shape[0]
    cdef int n_topics = ndz.shape[1]
    cdef int n_terms = nzw.shape[1]
    cdef double ll = 0.
    cdef double eta_z = 0.
    # calculate log p(w|z) and log p(eta)
    for k in range(n_topics):
        ll -= lngamma(sum_beta + nz[k])
        ll -= (eta[k] - mu) * (eta[k] - mu) / 2 / nu2
        for w in range(n_terms):
            ll += lngamma(beta[w] + nzw[k, w])
    # calculate log p(z) and log p(y|eta, z)
    for d in range(n_docs):
        eta_z = 0.
        for k in range(n_topics):
            eta_z += eta[k] * Z[k, d]
            ll += lngamma(alpha[k] + ndz[d, k])
        ll += b * (y[d] * eta_z - ln(1 + exp(eta_z)))
    return ll
cdef double loglikelihood_grtm(int[:, :] nzw, int[:, :] ndz, int[:] nz,
                               double[:] alpha, double[:] beta, double sum_beta,
                               double mu, double nu2, double b,
                               double[:, :] H, double[:] y, double[:] zeta):
    """
    Log likelihood calculation for generalized relational topic models with
    data augmentation.

    This is not an exact calculation (constants not included).
    """

    cdef:
        # declare w and e with the other loop indices (previously they
        # relied on cython's type inference)
        int k, _k, d, w, e
        int n_docs = ndz.shape[0]
        int n_topics = ndz.shape[1]
        int n_terms = nzw.shape[1]
        int n_edges = y.shape[0]
        double ll = 0.
    # calculate log p(w|z) and log p(H)
    for k in range(n_topics):
        ll -= lngamma(sum_beta + nz[k])
        for _k in range(n_topics):
            ll -= (H[k, _k] - mu) * (H[k, _k] - mu) / 2 / nu2
        for w in range(n_terms):
            ll += lngamma(beta[w] + nzw[k, w])
    # calculate log p(z)
    for d in range(n_docs):
        for k in range(n_topics):
            ll += lngamma(alpha[k] + ndz[d, k])
    # calculate log p(y|H, z)
    for e in range(n_edges):
        ll += b * (y[e] * zeta[e] - ln(1 + exp(zeta[e])))
    return ll


cdef double loglikelihood_rtm(int[:, :] nzw, int[:, :] ndz, int[:] nz,
                              double[:] alpha, double[:] beta, double sum_beta,
                              double mu, double nu, double sigma2,
                              double[:] eta, double[:] bold_z):
    """
    Log likelihood calculation for RTM.

    Only non-constant terms are included in this calculation.
    """

    cdef int k, d, w
    cdef int n_docs = ndz.shape[0]
    cdef int n_topics = ndz.shape[1]
    cdef int n_terms = nzw.shape[1]
    cdef double ll = 0
    # calculate log p(w|z)
    for k in range(n_topics):
        ll -= lngamma(sum_beta + nz[k])
        for w in range(n_terms):
            ll += lngamma(beta[w] + nzw[k, w])
    # calculate log p(z)
    for d in range(n_docs):
        for k in range(n_topics):
            ll += lngamma(alpha[k] + ndz[d, k])
    # calculate log p(eta) and log p(y|eta,z)
    # NOTE(review): here the edge term is scaled by nu and the Gaussian
    # prior term by sigma2 — the opposite of the mu/nu2 convention used by
    # the other models; confirm against the RTM derivation.
    for k in range(n_topics):
        ll += eta[k] * bold_z[k] / nu - (eta[k] - mu) * (eta[k] - mu) / 2 / sigma2
    return ll
cdef double loglikelihood_blhslda(int[:, :] nzw, int[:, :] ndz, int[:] nz,
                                  double[:] alpha, double[:] beta, double sum_beta,
                                  double mu, double nu2, double b,
                                  double[:,:] eta, int[:,:] y, double[:, :] Z):
    """
    Log likelihood calculation for binary logistic hierarchical supervised LDA.

    This is not an exact calculation (constants not included).
    """

    cdef int k, d, l, w
    cdef int n_docs = ndz.shape[0]
    cdef int n_topics = ndz.shape[1]
    cdef int n_terms = nzw.shape[1]
    cdef int n_labels = eta.shape[0]
    cdef double ll = 0.
    cdef double eta_z = 0.
    # BUG FIX: the original nested everything under the label loop, so the
    # log p(w|z) and log p(z) terms were each counted n_labels times, and
    # eta_z accumulated across labels instead of being reset per label.
    # calculate log p(w|z) (once, as in the other models)
    for k in range(n_topics):
        ll -= lngamma(sum_beta + nz[k])
        for w in range(n_terms):
            ll += lngamma(beta[w] + nzw[k, w])
    # calculate log p(eta): one Gaussian term per (label, topic)
    for l in range(n_labels):
        for k in range(n_topics):
            ll -= (eta[l, k] - mu) * (eta[l, k] - mu) / 2 / nu2
    # calculate log p(z) once per document and log p(y|eta, z) per
    # (document, label)
    for d in range(n_docs):
        for k in range(n_topics):
            ll += lngamma(alpha[k] + ndz[d, k])
        for l in range(n_labels):
            eta_z = 0.
            for k in range(n_topics):
                eta_z += eta[l, k] * Z[k, d]
            ll += b * (y[d, l] * eta_z - ln(1 + exp(eta_z)))
    return ll


cdef print_progress(start_time, int n_report_iter, int i,
                    double lL_now, double lL_last):
    """
    Print progress of iterations.

    Reports elapsed wall time, the current log likelihood, and the relative
    change since the last report; prints only every ``n_report_iter``
    iterations (and never at iteration 0).
    """

    if i > 0 and i % n_report_iter == 0:
        now_time = datetime.now()
        print('{} {} elapsed, iter {:>4}, LL {:.4f}, {:.2f}% change from last'
              .format(now_time,
                      now_time - start_time,
                      i,
                      lL_now,
                      (lL_now - lL_last) / fabs(lL_last) * 100))


def estimate_matrix(int[:, :] counts, double[:] psuedo_counts, int n_things):
    """
    Create estimates for theta and phi from counts.

    Adds the Dirichlet pseudo-counts to each row of ``counts`` and
    normalizes rows to sum to 1.  (The parameter name ``psuedo_counts`` is
    a long-standing typo kept for backward compatibility with keyword
    callers.)
    """

    mat = np.asarray(counts) + np.tile(psuedo_counts, (n_things, 1))
    return (mat.T / mat.sum(axis=1)).T
def iterated_pseudo_counts(doc_lookup, term_lookup, int n_docs,
                           double[:] alpha, double[:] beta, double[:, :] phi,
                           int max_iter, double tol):
    """
    Estimate the topic distributions of new documents using the
    iterated pseudo-counts method mentioned in Wallach et al. (2009) and
    derived in Buntine (2009).
    """

    cdef:
        int d, i, k, s, w, n_tokens_d
        int n_topics = phi.shape[0]
        int[::1] term_lookup_d
        double sum_over_k, _tmp_double
        double[::1] q_sum
        double[:, ::1] q, q_new
        double[:, ::1] theta = np.empty((n_docs, n_topics), dtype=np.float64, order='C')
    for d in range(n_docs):
        term_lookup_d = np.ascontiguousarray(term_lookup[doc_lookup == d])
        n_tokens_d = term_lookup_d.shape[0]
        # initialize proposal distribution q: q[i, k] proportional to
        # phi[k, w_i] * alpha[k], normalized over topics
        q = np.empty((n_tokens_d, n_topics), dtype=np.float64, order='C')
        for i in range(n_tokens_d):
            w = term_lookup_d[i]
            sum_over_k = 0.
            for k in range(n_topics):
                _tmp_double = phi[k, w] * alpha[k]
                q[i, k] = _tmp_double
                sum_over_k += _tmp_double
            for k in range(n_topics):
                q[i, k] /= sum_over_k
        # do fixed point iteration
        q_new = np.empty_like(q, dtype=np.float64, order='C')
        for s in range(max_iter):
            # q_sum[k] = sum_i q[i, k], the expected topic pseudo-counts
            q_sum = np.zeros(n_topics, dtype=np.float64, order='C')
            for i in range(n_tokens_d):
                for k in range(n_topics):
                    q_sum[k] += q[i, k]
            for i in range(n_tokens_d):
                w = term_lookup_d[i]
                sum_over_k = 0.
                for k in range(n_topics):
                    _tmp_double = phi[k, w] * (alpha[k] + q_sum[k] - q[i, k])
                    q_new[i, k] = _tmp_double
                    sum_over_k += _tmp_double
                for k in range(n_topics):
                    q_new[i, k] /= sum_over_k
            # total absolute difference between iterations, for the
            # convergence test
            sum_over_k = 0.
            for i in range(n_tokens_d):
                for k in range(n_topics):
                    sum_over_k += fabs(q_new[i, k] - q[i, k])
            # BUG FIX: swap the buffers instead of aliasing them.  The
            # original `q = q_new` made q and q_new the same memoryview, so
            # from the second iteration onward q_new was updated in place
            # (reading partially-updated values) and the convergence test
            # compared a buffer with itself (diff == 0), terminating after
            # at most two iterations regardless of max_iter/tol.
            q, q_new = q_new, q
            if sum_over_k < tol:
                break
        # calculate topic distributions
        q_sum = np.zeros(n_topics, dtype=np.float64, order='C')
        sum_over_k = 0.
        for i in range(n_tokens_d):
            for k in range(n_topics):
                q_sum[k] += q[i, k]
                sum_over_k += q[i, k]
        for k in range(n_topics):
            theta[d, k] = q_sum[k] / sum_over_k
    return np.array(theta)
def gibbs_sampler_lda(int n_iter, int n_report_iter,
                      int n_topics, int n_docs,
                      int n_terms, int n_tokens,
                      double[:] alpha, double[:] beta,
                      int[:] doc_lookup, int[:] term_lookup,
                      seed=None):
    """
    Perform (collapsed) Gibbs sampling inference for LDA.
    """

    cdef:
        int it, tok, k, d, w, z, new_z
        double cum, u_val
        double sum_alpha = 0.
        double sum_beta = 0.
        double lBeta_alpha = 0.
        double lBeta_beta = 0.
        double lGamma_sum_alpha_nd = 0.
        # random initial topic for every token
        int[:] topic_lookup = create_topic_lookup(n_tokens, n_topics, seed)
        # per-iteration log likelihoods
        double[::1] lL = np.empty(n_iter, dtype=np.float64, order='C')
        # count tables: (doc, topic), (topic, term), per-topic, per-doc
        int[:, ::1] ndz = np.zeros((n_docs, n_topics), dtype=np.intc, order='C')
        int[:, ::1] nzw = np.zeros((n_topics, n_terms), dtype=np.intc, order='C')
        int[::1] nz = np.zeros(n_topics, dtype=np.intc, order='C')
        int[::1] nd = np.zeros(n_docs, dtype=np.intc, order='C')
        # running cumulative sum of the unnormalized topic probabilities
        double[::1] p_cumsum = np.empty(n_topics, dtype=np.float64, order='C')
        # pre-drawn uniform variates on [0, 1), recycled modulo n_rands
        double[:] rands = create_rands(n_rands=n_rands, seed=seed)
        int rand_idx = 0
    # seed the count tables from the random initialization
    for tok in range(n_tokens):
        preincrement(ndz[doc_lookup[tok], topic_lookup[tok]])
        preincrement(nzw[topic_lookup[tok], term_lookup[tok]])
        preincrement(nz[topic_lookup[tok]])
        preincrement(nd[doc_lookup[tok]])
    # constants of the log likelihood: Dirichlet normalizers
    for k in range(n_topics):
        sum_alpha += alpha[k]
        lBeta_alpha += lngamma(alpha[k])
    lBeta_alpha -= lngamma(sum_alpha)
    for w in range(n_terms):
        sum_beta += beta[w]
        lBeta_beta += lngamma(beta[w])
    lBeta_beta -= lngamma(sum_beta)
    for d in range(n_docs):
        lGamma_sum_alpha_nd += lngamma(sum_alpha + nd[d])
    # iterate
    start_time = datetime.now()
    print('{} start iterations'.format(start_time))
    for it in range(n_iter):
        for tok in range(n_tokens):
            d = doc_lookup[tok]
            w = term_lookup[tok]
            z = topic_lookup[tok]
            # remove this token from the counts ...
            predecrement(ndz[d, z])
            predecrement(nzw[z, w])
            predecrement(nz[z])
            # ... accumulate the unnormalized full conditional ...
            cum = 0.
            for k in range(n_topics):
                cum += (nzw[k, w] + beta[w]) / (nz[k] + sum_beta) * (ndz[d, k] + alpha[k])
                p_cumsum[k] = cum
            # ... and resample the topic by inverse-CDF on the running sum
            preincrement(rand_idx)
            if rand_idx == n_rands:
                rand_idx = 0
            u_val = rands[rand_idx] * cum
            new_z = topic_lookup[tok] = searchsorted(p_cumsum, u_val)
            preincrement(ndz[d, new_z])
            preincrement(nzw[new_z, w])
            preincrement(nz[new_z])
        lL[it] = loglikelihood_lda(nzw, ndz, nz, alpha, beta, sum_beta,
                                   lBeta_alpha, lBeta_beta,
                                   lGamma_sum_alpha_nd)
        # print progress
        print_progress(start_time, n_report_iter, it, lL[it],
                       lL[it - n_report_iter])
    # populate the topic and word distributions
    theta = estimate_matrix(ndz, alpha, n_docs)
    phi = estimate_matrix(nzw, beta, n_topics)
    return theta, phi, np.asarray(lL)
""" 466 | Perform (collapsed) Gibbs sampling inference for supervised LDA. 467 | """ 468 | 469 | cdef: 470 | int i, j, k, d, w, z, new_z 471 | double p_sum, uval, y_sum 472 | double sum_alpha = 0. 473 | double sum_beta = 0. 474 | int[:] topic_lookup = create_topic_lookup(n_tokens, n_topics, seed) 475 | # log likelihoods 476 | double[::1] lL = np.empty(n_iter, dtype=np.float64, order='C') 477 | # number of tokens in document d assigned to topic z, shape = (n_docs, n_topics) 478 | int[:, ::1] ndz = np.zeros((n_docs, n_topics), dtype=np.intc, order='C') 479 | # number of tokens assigned to topic z equal to term w, shape = (n_topics, n_terms) 480 | int[:, ::1] nzw = np.zeros((n_topics, n_terms), dtype=np.intc, order='C') 481 | # number of tokens assigned to topic k, shape = (n_topics,) 482 | int[::1] nz = np.zeros(n_topics, dtype=np.intc, order='C') 483 | # number of tokens in doc d, shape = (n_docs,) 484 | int[::1] nd = np.zeros(n_docs, dtype=np.intc, order='C') 485 | # (weighted) probabilities for the discrete distribution 486 | double[::1] p_cumsum = np.empty(n_topics, dtype=np.float64, order='C') 487 | # preallocate uniformly random numbers on the interval [0, 1) 488 | double[:] rands = create_rands(n_rands=n_rands, seed=seed) 489 | int u = 0 490 | # regression coefficients 491 | double[:, ::1] eta = np.ascontiguousarray( 492 | np.tile(mu, (n_iter + 1, n_topics)), dtype=np.float64) 493 | double[:, ::1] etand = np.empty((n_docs, n_topics), dtype=np.float64, order='C') 494 | double[::1] eta_tmp = np.empty(n_topics, dtype=np.float64, order='C') 495 | # initialize counts 496 | for j in range(n_tokens): 497 | preincrement(ndz[doc_lookup[j], topic_lookup[j]]) 498 | preincrement(nzw[topic_lookup[j], term_lookup[j]]) 499 | preincrement(nz[topic_lookup[j]]) 500 | preincrement(nd[doc_lookup[j]]) 501 | # initialize sum_alpha, lBeta_alpha 502 | for k in range(n_topics): 503 | sum_alpha += alpha[k] 504 | # initialize sum_beta, lBeta_beta 505 | for w in range(n_terms): 506 | 
sum_beta += beta[w] 507 | # define numpy variables 508 | Inu2 = np.identity(n_topics) / nu2 509 | # iterate 510 | start_time = datetime.now() 511 | print('{} start iterations'.format(start_time)) 512 | for i in range(n_iter): 513 | # initialize etand for iteration i 514 | for d in range(n_docs): 515 | for k in range(n_topics): 516 | etand[d, k] = eta[i, k] / nd[d] 517 | # sample z 518 | for j in range(n_tokens): 519 | d = doc_lookup[j] 520 | w = term_lookup[j] 521 | z = topic_lookup[j] 522 | predecrement(ndz[d, z]) 523 | predecrement(nzw[z, w]) 524 | predecrement(nz[z]) 525 | p_sum = 0. 526 | y_sum = y[d] 527 | for k in range(n_topics): 528 | y_sum -= etand[d, k] * ndz[d, k] 529 | y_sum = 2 * y_sum 530 | for k in range(n_topics): 531 | p_sum += (nzw[k, w] + beta[w]) \ 532 | / (nz[k] + sum_beta) \ 533 | * (ndz[d, k] + alpha[k]) \ 534 | * exp(etand[d, k] / 2 / sigma2 * (y_sum - etand[d, k])) 535 | p_cumsum[k] = p_sum 536 | preincrement(u) 537 | if u == n_rands: 538 | u = 0 539 | uval = rands[u] * p_sum 540 | new_z = topic_lookup[j] = searchsorted(p_cumsum, uval) 541 | preincrement(ndz[d, new_z]) 542 | preincrement(nzw[new_z, w]) 543 | preincrement(nz[new_z]) 544 | # sample eta 545 | # (actually, we are setting eta to mean) 546 | Z = (np.asarray(ndz) / np.asarray(nd)[:, np.newaxis]).T 547 | eta_tmp = np.linalg.solve(Inu2 + np.dot(Z, Z.T) / sigma2, 548 | np.dot(Z, np.asarray(y) / sigma2)) 549 | for k in range(n_topics): 550 | eta[i + 1, k] = eta_tmp[k] 551 | lL[i] = loglikelihood_slda(nzw, ndz, nz, alpha, beta, sum_beta, 552 | mu, nu2, sigma2, eta[i + 1], y, Z) 553 | # print progress 554 | print_progress(start_time, n_report_iter, i, lL[i], lL[i - n_report_iter]) 555 | # populate the topic and word distributions 556 | theta = estimate_matrix(ndz, alpha, n_docs) 557 | phi = estimate_matrix(nzw, beta, n_topics) 558 | return theta, phi, np.asarray(eta), np.asarray(lL) 559 | 560 | 561 | def gibbs_sampler_blslda(int n_iter, int n_report_iter, 562 | int n_topics, int n_docs, 
def gibbs_sampler_blslda(int n_iter, int n_report_iter,
                         int n_topics, int n_docs,
                         int n_terms, int n_tokens,
                         double[:] alpha, double[:] beta,
                         double mu, double nu2, double b,
                         int[:] doc_lookup, int[:] term_lookup,
                         double[:] y, seed):
    """
    Perform collapsed Gibbs sampling inference for binary logistic supervised
    LDA using Polson et al.'s data augmentation strategy[1] and Zhu et al.'s
    regularization strategy[2].

    1. Polson, N. G., Scott, J. G., & Windle, J. (2013). Bayesian Inference
    for Logistic Models Using Pólya–Gamma Latent Variables. Journal of the
    American Statistical Association, 108(504), 1339–1349.
    http://doi.org/10.1080/01621459.2013.829001

    2. Zhu, J., Zheng, X., & Zhang, B. (2013, October). Improved Bayesian
    Logistic Supervised Topic Models with Data Augmentation. arXiv.org.
    """

    cdef:
        int i, j, k, d, w, z, new_z
        double p_sum, uval, eta_sum, kappa_sum
        double sum_alpha = 0.
        double sum_beta = 0.
        int[:] topic_lookup = create_topic_lookup(n_tokens, n_topics, seed)
        # log likelihoods
        double[::1] lL = np.empty(n_iter, dtype=np.float64, order='C')
        # number of tokens in document d assigned to topic z, shape = (n_docs, n_topics)
        int[:, ::1] ndz = np.zeros((n_docs, n_topics), dtype=np.intc, order='C')
        # number of tokens assigned to topic z equal to term w, shape = (n_topics, n_terms)
        int[:, ::1] nzw = np.zeros((n_topics, n_terms), dtype=np.intc, order='C')
        # number of tokens assigned to topic k, shape = (n_topics,)
        int[::1] nz = np.zeros(n_topics, dtype=np.intc, order='C')
        # number of tokens in doc d, shape = (n_docs,)
        int[::1] nd = np.zeros(n_docs, dtype=np.intc, order='C')
        # (weighted) probabilities for the discrete distribution
        double[::1] p_cumsum = np.empty(n_topics, dtype=np.float64, order='C')
        # preallocate uniformly random numbers on the interval [0, 1)
        double[:] rands = create_rands(n_rands=n_rands, seed=seed)
        int u = 0
        # regression coefficients, one row per iteration (row 0 = prior mean)
        double[:, ::1] eta = np.ascontiguousarray(
            np.tile(mu, (n_iter + 1, n_topics)), dtype=np.float64)
        double[:, ::1] etand = np.empty((n_docs, n_topics), dtype=np.float64, order='C')
        double[::1] eta_mean = np.empty(n_topics, dtype=np.float64, order='C')
        # omega: Polya-Gamma auxiliary variables, initialized to 1 per doc
        double[::1] omega = np.ascontiguousarray(np.repeat(1., n_docs))
        # kappa: a transformation of y
        double[::1] kappa = b * (np.asarray(y) - 0.5)
    # initialize counts
    for j in range(n_tokens):
        preincrement(ndz[doc_lookup[j], topic_lookup[j]])
        preincrement(nzw[topic_lookup[j], term_lookup[j]])
        preincrement(nz[topic_lookup[j]])
        preincrement(nd[doc_lookup[j]])
    # initialize sum_alpha
    for k in range(n_topics):
        sum_alpha += alpha[k]
    # initialize sum_beta
    for w in range(n_terms):
        sum_beta += beta[w]
    # define numpy variables
    Inu2 = np.identity(n_topics) / nu2
    munu2 = np.repeat(mu / nu2, n_topics)
    # define PolyaGamma sampler
    # BUG FIX: `seed or 42` silently replaced a legitimate seed of 0 with
    # the default 42; only fall back when no seed was supplied at all.
    pg_rng = PyPolyaGamma(seed=42 if seed is None else seed)
    # iterate
    start_time = datetime.now()
    print('{} start iterations'.format(start_time))
    for i in range(n_iter):
        # sample omega
        for d in range(n_docs):
            eta_sum = 0.
            for k in range(n_topics):
                # initialize etand for iteration i
                etand[d, k] = eta[i, k] / nd[d]
                eta_sum += etand[d, k] * ndz[d, k]
            omega[d] = pg_rng.pgdraw(b, eta_sum)
        # sample z
        for j in range(n_tokens):
            d = doc_lookup[j]
            w = term_lookup[j]
            z = topic_lookup[j]
            predecrement(ndz[d, z])
            predecrement(nzw[z, w])
            predecrement(nz[z])
            p_sum = 0.
            kappa_sum = kappa[d]
            for k in range(n_topics):
                kappa_sum -= omega[d] * etand[d, k] * ndz[d, k]
            for k in range(n_topics):
                p_sum += (nzw[k, w] + beta[w]) \
                    / (nz[k] + sum_beta) \
                    * (ndz[d, k] + alpha[k]) \
                    * exp(etand[d, k] * (kappa_sum - omega[d] / 2 * etand[d, k]))
                p_cumsum[k] = p_sum
            preincrement(u)
            if u == n_rands:
                u = 0
            uval = rands[u] * p_sum
            new_z = topic_lookup[j] = searchsorted(p_cumsum, uval)
            preincrement(ndz[d, new_z])
            preincrement(nzw[new_z, w])
            preincrement(nz[new_z])
        # sample eta
        Z = (np.asarray(ndz) / np.asarray(nd)[:, np.newaxis]).T
        Omega = np.asarray(omega)[np.newaxis, :]
        eta_mean = np.linalg.solve(Inu2 + np.dot(Z * Omega, Z.T),
                                   munu2 + np.dot(Z, kappa))
        # TODO currently setting eta to mean, but need to sample
        for k in range(n_topics):
            eta[i + 1, k] = eta_mean[k]
        # compute log-likelihood
        lL[i] = loglikelihood_blslda(nzw, ndz, nz, alpha, beta, sum_beta,
                                     mu, nu2, b, eta[i + 1], y, Z)
        # print progress
        print_progress(start_time, n_report_iter, i, lL[i], lL[i - n_report_iter])
    # populate the topic and word distributions
    theta = estimate_matrix(ndz, alpha, n_docs)
    phi = estimate_matrix(nzw, beta, n_topics)
    return theta, phi, np.asarray(eta), np.asarray(lL)


def gibbs_sampler_grtm(int n_iter, int n_report_iter,
                       int n_topics, int n_docs,
                       int n_terms, int n_tokens, int n_edges,
                       double[:] alpha, double[:] beta,
                       double mu, double nu2, double b,
                       int[:] doc_lookup, int[:] term_lookup,
                       int[:] out_docs, int[:] out_edges,
                       int[:] in_docs, int[:] in_edges,
                       int[:] edge_tail, int[:] edge_head,
                       double[:] y, seed):
    """
    Perform collapsed Gibbs sampling inference for relational topic models
    using Polson et al.'s data augmentation strategy[1] and Zhu et al.'s
    regularization
    strategy[2].

    1. Polson, N. G., Scott, J. G., & Windle, J. (2013). Bayesian Inference
    for Logistic Models Using Pólya–Gamma Latent Variables. Journal of the
    American Statistical Association, 108(504), 1339–1349.
    http://doi.org/10.1080/01621459.2013.829001

    2. Chen, N., Zhu, J., Xia, F., & Zhang, B. (2013). Generalized relational topic
    models with data augmentation. Presented at the IJCAI'13: Proceedings of the
    Twenty-Third international joint conference on Artificial Intelligence,  AAAI
    Press.
    """

    cdef:
        int i, j, k, k1, k2, d, d1, d2, e, w, z, new_z, n, Nd
        double p_sum, uval, H_col_sum, H_row_sum, kappa_sum, zeta_sum
        double sum_alpha = 0.
        double sum_beta = 0.
        int[:] topic_lookup = create_topic_lookup(n_tokens, n_topics, seed)
        # log likelihoods
        double[::1] lL = np.empty(n_iter, dtype=np.float64, order='C')
        # number of tokens in document d assigned to topic z, shape = (n_docs, n_topics)
        int[:, ::1] ndz = np.zeros((n_docs, n_topics), dtype=np.intc, order='C')
        # number of tokens assigned to topic z equal to term w, shape = (n_topics, n_terms)
        int[:, ::1] nzw = np.zeros((n_topics, n_terms), dtype=np.intc, order='C')
        # number of tokens assigned to topic k, shape = (n_topics,)
        int[::1] nz = np.zeros(n_topics, dtype=np.intc, order='C')
        # number of tokens in doc d, shape = (n_docs,)
        int[::1] nd = np.zeros(n_docs, dtype=np.intc, order='C')
        # (weighted) probabilities for the discrete distribution
        double[::1] p_cumsum = np.empty(n_topics, dtype=np.float64, order='C')
        # preallocate uniformly random numbers on the interval [0, 1)
        double[:] rands = create_rands(n_rands=n_rands, seed=seed)
        int u = 0
        # zeta[e]: linear predictor of edge e, zbar_tail^T H zbar_head,
        # where zbar_d = ndz[d] / nd[d] (see the omega-sampling loop below)
        double[::1] zeta = np.empty(n_edges, dtype=np.float64, order='C')
        # regression coefficients
        double[:, :, ::1] H = np.ascontiguousarray(
            np.tile(mu, (n_iter + 1, n_topics, n_topics)), dtype=np.float64)
        # 0 = row, 1 = column
        double[:, ::1] Hznd = np.empty((n_edges, n_topics), dtype=np.float64, order='C')
        double[:, ::1] HTznd = np.empty((n_edges, n_topics), dtype=np.float64, order='C')
        double[::1] eta_mean = np.empty(n_topics * n_topics, dtype=np.float64, order='C')
        # omega: notice I'm initializing omega here
        double[::1] omega = np.ascontiguousarray(np.repeat(1., n_edges))
        # kappa: a transformation of y
        double[::1] kappa = b * (np.asarray(y) - 0.5)
    # initialize counts
    for j in range(n_tokens):
        preincrement(ndz[doc_lookup[j], topic_lookup[j]])
        preincrement(nzw[topic_lookup[j], term_lookup[j]])
        preincrement(nz[topic_lookup[j]])
        preincrement(nd[doc_lookup[j]])
    # initialize sum_alpha (total topic-prior mass; not read again below)
    for k in range(n_topics):
        sum_alpha += alpha[k]
    # initialize sum_beta (total term-prior mass)
    for w in range(n_terms):
        sum_beta += beta[w]
    # define numpy variables
    Inu2 = np.identity(n_topics * n_topics) / nu2
    munu2 = np.repeat(mu / nu2, n_topics * n_topics)
    # define PolyaGamma sampler
    pg_rng = PyPolyaGamma(seed=seed or 42)
    # iterate
    start_time = datetime.now()
    print('{} start iterations'.format(start_time))
    for i in range(n_iter):
        # sample omega, and initialize Hznd and zeta for iteration i
        for e in range(n_edges):
            d1 = edge_tail[e]
            d2 = edge_head[e]
            Nd = nd[d1] * nd[d2]
            zeta_sum = 0.
            for k1 in range(n_topics):
                H_row_sum = 0.
                H_col_sum = 0.
                for k2 in range(n_topics):
                    zeta_sum += ndz[d1, k1] * ndz[d2, k2] * H[i, k1, k2]
                    H_row_sum += H[i, k1, k2] * ndz[d2, k2]
                    H_col_sum += H[i, k2, k1] * ndz[d1, k2]
                # per-edge caches: row k1 of H applied to the head's topic
                # proportions, and column k1 applied to the tail's
                Hznd[e, k1] = H_row_sum / Nd
                HTznd[e, k1] = H_col_sum / Nd
            zeta[e] = zeta_sum / Nd
            omega[e] = pg_rng.pgdraw(b, zeta[e])
        # sample z
        for j in range(n_tokens):
            d = doc_lookup[j]
            w = term_lookup[j]
            z = topic_lookup[j]
            predecrement(ndz[d, z])
            predecrement(nzw[z, w])
            predecrement(nz[z])
            p_sum = 0.
            for k in range(n_topics):
                # kappa_sum accumulates, over every edge incident to d, the
                # contribution of assigning this token to topic k
                kappa_sum = 0.
                # edges pointing INTO d (d is the head)
                for n in range(in_docs[d], in_docs[d + 1]):
                    e = in_edges[n]
                    d1 = edge_tail[e]
                    Nd = nd[d1] * nd[d]
                    # remove this token's old-topic contribution from zeta[e]
                    zeta_sum = zeta[e]
                    for k1 in range(n_topics):
                        zeta_sum -= ndz[d1, k1] * H[i, k1, z] / Nd
                    kappa_sum += (kappa[e] - omega[e] * zeta_sum
                                  - omega[e] / 2 * HTznd[e, k]) * HTznd[e, k]
                # edges pointing OUT of d (d is the tail)
                for n in range(out_docs[d], out_docs[d + 1]):
                    e = out_edges[n]
                    d2 = edge_head[e]
                    Nd = nd[d] * nd[d2]
                    zeta_sum = zeta[e]
                    for k2 in range(n_topics):
                        zeta_sum -= ndz[d2, k2] * H[i, z, k2] / Nd
                    kappa_sum += (kappa[e] - omega[e] * zeta_sum
                                  - omega[e] / 2 * Hznd[e, k]) * Hznd[e, k]
                p_sum += (nzw[k, w] + beta[w]) \
                    / (nz[k] + sum_beta) \
                    * (ndz[d, k] + alpha[k]) \
                    * exp(kappa_sum)
                p_cumsum[k] = p_sum
            preincrement(u)
            if u == n_rands:
                u = 0
            uval = rands[u] * p_sum
            new_z = topic_lookup[j] = searchsorted(p_cumsum, uval)
            preincrement(ndz[d, new_z])
            preincrement(nzw[new_z, w])
            preincrement(nz[new_z])
        # TODO update zeta, Hznd, HTznd
        # NOTE(review): the caches above are only refreshed once per outer
        # iteration, so they go stale as tokens are reassigned — confirm
        # this is an intentional approximation
        # sample eta
        _Z = np.asarray(ndz) / np.asarray(nd)[:, np.newaxis]
        Z = np.empty((n_topics * n_topics, n_edges), dtype=np.float64, order='C')
        for e in range(n_edges):
            Z[:, e] = np.kron(_Z[edge_head[e]], _Z[edge_tail[e]])
        Omega = np.asarray(omega)[np.newaxis, :]
        eta_mean = np.linalg.solve(Inu2 + np.dot(Z * Omega, Z.T),
                                   munu2 + np.dot(Z, kappa))
        # TODO currently setting eta to mean, but need to sample
        # eta_mean index k1 + k2 * n_topics pairs tail topic k1 with head
        # topic k2 (matching np.kron(head, tail) above), hence H[k1, k2]
        for k1 in range(n_topics):
            for k2 in range(n_topics):
                H[i + 1, k1, k2] = eta_mean[k1 + (k2 * n_topics)]
        # compute log-likelihood
        lL[i] = loglikelihood_grtm(nzw, ndz, nz, alpha, beta, sum_beta,
                                   mu, nu2, b, H[i + 1], y, zeta)
        # print progress
        print_progress(start_time, n_report_iter, i, lL[i], lL[i - n_report_iter])
    # populate the topic and word distributions
    theta = estimate_matrix(ndz, alpha, n_docs)
    phi = estimate_matrix(nzw, beta, n_topics)
    return theta, phi, np.asarray(H), np.asarray(lL)


def gibbs_sampler_rtm(int n_iter, int n_report_iter,
                      int n_topics, int n_docs,
                      int n_terms, int n_tokens,
                      double[:] alpha, double[:] beta,
                      double mu, double sigma2, double nu,
                      int[:] doc_lookup, int[:] term_lookup,
                      adj_mat, seed=None):
    """
    Perform (collapsed) Gibbs sampling inference for RTM.
    """

    cdef:
        int i, j, k, d, w, z, new_z, _d, n
        # NOTE(review): d_sum appears unused in the body below
        double p_sum, uval, d_sum, nd_d, nd__d
        double sum_alpha = 0.
        double sum_beta = 0.
        int[:] topic_lookup = create_topic_lookup(n_tokens, n_topics, seed)
        # log likelihoods
        double[::1] lL = np.empty(n_iter, dtype=np.float64, order='C')
        # number of tokens in document d assigned to topic z, shape = (n_docs, n_topics)
        int[:, ::1] ndz = np.zeros((n_docs, n_topics), dtype=np.intc, order='C')
        # number of tokens assigned to topic z equal to term w, shape = (n_topics, n_terms)
        int[:, ::1] nzw = np.zeros((n_topics, n_terms), dtype=np.intc, order='C')
        # number of tokens assigned to topic k, shape = (n_topics,)
        int[::1] nz = np.zeros(n_topics, dtype=np.intc, order='C')
        # number of tokens in doc d, shape = (n_docs,)
        int[::1] nd = np.zeros(n_docs, dtype=np.intc, order='C')
        # (weighted) probabilities for the discrete distribution
        double[::1] p_cumsum = np.empty(n_topics, dtype=np.float64, order='C')
        # preallocate uniformly random numbers on the interval [0, 1)
        double[:] rands = create_rands(n_rands=n_rands, seed=seed)
        int u = 0
        # document network variables
        int[::1] n_neighbors = np.ascontiguousarray(adj_mat.getnnz(axis=1))
        int[::1] neighbor_index = np.zeros(n_docs, dtype=np.intc, order='C')
        int[:, ::1] neighbors = np.zeros((n_docs, np.asarray(n_neighbors).max()),
                                         dtype=np.intc, order='C')
        # bold_z is nu times the bold_z in the write-up
        double[::1] bold_z = np.zeros(n_topics, dtype=np.float64, order='C')
        # ndznd[d, k]: sum over d's neighbors _d of ndz[_d, k] / nd[_d]
        double[:, ::1] ndznd = np.zeros((n_docs, n_topics), dtype=np.float64,
                                        order='C')
        double[:, ::1] eta = np.ascontiguousarray(
            np.tile(mu, (n_iter + 1, n_topics)), dtype=np.float64)
    # initialize counts
    for j in range(n_tokens):
        preincrement(ndz[doc_lookup[j], topic_lookup[j]])
        preincrement(nzw[topic_lookup[j], term_lookup[j]])
        preincrement(nz[topic_lookup[j]])
        preincrement(nd[doc_lookup[j]])
    # initialize sum_alpha (total topic-prior mass; not read again below)
    for k in range(n_topics):
        sum_alpha += alpha[k]
    # initialize sum_beta (total term-prior mass)
    for w in range(n_terms):
        sum_beta += beta[w]
    # initialize neighbors (adjacency lists built from the sparse matrix)
    for d, _d in zip(*adj_mat.nonzero()):
        neighbors[d, neighbor_index[d]] = _d
        neighbor_index[d] += 1
    # initialize ndznd
    for d in range(n_docs):
        for n in range(n_neighbors[d]):
            _d = neighbors[d, n]
            nd__d = nd[_d]
            for k in range(n_topics):
                ndznd[d, k] += ndz[_d, k] / nd__d
    # initialize bold_z
    for d in range(n_docs):
        nd_d = nd[d]
        for n in range(n_neighbors[d]):
            _d = neighbors[d, n]
            nd__d = nd[_d]
            for k in range(n_topics):
                bold_z[k] += ndz[d, k] / nd_d * ndz[_d, k] / nd__d
    # divide by 2 because we double-counted the edges
    for k in range(n_topics):
        bold_z[k] = bold_z[k] / 2.
    # iterate
    start_time = datetime.now()
    print('{} start iterations'.format(start_time))
    for i in range(n_iter):
        for j in range(n_tokens):
            d = doc_lookup[j]
            w = term_lookup[j]
            z = topic_lookup[j]
            predecrement(ndz[d, z])
            predecrement(nzw[z, w])
            predecrement(nz[z])
            p_sum = 0.
            for k in range(n_topics):
                # LDA factor times the network factor exp(eta_k zbar_dk' / nu)
                p_sum += (nzw[k, w] + beta[w]) / (nz[k] + sum_beta) * (ndz[d, k] + alpha[k]) * \
                    exp(eta[i, k] / nd[d] / nu * ndznd[d, k])
                p_cumsum[k] = p_sum
            preincrement(u)
            if u == n_rands:
                u = 0
            uval = rands[u] * p_sum
            new_z = topic_lookup[j] = searchsorted(p_cumsum, uval)
            preincrement(ndz[d, new_z])
            preincrement(nzw[new_z, w])
            preincrement(nz[new_z])
            # incrementally propagate the move z -> new_z into the neighbor
            # caches (ndznd of each neighbor, and the global bold_z)
            for n in range(n_neighbors[d]):
                _d = neighbors[d, n]
                nd_d = nd[d]
                nd__d = nd[_d]
                ndznd[_d, z] -= 1. / nd_d
                ndznd[_d, new_z] += 1. / nd_d
                bold_z[z] -= ndz[_d, z] / nd__d / nd_d
                bold_z[new_z] += ndz[_d, new_z] / nd__d / nd_d
        # sample eta around its posterior mean mu + sigma2 * bold_z / nu
        # NOTE(review): gaussian() receives sigma2, whose name suggests a
        # variance — confirm gaussian() expects a variance, not a std dev
        for k in range(n_topics):
            eta[i + 1, k] = gaussian(r, sigma2) + mu + sigma2 * bold_z[k] / nu
        lL[i] = loglikelihood_rtm(nzw, ndz, nz, alpha, beta, sum_beta,
                                  mu, nu, sigma2, eta[i + 1], bold_z)
        # print progress
        print_progress(start_time, n_report_iter, i, lL[i], lL[i - n_report_iter])
    # populate the topic and word distributions
    theta = estimate_matrix(ndz, alpha, n_docs)
    phi = estimate_matrix(nzw, beta, n_topics)
    return theta, phi, np.asarray(eta), np.asarray(lL)


def gibbs_sampler_blhslda(int n_iter, int n_report_iter,
                          int n_topics, int n_docs,
                          int n_terms, int n_tokens,
                          double[:] alpha, double[:] beta,
                          double mu, double nu2, double b,
                          int[:] doc_lookup, int[:] term_lookup,
                          int[:,:] y, int[:] hier, seed):
    """
    Perform (collapsed) Gibbs sampling inference for Binary Logistic HSLDA.
    """

    cdef:
        int i, j, k, d, w, z, new_z, n_labels = y.shape[1], l, pa_l
        double p_sum, uval, eta_sum, eta_l, l_sum, exp_eta_l
        double sum_alpha = 0.
        double sum_beta = 0.
986 | int[:] topic_lookup = create_topic_lookup(n_tokens, n_topics, seed) 987 | # log likelihoods 988 | double[::1] lL = np.empty(n_iter, dtype=np.float64, order='C') 989 | # number of tokens in document d assigned to topic z, shape = (n_docs, n_topics) 990 | int[:, ::1] ndz = np.zeros((n_docs, n_topics), dtype=np.intc, order='C') 991 | # number of tokens assigned to topic z equal to term w, shape = (n_topics, n_terms) 992 | int[:, ::1] nzw = np.zeros((n_topics, n_terms), dtype=np.intc, order='C') 993 | # number of tokens assigned to topic k, shape = (n_topics,) 994 | int[::1] nz = np.zeros(n_topics, dtype=np.intc, order='C') 995 | # number of tokens in doc d, shape = (n_docs,) 996 | int[::1] nd = np.zeros(n_docs, dtype=np.intc, order='C') 997 | # (weighted) probabilities for the discrete distribution 998 | double[::1] p_cumsum = np.empty(n_topics, dtype=np.float64, order='C') 999 | # preallocate uniformly random numbers on the interval [0, 1) 1000 | double[:] rands = create_rands(n_rands=n_rands, seed=seed) 1001 | int u = 0 1002 | # regression coefficients 1003 | 1004 | double [:, :, ::1] eta = np.zeros((n_iter + 1, n_labels, n_topics), dtype=np.float64, order='C') 1005 | double [:, :, ::1] etand = np.empty((n_docs, n_labels, n_topics), dtype=np.float64, order='C') 1006 | double [::1] eta_mean = np.empty(n_topics, dtype=np.float64, order='C') 1007 | 1008 | # omega: notice I'm initializing omega here 1009 | double[:, ::1] omega = np.ones((n_docs,n_labels), dtype=np.float64, order='C') 1010 | 1011 | # kappa: a transformation of y 1012 | double[:, ::1] kappa = b * (np.asarray(y) - 0.5) 1013 | 1014 | double [::1] kappa_sum = np.zeros(n_labels, dtype=np.float64, order='C') 1015 | 1016 | # initialize counts 1017 | for j in range(n_tokens): 1018 | preincrement(ndz[doc_lookup[j], topic_lookup[j]]) 1019 | preincrement(nzw[topic_lookup[j], term_lookup[j]]) 1020 | preincrement(nz[topic_lookup[j]]) 1021 | preincrement(nd[doc_lookup[j]]) 1022 | # initialize sum_alpha, 
lBeta_alpha 1023 | for k in range(n_topics): 1024 | sum_alpha += alpha[k] 1025 | # initialize sum_beta, lBeta_beta 1026 | for w in range(n_terms): 1027 | sum_beta += beta[w] 1028 | # define numpy variables 1029 | Inu2 = np.identity(n_topics) / nu2 1030 | munu2 = np.repeat(mu / nu2, n_topics) 1031 | # define PolyaGamma sampler 1032 | pg_rng = PyPolyaGamma(seed=seed or 42) 1033 | # iterate 1034 | start_time = datetime.now() 1035 | print('{} start iterations'.format(start_time)) 1036 | for i in range(n_iter): 1037 | # sample omega 1038 | for d in range(n_docs): 1039 | for l in range(n_labels): 1040 | eta_sum = 0. 1041 | for k in range(n_topics): 1042 | # initialize etand for iteration i 1043 | etand[d, l, k] = eta[i, l, k] / nd[d] 1044 | eta_sum += etand[d, l, k] * ndz[d, k] 1045 | omega[d,l] = pg_rng.pgdraw(b, eta_sum) 1046 | 1047 | # sample z 1048 | for j in range(n_tokens): 1049 | d = doc_lookup[j] 1050 | w = term_lookup[j] 1051 | z = topic_lookup[j] 1052 | predecrement(ndz[d, z]) 1053 | predecrement(nzw[z, w]) 1054 | predecrement(nz[z]) 1055 | 1056 | for l in range(n_labels): 1057 | pa_l = hier[l] 1058 | if pa_l == -1: 1059 | pa_l = l 1060 | if y[d, pa_l] == 1: 1061 | kappa_sum[l] = kappa[d,l] 1062 | for k in range(n_topics): 1063 | kappa_sum[l] -= omega[d,l] * etand[d, l, k] * ndz[d, k] 1064 | 1065 | eta_l = 0. 1066 | for l in range(n_labels): 1067 | pa_l = hier[l] 1068 | if pa_l == -1: 1069 | pa_l = l 1070 | if y[d, pa_l] == 1: 1071 | l_sum = 0. 1072 | for k in range(n_topics): 1073 | l_sum += etand[d, l, k] * \ 1074 | (kappa_sum[l] - omega[d,l] / 2 * etand[d, l, k]) 1075 | eta_l += l_sum 1076 | 1077 | p_sum = 0. 
1078 | for k in range(n_topics): 1079 | p_sum += (nzw[k, w] + beta[w]) \ 1080 | / (nz[k] + sum_beta) \ 1081 | * (ndz[d, k] + alpha[k]) \ 1082 | * exp(eta_l) 1083 | p_cumsum[k] = p_sum 1084 | preincrement(u) 1085 | if u == n_rands: 1086 | u = 0 1087 | uval = rands[u] * p_sum 1088 | new_z = topic_lookup[j] = searchsorted(p_cumsum, uval) 1089 | preincrement(ndz[d, new_z]) 1090 | preincrement(nzw[new_z, w]) 1091 | preincrement(nz[new_z]) 1092 | 1093 | #sample eta 1094 | Z = (np.asarray(ndz) / np.asarray(nd)[:, np.newaxis]).T 1095 | for l in range(n_labels): 1096 | Omega = np.asarray(omega[::,l])[np.newaxis, :] 1097 | Kappa = np.asarray(kappa[::,l]) 1098 | eta_mean = np.linalg.solve(Inu2 + np.dot(Z * Omega, Z.T), 1099 | munu2 + np.dot(Z, Kappa)) 1100 | # TODO currently setting eta to mean, but need to sample 1101 | for k in range(n_topics): 1102 | eta[i + 1, l, k] = eta_mean[k] 1103 | 1104 | #compute log-likelihood 1105 | lL[i] = loglikelihood_blhslda(nzw, ndz, nz, alpha, beta, sum_beta, 1106 | mu, nu2, b, eta[i + 1], y, Z) 1107 | 1108 | # print progress 1109 | print_progress(start_time, n_report_iter, i, lL[i], lL[i - n_report_iter]) 1110 | # populate the topic and word distributions 1111 | theta = estimate_matrix(ndz, alpha, n_docs) 1112 | phi = estimate_matrix(nzw, beta, n_topics) 1113 | return theta, phi, np.asarray(eta), np.asarray(lL) 1114 | -------------------------------------------------------------------------------- /examples/KL_thresh_blslda.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# This notebook is used to decide on a tolerable level of corruptableness.\n", 12 | "%matplotlib inline\n", 13 | "\n", 14 | "import numpy as np\n", 15 | "import matplotlib.pyplot as plt\n", 16 | "import pandas as pd\n", 17 | "\n", 18 | "from scipy.stats import entropy as 
KL_divergence\n", 19 | "\n", 20 | "from slda.topic_models import BLSLDA\n", 21 | "from modules.helpers import plot_images" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 24, 27 | "metadata": { 28 | "collapsed": false 29 | }, 30 | "outputs": [ 31 | { 32 | "name": "stdout", 33 | "output_type": "stream", 34 | "text": [ 35 | "2016-06-16 14:19:32.334362 start iterations\n", 36 | "2016-06-16 14:19:40.616433 0:00:08.282071 elapsed, iter 100, LL 57698.8614, 117.38% change from last\n", 37 | "2016-06-16 14:19:48.483770 0:00:16.149408 elapsed, iter 200, LL 62979.3599, 9.15% change from last\n", 38 | "2016-06-16 14:19:56.034072 0:00:23.699710 elapsed, iter 300, LL 62048.8616, -1.48% change from last\n", 39 | "2016-06-16 14:20:03.887979 0:00:31.553617 elapsed, iter 400, LL 62357.1662, 0.50% change from last\n", 40 | "2016-06-16 14:20:11.963857 0:00:39.629495 elapsed, iter 500, LL 62202.0333, -0.25% change from last\n", 41 | "2016-06-16 14:20:20.023174 0:00:47.688812 elapsed, iter 600, LL 62804.2706, 0.97% change from last\n", 42 | "2016-06-16 14:20:28.223925 0:00:55.889563 elapsed, iter 700, LL 62088.5786, -1.14% change from last\n", 43 | "2016-06-16 14:20:36.536799 0:01:04.202437 elapsed, iter 800, LL 63988.5573, 3.06% change from last\n", 44 | "2016-06-16 14:20:44.648887 0:01:12.314525 elapsed, iter 900, LL 64792.9892, 1.26% change from last\n", 45 | "2016-06-16 14:20:52.707683 0:01:20.373321 elapsed, iter 1000, LL 67945.2241, 4.87% change from last\n", 46 | "2016-06-16 14:21:00.774657 0:01:28.440295 elapsed, iter 1100, LL 81897.7073, 20.53% change from last\n", 47 | "2016-06-16 14:21:08.780748 0:01:36.446386 elapsed, iter 1200, LL 113593.4318, 38.70% change from last\n", 48 | "2016-06-16 14:21:16.766334 0:01:44.431972 elapsed, iter 1300, LL 115795.5347, 1.94% change from last\n", 49 | "2016-06-16 14:21:24.811633 0:01:52.477271 elapsed, iter 1400, LL 117524.3091, 1.49% change from last\n" 50 | ] 51 | } 52 | ], 53 | "source": [ 54 | "# Generate 
topics\n", 55 | "# We assume a vocabulary of 'rows'^2 terms, and create 'rows'*2 \"topics\",\n", 56 | "# where each topic assigns exactly 'rows' consecutive terms equal probability.\n", 57 | "rows = 3\n", 58 | "V = rows * rows\n", 59 | "K = rows * 2\n", 60 | "N = K * K\n", 61 | "D = 10000\n", 62 | "seed = 42\n", 63 | "topics = []\n", 64 | "topic_base = np.concatenate((np.ones((1, rows)) * (1/rows),\n", 65 | " np.zeros((rows-1, rows))), axis=0).ravel()\n", 66 | "for i in range(rows):\n", 67 | " topics.append(np.roll(topic_base, i * rows))\n", 68 | "topic_base = np.concatenate((np.ones((rows, 1)) * (1/rows),\n", 69 | " np.zeros((rows, rows-1))), axis=1).ravel()\n", 70 | "for i in range(rows):\n", 71 | " topics.append(np.roll(topic_base, i))\n", 72 | "topics = np.array(topics)\n", 73 | "\n", 74 | "# Generate documents from topics\n", 75 | "# We generate D documents from these V topics by sampling D topic\n", 76 | "# distributions, one for each document, from a Dirichlet distribution with\n", 77 | "# parameter α=(1,…,1)\n", 78 | "alpha = np.ones(K)\n", 79 | "np.random.seed(seed)\n", 80 | "thetas = np.random.dirichlet(alpha, size=D)\n", 81 | "topic_assignments = np.array([np.random.choice(range(K), size=N, p=theta)\n", 82 | " for theta in thetas])\n", 83 | "word_assignments = np.array([[np.random.choice(range(V), size=1,\n", 84 | " p=topics[topic_assignments[d, n]])[0]\n", 85 | " for n in range(N)] for d in range(D)])\n", 86 | "doc_term_matrix = np.array([np.histogram(word_assignments[d], bins=V,\n", 87 | " range=(0, V - 1))[0] for d in range(D)])\n", 88 | "\n", 89 | "#Generate responses\n", 90 | "# choose parameter values\n", 91 | "mu = 0.\n", 92 | "nu2 = 1.\n", 93 | "np.random.seed(seed)\n", 94 | "eta = np.random.normal(loc=mu, scale=nu2, size=K)\n", 95 | "# plot histogram of pre-responses\n", 96 | "zeta = np.array([np.dot(eta, thetas[i]) for i in range(D)])\n", 97 | "\n", 98 | "# choose parameter values\n", 99 | "y = (zeta >= 0).astype(int)\n", 100 | "\n", 101 | "_K 
= K\n", 102 | "_alpha = alpha\n", 103 | "_beta = np.repeat(0.01, V)\n", 104 | "_mu = mu\n", 105 | "_nu2 = nu2\n", 106 | "_b = 7.25\n", 107 | "n_iter = 1500\n", 108 | "blslda = BLSLDA(_K, _alpha, _beta, _mu, _nu2, _b, n_iter, seed=seed, n_report_iter=100)\n", 109 | "\n", 110 | "blslda.fit(doc_term_matrix, y)\n", 111 | "results = blslda.phi" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 25, 117 | "metadata": { 118 | "collapsed": false 119 | }, 120 | "outputs": [ 121 | { 122 | "name": "stdout", 123 | "output_type": "stream", 124 | "text": [ 125 | "0.000453411850022\n", 126 | "0.00026092354286\n", 127 | "0.0260038860991\n", 128 | "0.00064661114753\n", 129 | "0.0026590186258\n", 130 | "0.00151054295545\n" 131 | ] 132 | } 133 | ], 134 | "source": [ 135 | "for res in results:\n", 136 | " minimized_KL = 1\n", 137 | " for topic in topics:\n", 138 | " KL = KL_divergence(topic, res)\n", 139 | " if KL < minimized_KL:\n", 140 | " minimized_KL = KL\n", 141 | " print(minimized_KL)" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": 26, 147 | "metadata": { 148 | "collapsed": false 149 | }, 150 | "outputs": [ 151 | { 152 | "data": { 153 | "text/plain": [ 154 | "[]" 155 | ] 156 | }, 157 | "execution_count": 26, 158 | "metadata": {}, 159 | "output_type": "execute_result" 160 | }, 161 | { 162 | "data": { 163 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYEAAAEHCAYAAABIsPrhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAH5RJREFUeJzt3X2wHXWd5/H3J4nBUUh4EKGKh+giDOKME2ULggGSWnR4\ncIpYlg5aVklwpmSRWajyD0CXkhApIMwfOhnXZXHRAXaoSFEzEERLHJmLPBshAWchEKZ4CgzBJQSE\nACbhu39030vn5Jx7z739O6f7dn9eVbdyHrr79zv5nL7f231O/36KCMzMrJ1mVN0BMzOrjouAmVmL\nuQiYmbWYi4CZWYu5CJiZtZiLgJlZi7kImJm1mItAQpL2kvTPkl6T9KSkL1bdJytH0tmS1kh6U9IP\nq+6PpSFptqT/LekpSa9IekDSSVX3qwqzqu5Aw3wfeBPYF/g4cKukdRHxaLXdshKeA74NnAj8UcV9\nsXRmAc8Ax0XEs5I+Ddwg6U8i4pmK+zZU8hXDaUh6D/AycERE/Hv+2LXAxoj4ZqWds9IkfRs4ICK+\nUnVfbDAkPQQsi4h/rrovw+TTQekcBmwfLQC5h4CPVNQfM+uTpP2AQ4H/W3Vfhs1FIJ3dgVc6HnsF\n2KOCvphZnyTNAv4P8A8R8XjV/Rk2F4F0XgPmdDw2B/h9BX0xsz5IElkBeAv4bxV3pxIuAuk8DsyS\ndEjhsT+jhYeXZtPI1cD7gM9GxI6qO1MFF4FEImIr8E/AcknvkbQQOBW4rtqeWRmSZkp6NzCTrMjv\nJmlm1f2y8iRdCRwOnBoRf6i6P1VxEUjrbOA9wIvAPwL/1V8PnfYuBLYC5wNfym//90p7ZKVJOhj4\nKjAf2CTp95JebeO1PaW+IippL+DHwDzgKeAvI6Lzw1Ek7SD7poyApyPiM1Nu1AbOuTaXs7VOZYvA\nCuCliLhC0vnAXhFxQZflXo2Izg9Nraaca3M5W+tUtgisBxZFxCZJ+wMjEXF4l+V+HxH+quQ04Vyb\ny9lap7KfCbw/IjYBRMQLZMMldLObpF9LukfSkpJt2uA51+ZytraTCccOkvQLYL/iQ0CQfWDWr4Mj\n4gVJHwRul/RwRDw5ua5aSs61uZytTcaERSAiPtXrOUmbJO1XOLR8scc2Xsj/fVLSCPAxYJc3lCQP\nZFS9myHLIiKUItfR7Q2muzYJo9l6n22giNBU1is7iuhqYCmwAjid/E1WJGlPYGtE/EHS+4BP5MvX\n0gUXXMA3vvGNUtu47LLLSm3jsssu4/LLLy/Vh4QakavtZCkJ99nddtttyh3Zvn07s2aV+zX01ltv\nlVo/hVdffbXU+pdeeinf/ObUx5mcM2fqn+GX/UxgBfApSY8BnwQuB5B0pKSr8mU+DPxG0lrgl8Bl\nEbG+ZLs2BM61sbzP2phSJTgiNpO9kToff4DsQgwi4l7go2XasWpExB933HeuDRAR3mdtjK8Y7nDs\nscdWvo0UfTCbDmbM8K8ggOOOO66ytms1qUwdPmR65ZVdLp6sxNy5c6vuwpQ/aOqmDtlaJnWuZT4T\nSKEJnwmUNWfOnCnn6jJsZtZiLgJmZi3mImBm1mIuAmZmLeYiYGbWYi4CZmYtlqQISDpJ0npJj+dj\nlHc+P1vSKkkbJN2bz+pjNedcm8vZ2qjSRUDSDOB7wInAR4AvSuocn/yvgM0RcSjwXeCKsu3aUDjX\nBvI+a0UpjgSOAjZExNMRsQ1YBXSOP74EuCa/fSNwQoJ2bfCcazN5n7UxKYrAAcCzhfsb88e6LhMR\nO4AtkvZO0LYNlnNtJu+zNqbsUNKQTVjRqXOIgM5l1GUZqz/n2gxJ99nt27eP3Z4xY4bHAxqCO++8\nkzvvvDPJtlIUgY1A8UOjA4HnO5Z5FjgIeF7STGBORLycoG0bLOfaTEn32bLzAdjkHXfccTsNOldm\n/pEUJXsN8CFJ8yTNBr5ANtlM0S1kE1gAfB64PUG7NnjOtZm8z
9qY0iU8InZI+hvgNrKicnVEPCrp\nYmBNRPwEuBq4TtIG4CWyN53Vn3NtIO+zVuShpDt4KOl3eCjpZvJQ0ul5KGkzM5uWXATMzFrMRcDM\nrMVcBMzMWsxFwMysxVwEzMxazEXAzKzFhjWfwOmSXpT0YP7zlRTt2mA51+Zytjaq9BXDhbHJTyAb\nf2SNpJsjYn3Hoqsi4pyy7dlQOdcG8j5rRcOaTwC6j1xo9eZcm8n7rI0Z1nwCAJ+VtE7SDZIOTNCu\nDZ5zbSbvszZmWPMJrAauj4htks4km7GoljMVzZkzp+ou1FmpXC+66KKx24sXL2bx4sWD6GNPb7zx\nxlDb6+bd7373UNsbGRlhZGRk7P7y5csh8T5bHLtHElL7DiD22GOPobbXmWsZpQeQk7QAWBYRJ+X3\nLwAiIlb0WH4G2dyle3Z5rvJBxuoyoF5NdqSfp8g1fz6q/r9tYxHolE/4cgwJ99mqJ5F5++23K20f\nqv+9IanSAeQmHJtc0v6Fu0uARxK0a4PnXJvJ+6yNGdZ8AudIOhXYBmwGlpZt14bCuTaQ91kr8nwC\nHery/1GH00Gpx52v+v/Wp4Oy00Gpc/XpoOp/b1R9OsjMzKYpFwEzsxZzETAzazEXATOzFnMRMDNr\nMRcBM7MWcxEwM2uxVPMJXC1pk6SHx1lmpaQN+YBU81O0a4PlXJvJuVpRqiOBHwEn9npS0snAIRFx\nKHAmcGWidm2wnGszOVcbk6QIRMRdwMvjLLIEuDZf9n5grqT9UrRtg+Ncm8m5WtGwPhPoHL/8ObqP\nX27Ti3NtJufaIsMqAv2MX27Tj3NtJufaIikmlenHRuCgwv0DyeY2teltUrkuW7Zs7HYVk8q00RQn\nH5lUrsUB3No6qcyw1WpSmbENSR8AbomIP+3y3CnA2RHx6XwSmu9GxIIuy1X+10bVowGOqsOOFBFK\nkWu+rEcRpT6jiKbM1aOIVv97o8wookmOBCRdDywG9pH0DHARMJtstqKrIuKnkk6R9ATwOnBGinZt\nsJxrMzlXK/J8Ah3q8v9RlyOBVNvykUCmLkcCqbbnI4FM1e9tzydgZmZT4iJgZtZiLgJmZi3mImBm\n1mIuAmZmLeYiYGbWYi4CZmYt5iJgZtZiQ5lURtIiSVskPZj/XJiiXRss59pMztWKUg0g9yPg78nH\nIO/hVxFxaqL2bDicazM5VxszrElloPvwtFZjzrWZnKsVDfMzgQWS1kq6VdIRQ2zXBsu5NpNzbYlh\nzSfwADAvIrbm85feBBw2pLYnZcuWLVV3YTqZVK51GBTP+jLlXGfMmMHMmTMH38OCN998c6jtdbN5\n8+ahtnfXXXdx9913J9lWyvkE5pGNT/7RPpZ9EjgyIjZ3PF75EJ4vvzzRUfJw7LXXXlV3YXTc+dK5\n5s9Vnq1lUuda9ciodSgCL730UqXt77PPPrUYRVT0OI9YnKRa0lFkxWe4pdOmyrk2k3M1YEiTygCf\nk3QWsA14AzgtRbs2WM61mZyrFXlSmQ4+HfSO1JOPpNqWlZM6V58O8ukgMzObplwEzMxazEXAzKzF\nXATMzFrMRcDMrMVcBMzMWsxFwMysxUoXAUkHSrpd0iOSfivpnB7LrZS0QdI6SfPLtmuD51ybyfus\nFaW4Yng78PWIWCdpd+ABSbdFxPrRBfJBqA6JiEMlHQ1cCSxI0LYNlnNtJu+zNqb0kUBEvBAR6/Lb\nrwGPAgd0LLaEfAKLiLgfmFscn8Tqybk2k/dZK0r6mYCkDwDzgfs7njoAeLZw/zl2fdNZTTnX5nK2\nlqwI5IeVNwLn5n9d7PR0l1U8lsw04Fyby9kapBtFdBbZm+m6iLi5yyIbgYMK9w8Enk/Rtg2Oc22u\nlNlu27Zt7HYVk8q0Ue0mlZF0LfD/IuLrPZ4/BTg7Ij4taQHw3YjY5UOmOow06VFEd3IdCXLNl608\nW8vkk8ok22c9iuj0HkW09
JGApIXAl4DfSlpLdsj4TWAe+fjkEfFTSadIegJ4HTijbLs2FM61gbzP\nWpHnE+jgI4F3eD6BZvJ8AulN5yMBXzFsZtZiLgJmZi3mImBm1mIuAmZmLeYiYGbWYi4CZmYt5iJg\nZtZiLgJmZi02lEllJC2StEXSg/nPhWXbHZS77rqr8m2k6EMKTcrV3lGnfXbHjh2D2Oy0U+U+n+JI\nYHSCiiOAY4CzJR3eZblfRcTH859LErQ7EC4CO2lMrraT2uyzb7/99iA2O+2kGgxuKoY1qQx0H5rW\nasy5NpP3WSsa1qQyAAskrZV0q6QjUrZrg+Vcm8vZGhGR5AfYHfgNsKTHc+/Jb58MPN5jG+Gf+vyk\nytXZ1uvH+2wzf6b6uzvVfAKzgJ8AP4uIv+tj+SeBIyNic+nGbWCca3M5WxuV6nTQD4FHer2ZihNU\nSzqKbAhrv5nqz7k2l7M1YEiTygCfk3QWsA14AzitbLs2WM61uZytFdVqUhkzMxuuSq8YlrSXpNsk\nPSbp55Lm9lhuR37BylpJN+WPnSRpvaTHJZ3fZZ3ZklZJ2iDpXkkHdzw/0fqnS3qxcLHMVzqev1rS\nJkkPj/P6Vubtr5M0fzLrT3SxTj8X/PTRh4FdNDTVbJ2rc61jrv1sY9DZDizXVN8OmuI3ilYA5+W3\nzwcu77Hcqx33ZwBPkB2+vgtYBxzescxZwPfz26cBqya5/unAynH6fizZV+se7vH8ycCt+e2jgfsm\nuf4iYPU47e8PzC98k+OxLq9hoj70s41x+5EyW+fqXOuaax2yHVSuVY8dtAS4Jr99DfCZHst1XrRy\nFLAhIp6OiG3AqnxbvbZ9I3DCJNfv1u6YiLgLGG9C4iXAtfmy9wNzVfiwrY/1J2q/nwt+JurDIC8a\nmkq2ztW5Qg1z7XMbE/WhVLaDyrXqIvD+iNgE2QsE9u2x3G6Sfi3pHklLyF74s4XnN7Lrf8bYMhGx\nA9giae/O58ZZH+Cz+SHZDZIOnMwL69LGcz3aGE9fF+uo9wU/ffdhnG303Y8Ok86W7BeKc80512mX\nKwwp25S5lv520EQk/QIoVlSRfRthMgNSHRwRL0j6IHA78Lddlun8hLuzGqqwTLdK2bn+auD6iNgm\n6Uyyv1JO2HW1nvppYzwPAPMiYqukk4GbgMN2aUTanewvp3Pzvw4m3YcJttGzHwPI9j6yfMfrr3N1\nrt3WrzpXGFK2U821l4EXgYj4VK/n8g9Z9ouITZL2B17ssY0X8n+flDQCzAGKHxwdCDzfsdqzwEHA\n85JmAnMiYvRQbuNE6xeWBfgB2bnQcUm6juyN917gbeDLwD3j9LGnYrgR8TNJ35e0dxS+q63sgp8b\ngesi4uYum9lI9n8wapc+TLSN8foxgGzXAMWBzGqRa+E1HQqcBOxBdu61Vx97cq7d+0s1++sI2Xn3\nbcC7yf6yHj2CmFSueR8Gnm2ZXHv1u+rTQauBpfnt04FdXpSkPSXNzm+/D/hEvt6HJM3Ln/tC/ljR\nLfk2AT7Pzn+JrJlo/fwNPmoJ8EiX/oudK/elZFV4LnAesFTSxyQtALaMHkaPs36x/X4u1hn3gp/8\nNX0530avPgzqoqGpZHsosHcNcx31PbLzsIfk23Ku0zvXAL4WEXPITlk9lG+r1/9pt20U+zCMbNPn\nGpP8dkDKH2Bv4F/IPuX+BbBn/viRwFX57WOAh4G1ZCEtzR8/KV9vA3BB/tjFwF/kt3cDbsifvw/4\nQEfbE61/KfBvebu/BA7rWP96sgr9FvAMcAZwJvDV/Pk/Bl4H/iPv98cnuf7ZhfbvAY7uWH8hsIPs\nmxJrgQfz1zS2jXy575F9s6JbHybcxkT9SJ1tjXO9iuwDyW8B68f5P3Wu0yfXx4B/6Of/tA7ZDipX\nXyyWmKT/Qfam/yOykI6PiK2VdspKkTSH7K/R/wL8NXBIRHy52l5ZWZL+FTiC7C/7x4ALI+K
Oans1\nfFWfDmqciDib7Du8xwL/RPZXg01vy4EfRMRzVXfEkjoP+E9k3775AXBL/kF2q7gIDEBk7iH7gOes\nqvtjU6fsis1PAt+tui+WVkSsiYjXI2JbRFwL3A2cUnW/hm3g3w5quVnkHyLatLWI7ErVZySJ7Chv\npqQjIuI/V9s1SyyY2gV005qPBBKRtK+k0yS9V9IMSSeSfYvhl1X3zUr5X2SFfD7wZ8CVZOPw/3mV\nnbJyJM2V9OeSdpM0U9KXgOOAn1fdt2HzkUA6QXbq53+SFdenyS7m+EmlvbJSIuJN4M3R+5JeA94M\nj60/3b0LuITsW3w7yL7xtSQiNlTaqwqU+naQpL2AH5MdLj8F/GVEvNJluR1kX3cS8HRE9BpvxGrA\nuTaXs7VOZYvACuCliLhC2fCue0XEBV2WezWyCzJsGnCuzeVsrVPZIrAeWBTvXEI+EhGHd1nu9xGx\nR4l+2hA51+Zyttap7AfDUx0F1OrNuTaXs7WdTPjBsAYwCqikhyPiyS5t+fLlmpAUETHR1+X6ynV0\ne+l7aQPkfXaa6WN/7WrCIhCDGQX0Y0DXXxa/+93v+ul3T1dccQXnnXfelNdfuXIl3/rWt0r1Yfny\n5aW2sXz5cpYvX16qD8uWLWPZsmVTWvfDH/4w69evB8YG5iqdK8D27dun1B+Aiy++mIsuuqjU+t/+\n9renvH6TpN5nbXorezpoqqOAdhvhz2ri1FNPLd51rs2zNP/X2VrpIrAC+JSkx8gurb8cQNKRkq7K\nl/kw8BtJo6P7XRYR60u2awN0/vnZPN7OtbG8z9qYWo0iKinKng66++67Wbhw4ZTXf+ihh1i0aNHE\nC47jjjvuKLWNO+64gxNOmMykSLsaGRlh8eLFU15f0pTPMfbYXpQ5HVT29YyMjPDJT35yyus3Sepc\nU23Lyplqro0rAmXtueeelbY/atasai/mrlsRSKHq/9O6cBFopqnm6rGDzMxazEXAzKzFXATMzFrM\nRcDMrMVcBMzMWsxFwMysxZIUAUknSVov6fF8eNrO52dLWiVpg6R7JR2col0bLOfaXM7WRpUuApJm\nAN8DTgQ+AnxRUufQtH8FbI6IQ8km7L6ibLs2FM61gbzPWlGKI4GjgA0R8XREbANWAZ1Dzy4Brslv\n3wiUuxzWhsW5NpP3WRuToggcADxbuL8xf6zrMhGxA9giae8EbdtgOddm8j5rY1IUgW6XKndeSt65\njLosY/XnXJvB+6yNSTGYykag+KHRgcDzHcs8CxwEPC9pJjAnIl7utrErrnjn1OPChQtLDQZn/RsZ\nGWFkZKTz4WS5Qjam/6hFixaVGhDOSkm6z9r0VnoAufwN8hjZOcP/AH4NfDEiHi0s8zXgTyLia5K+\nAHwmIr7QZVseQC5X9WBnkgD+nQS55st6ALn6mEXCfXZIfbYJDGxmsT4a3iHpb4DbyE4vXR0Rj0q6\nGFgTET8Brgauk7QBeAno+ovCase5NpD3WSvyUNIdfCSQ8VDSzeWhpJvJQ0mbmdmkuQiYmbWYi4CZ\nWYu5CJiZtZiLgJlZi7kImJm1mIuAmVmLuQiYmbXYsCaVOV3Si5IezH++kqJdGyzn2lzO1kaVvoSy\nMEHFCWSDUK2RdHNErO9YdFVEnFO2PRsq59pA3metaFiTykD34Wut3pxrM3mftTEpBlPpNkHFUV2W\n+6yk44DHga9HxMZuG9t3330TdMkSSZYrwB/+8IfE3ZucfGTUStVkrK6k+6xNbymKQD8TVKwGro+I\nbZLOJJu2ztPVTT+lcr3kkkvGbh9//PEcf/zxA+mkTcj7rI0ZyqQyHZNR/ABYkaBdG7ykuV544YXp\nemZleJ+1MSk+E1gDfEjSPEmzycYdX11cQNL+hbtLgEcStGuD51ybyfusjRnWpDLnSDoV2AZsBpaW\nbdeGwrk2kPdZK6rdpDJV98HekXryka1bt6ba3JS8973
vrbR9qMcHw55Uppk8qYyZmU2ai4CZWYu5\nCJiZtZiLgJlZi7kImJm1mIuAmVmLuQiYmbVYqvkErpa0SdLD4yyzUtIGSeskzU/Rrg2Wc20m52pF\nqY4EfgSc2OtJSScDh0TEocCZwJWJ2rXBcq7N5FxtTJIiEBF3AS+Ps8gS4Np82fuBuZL2S9G2DY5z\nbSbnakXD+kygc/zy5/LHbHpzrs3kXFtkWEWgn/HLbfpxrs3kXFskxXwC/dgIHFS4v8v45TYtTSpX\nTyozbXh/bZGURUD0npN0NXA28GNJC4AtEbEpYds2OMly9aQyteL91YBERUDS9cBiYB9JzwAXAbOB\niIirIuKnkk6R9ATwOnBGinZtsJxrMzlXK/J8AtaT5xNIrw77m+cTaCbPJ2BmZpPmImBm1mIuAmZm\nLeYiYGbWYi4CZmYt5iJgZtZiLgJmZi3mImBm1mJDmVRG0iJJWyQ9mP94/IBpwLk2k3O1olRjB/0I\n+HvyMch7+FVEnJqoPRsO59pMztXGDGtSGeg9WJXVlHNtJudqRcP8TGCBpLWSbpV0xBDbtcFyrs3k\nXFtiWPMJPADMi4it+fylNwGHDaltG5xJ5bpixYqx24sXL2bx4sUD72BRHQZvmyYmleu55547dnvB\nggUcc8wxg+9hwT777DPU9rrZfffdh9reyMgIIyMjY/cvvvjiKW8r2SiikuYBt0TER/tY9kngyIjY\n3PG499IaiQilyDV/Lqr+JSz5DAekz/Wpp54aQC/718Yi0ElSLUYR7TlJRXGSaklHkRWfXd5QVkvO\ntZmcqwFDmlQG+Jyks4BtwBvAaSnatcFyrs3kXK3Ik8pYT6knH6n6vebTQZnUufp0kE8HmZnZNOUi\nYGbWYi4CZmYt5iJgZtZiLgJmZi3mImBm1mIuAmZmLVa6CEg6UNLtkh6R9FtJ5/RYbqWkDZLWSZpf\ntl0bPOfaTN5nrSjFFcPbga9HxDpJuwMPSLotItaPLpAPQnVIRBwq6WjgSmBBgrZtsJxrM3mftTGl\njwQi4oWIWJfffg14FDigY7El5BNYRMT9wNzi+CRWT861mbzPWlHSzwQkfQCYD9zf8dQBwLOF+8+x\n65vOasq5NpeztWTzCeSHlTcC5+Z/Xez0dJdVPE7QNJAy12XLlo3drmI+AdtZqmy/853vjN2uYj6B\nNuqcT6CMJAPISZoF/AT4WUT8XZfnrwT+NSJ+nN9fDyyKiE0dy7kw1Mu7SJBr/pwHkKuJfD6BZPus\nB5DzAHIAPwQe6fZmyq0GvgwgaQGwpdsvCqsd59pcztaABKeDJC0EvgT8VtJaskPGbwLzyMcnj4if\nSjpF0hPA68AZZdu1oXCuDeR91oo8n4D15PkEmsnzCaTn00FmZjYtuQiYmbWYi4CZWYu5CJiZtZiL\ngJlZi7kImJm1mIuAmVmLDWU+AUmLJG2R9GD+c2HZdm3wnGszeZ+1ohRHAqNjkx8BHAOcLenwLsv9\nKiI+nv9ckqBdG7za5Fp2sKxUg201RG322Xvvvbf0Nu68885pvT5U+/4c1nwC0H1UQquxOuXqIpBO\nnfbZ++67r/Q2qv4l3voiUDTO2OQACyStlXSrpCNStmuD5Vyby9nasOYTeACYFxFb82nrbgIOS9W2\nDY5zbS5nazCk+QS6LP8kcGREbO543API1cu48wl06pVr/pyzrYl+5hPo5H22/qY6gFyqI4FxxyaX\ntN/oWOSSjiIrPrv8okg5uqGVJ+laEuQKzraGvM8aMKT5BIDPSToL2Aa8AZxWtl0bLOfaXM7Wimo1\nn4CZmQ1XpVcMS9pL0m2SHpP0c0lzeyy3I79gZa2km/LHTpK0XtLjks7vss5sSaskbZB0r6SDO56f\naP3TJb1YuFjmKx3PXy1pk6SHx3l9K/P210maP5n1J7pYp58Lfvrow8AuGppqts7VudYx1362Mehs\nB5ZrRFT2A6wAzst
vnw9c3mO5VzvuzwCeIDt8fRewDji8Y5mzgO/nt08DVk1y/dOBleP0/Viyr9Y9\n3OP5k4Fb89tHA/dNcv1FwOpx2t8fmJ/f3h14rMtrmKgP/Wxj3H6kzNa5Ote65lqHbAeVa9VjBy0B\nrslvXwN8psdynR8+HQVsiIinI2IbsCrfVq9t3wicMMn1u7U7JiLuAl7u9Xy+vWvzZe8H5krabxLr\nT9R+Pxf8TNSHQV40NJVsnatzhRrm2uc2JupDqWwHlWvVReD9kX8DISJeAPbtsdxukn4t6R5JS8he\n+LOF5zey63/G2DIRsQPYImnvzufGWR/gs/kh2Q2SDpzMC+vSxnM92hhPXxfrqPcFP333YZxt9N2P\nDpPOluwXinPNOddplysMKduUuSa7WKwXSb8AihVVZN9GmMyAVAdHxAuSPgjcDvxtl2U6P+HurIYq\nLNOtUnauvxq4PiK2STqT7K+UE3Zdrad+2hhPXxfraPwLfvrqwwTb6NmPAWR7H1m+4/XXuTrXbutX\nnSsMKdup5trLwI8EIuJTEfHRws+f5v+uBjaNHupI2h94scc2Xsj/fRIYAeYAxQ+ODgSe71jtWeCg\nfNszgTkRMXoot3Gi9SPi5fzQE+AHwJGTeNmjbRw0QR97iojXImJrfvtnwLsKfxkBYxfp3QhcFxE3\nT6UPE21jvH4MINs1QHEgM+fqXEdfb61zzfsw8GzL5NpL1aeDVgNL89unA7u8KEl7Spqd334f8Il8\nvQ9Jmpc/94X8saJb8m0CfJ6d/xJZM9H6+Rt81BLgkS79F73Pv60GvpxvawGwZfQwup/1i+cj1fti\nnXEv+OmzDxNeNNRHP3q1vTS/3W+2hwJ7O1fnSj1zHXcbQ8o2fa4xyW8HpPwB9gb+hexT7l8Ae+aP\nHwlcld8+BngYWAs8BCzNHz8pX28DcEH+2MXAX+S3dwNuyJ+/D/hAR9sTrX8p8G95u78EDutY/3qy\nCv0W8AxwBnAm8NXCMt8j+1bDQ8DHJ7M+cHah/XuAozvWXwjsIPumxFrgwfw1TaYPE25jon6kzta5\nOtc65lqHbAeVqy8WMzNrsapPB5mZWYVcBMzMWsxFwMysxVwEzMxazEXAzKzFXATMzFrMRcDMrMVc\nBMzMWuz/AyOpdyKRDULDAAAAAElFTkSuQmCC\n", 164 | "text/plain": [ 165 | "" 166 | ] 167 | }, 168 | "metadata": {}, 169 | "output_type": "display_data" 170 | }, 171 | { 172 | "data": { 173 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZsAAAEACAYAAABlOdt4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmYVNWd//H3t2kQFzY3iCC4ABKNijgiiTG2xAU1oplR\nY5YBjWb8Rc2eCEYnwJiZiM9kfsQYdX7PkAgmgsbEiCMiLrSZZGRxQVBAGxORFkEMNChudPP9/XFO\nw6Wp6qbpulXVfT+v56mnb526y6nb3fdT59S595q7IyIikqaKUldAREQ6PoWNiIikTmEjIiKpU9iI\niEjqFDYiIpI6hY2IiKSuzWFjZv3M7EkzW2ZmS83sm7G8l5nNNbOXzexRM+uRWOZWM6sxs8VmNjRR\nPtbMXonLjEmUDzOzJfG1KYnyvNsQEZHyUYiWTT3wXXc/GvgkcI2ZDQHGA4+7+1HAk8D1AGZ2DnCk\nuw8CrgLujOW9gB8BJwEnAxMS4XEHcKW7DwYGm9nZsTznNkREpLy0OWzcfa27L47T7wLLgX7ABcC0\nONu0+Jz4c3qcfwHQw8x6A2cDc919k7vXAXOBUWbWB+jm7gvj8tOBCxPrSm6jsVxERMpIQb+zMbPD\ngKHAfKC3u6+DEEjAwXG2vsDqxGK1saxp+RuJ8toc85NjGwcV7t2IiEihFCxszGw/4H7gW7GFk+86\nOJbjuecop4VyERFpJyoLsRIzqyQEzd3u/mAsXmdmvd19XewKeyuW1wKHJhbvB6yJ5VVNyuc1Mz/A\n2jzbaFo/hZOIyB5w91wf+FutUC2bXwLL3P1nibJZwGVx+jLgwUT5GAAzGwHUxa6wR4EzzaxHHCxw\nJvBo7B7bbGbDzczisg/m2MbYRPku3L3sHxMmTCh5HVRP1bO91lH1LPyjkNrcsjGzU4AvA0vN7HlC\nF9cPgcnAfWb2VeB14GIAd59tZuea2UpgC3B5LN9oZjcBz8R1TPIwUADgauAuoCsw293nxPKc2xAR\nkfLS5rBx9z8DnfK8fEaeZa7NU34XIVSalj8LHJujfEO+bYiISPnQFQTKSFVVVamrsFtUz8JqD/Vs\nD3UE1bOcWaH75cqRmXkW3qeISCGZGV5mAwRERETyUtiIiEjqFDYiIpI6hY2IiKROYSMiIqlT2IiI\nSOoUNiIikjqFjYiIpE5hIyIiqVPYiIhI6hQ2IiKSOoWNiIikTmEjIiKpU9iIiEjqFDYiIpI6hY2I\niKROYSMiIqlT2IiISOoUNiIikjqFjYiIpE5hIyIiqVPYiIhI6hQ2IiLt0AcfwHvvQUMDPPZYmC5n\nlaWugIhIe/P22/DAA3DfffDuu3DqqTBrVvg5aBB06QIrVsC4cXDAAdC9O2zYAKtWQW0tHH00HHEE\n1NTAli0wcGB4raICnn4aTjwxLPfnP8Nf/wqHHAIzZoTpLVvgo49g/fpQFzPo2RPuvhvOO6+0+6U5\n5u6lrkPqzMyz8D5FJHzC79o1HLjdob4eNm2ClSth+PBQDuHgv3o1PPVUCIYFC8JB/wc/gPnz4aWX\nwny1teHRuzesWQNLl4byU06B88+H116DJ56AoUOhW7ew3Ouvw5tv5q9j9+6weXOY3msv+PDDHa/1\n7x+Wb9S5MwwbFsrPOAPWrYNPfSoE0sKFMGJEWF8azAx3t4KsKwsHYYWNSMe0bVv4lP/EE6GVMX16\n7vm6dIF99oH99gstijVrQviYhUD6ylfCwfz226GuDj72MTj9dOjbN7QqXnsNevUKAXHQQXDttWFd\nLdm8OYTFn/4En/gEHHxw6PaqqIDly8NrQ4aEQHQPr3XtChs37qizFeRQv2fKLmzMbCrwOWCdux8X\ny3oB9wIDgNeAS9x9U3ztVuAcYAtwmbsvjuVjgRsAB/7V3afH8mHAXUBXYLa7f7ulbTSpn8JGpAP5\n4x/h5z+H++8Pz3v3Di2EMWPg61+H/feHqVPDwbyhAS66KITE0
qXhIN+7Nxx3XGgRdOq087rdw6NC\n32iXZdh8GngXmJ4Im8nA39z9FjMbB/Ry9/Fmdg5wrbufZ2YnAz9z9xExOJ4BhgEGPAsMc/dNZrYA\n+Ia7LzSz2XGZR/NtI0f9FDYi7diGDSEUfvUrmDAhtBiuuw7+7u9C19jHPhZaAlJYZRc2AGY2AHgo\nETYrgNPcfZ2Z9QHmufvHzezOOH1vnG85UAWcHuf/eiy/A6gGngKedPejY/mljfPl2Ea1uw/JUTeF\njUg79corcNRRYfqMM+C00+CLX4QjjyxtvbKgkGGT5mi0g919HYC7rzWzg2N5X2B1Yr7aWNa0/I1E\neW2O+QF6N9nGQQV/FyJSEvX18KUvwW9/C9/6Fnzta3DMMaWuleypUgx9bpqSRviOJld6NlcuIh3U\njBnwj/8YWi+zZ8NZZ+363Yq0L2mGzToz653o4norltcChybm6wesieVVTcrnNTM/wNo829jFxIkT\nt09XVVVRVVWVb1YRKZEtW+Caa2DatPCdzI9/HL7kl+Korq6muro6lXUX8jubwwjf2Rwbn08GNrj7\nZDMbD/SMAwTOBa6JAwRGAFNyDBCoiNMnuntd4wABYBHwMHCru89psg0NEBBpx+66Cy6/PAwxXrIk\nDDsu5bBfKcMBAmZ2D6FVcgCwDpgA/AH4LaFV8jpwsbvXxflvA0YRhj5f7u7PxfLL2DH0+ceJoc8n\nsvPQ52/F8v2B+3Jto0n9FDYiZWrbNviv/4Lvfx8eeiicha9hx+Wh7MKm3ClsRMpTQwNUVYWWzL33\nwqhRpa6RJLWX0WgiInnV1YXLsPTqFabVZdaxqbEqIiUxbly4gOTDDytoskAtGxEpqoaGcFLmc8/B\ns89Cjx6lrpEUg8JGRIrGHcaODSdqrlypoMkSdaOJSKrmzYNHHoErrwyjzFasCOfT6HIz2aLRaCJS\nEC++GEaTHXBAGF2WvE/LEUeEi2UOGQL/8R/p3X9FCkuj0UQ6uPXrw/1SunaFrVvL74rG9fXwwgsh\nVI44Au68E2bOhB/+MNw0bMkSuOEGOOww+OQnw03JJNvUssko93BPkIUL4cwzwyfQE04I9zVv/NTZ\n0BCGpB5wQGnrWm7cdx09tW1buAx+586t+x6ivj78XLkyXKbl1FPDjbaeeGLn+Xr0gNGjw+/q8cfD\nDb1uvhmuuCLc5OvYY8P9WZL+/Oew/uOPD6O+mjNvXrii8rBh8Mwzoewf/gEqK8OdIdeuDd1fXbuG\nOi5cGO5+eeSRYRsnnQSf+1z4PkY6Dp3U2UrlEDYrV4ZPfhddFA5Mhx4KixaFA0fnzuEyHU8/HW7x\n+vd/D8uWhX/g4cPDp9otW8KnRfdwL48XXgjr7dIFvv3tcKfBY4/ddbvu4Ta5Tz0VLtP+7/8e1v3H\nP4a7AL73XjjPYePGnZfr0yccoFasCJ9ar7oq/3vbuDG8h+XLw4Hq3HPh3/4NLrggdKu05mzw+vpQ\np93pZqmtheefD/Xv3x8OPDCEZq9eO8/3wQehS8c93Dv+rbfCQXLvvXfeTxs27Bys778ffienn74j\nXGbODCOpJkwIv8vx48N94Zct27HcD34QtnnZZaE+FRVwxx0hDC66CObODXWdMwf+8z93fV9HHgm/\n+EX4HdfXhxuB3XxzuNPjww+HecaMCXeb7NMnXIJ/4cIw34cfhvvbv/lm2D+NBg8O8w0fHp4vXBi6\ntY4/PuyPNWtg5Mjw2t57h1bVbbeFDxyHHx7mPe44+PSnw62UP/gg/M2pxdKxKWxaKc2w+eijHQfT\nykp4553QGujXL5wRff/9UFMTgmJ3XXRR6NuePz8cwC+8MByATzghrHf16vDp9+23w0Eyed28ffaB\nb34z1OMXv4ABA2DVqp3X/5OfhNevu27Hp/B33gnr27IFbrwxHJzPOSd0g1xxRQjILVtC987554fL\nipx1Vjho3nPPzuuvqAgHq
E6dwgGqMTz/6Z/CckuXhvCbPDnsn+nTw8Fw+HBYvDj0/fftGz5pV1SE\nT9LdusHFF8OXvxze84MPhvrkMnJk2EeDBoWD6OLFu87TqVMIjblzwwH3scd2vHbqqfA//xO2+c47\noey880LwbN26owzCzbsuvzx8cBg5MrzfoUN33d6gQeHvAGDffUPdu3YN7/Mzn4HvfW/3Lp+/bVvu\n8H711dA6Wbo0/E2uXw8//WkIoGee2bGvpk+HBx4IYXXUUaGrq74+7KPjj295+5ItCptWSiNs5s8P\nXR033rij7KGHwoE46fOfD5++hw8P4fP++6G8ujp8Smy8j/n69eHT/KJFoby1J7m99hr88pdhSOmb\nb4Z+8jlzwqXazzsvtIBmzw4HxNYON92wIZwP8dBDoX577x0+fT/xBJx9drjF7kknhYN0ZWUItyFD\nQgvs+98PgVLX5Ip1Rx8dAmjECHj33VDfXr1Cy+K440KL6jvfCfM2BuZnPwsvvRS6dPbdNwTO8OEh\nFF59NZTV14e7Oe6/f2hxLVsWDuQDBoQ6fPBBOPA/+yz84Q9h+WefDd87HHJI6DJasQIWLAgB//vf\nhy+0N2wIy40eHeq3bVtY9yc+sev+agyE558PLdjNm8P6t24Nv5v+/fOHhkg5Udi0UqHCxh0mTQoH\noKVLQ7fVt78dDpC//nU4qH73u3DJJfC//xs+mefq2soa9/D9Qc+e4eC8eXMI1vr6EE4taU3XmogU\njsKmldoSNh9+GLpF+vcPrZGPPoLrrw9fnp544s7z1taGbi4RkY5AQ5+LaMqU0K3yuc+FbqpDDsnf\nxaWgERHJTS2bZixZAqedFkZuqTtMRLJG3WittCdhU18fhu6OGAH//M8pVUxEpIwVMmw0HiaP664L\no5e+971S10REpP3TdzZNvPlmOJdj3rxwrsY++5S6RiIi7Z/CpomRI8OAgOnT9YW/iEihKGwS7rkn\nnHTZ0KAT7kRECkmH1ITf/CZc00tBIyJSWBqNFr39drikyKpVu17IUUQkizQaLQUzZ4brXiloREQK\nT2ETPfRQuGimiIgUnrrRCJeM79sX3ngjXEFYRETUjVZwDz4Ip5yioBERSYvCBrjrLvja10pdCxGR\njivz3WgffRRuebt0abiis4iIBOpGK6Bnnw13cVTQiIikp0OEjZmNMrMVZvaKmY1rzbILFoRbA4uI\nSHrafdiYWQVwG3A2cAzwRTMbsrvLV1fD6aenVDkREQE6QNgAw4Ead1/l7luBmcAFu7vw0qUwbFhq\ndRMRETpG2PQFViee18ayFn30UTi3ZsCAVOolIiJRR7jqc66RErsMPZs4ceL26aqqKqqqqli1KpzM\n2aVLirUTEWknqqurqa6uTmXd7X7os5mNACa6+6j4fDzg7j45MU/Ooc+PPAJTpsCjjxatuiIi7YaG\nPu9sETDQzAaYWRfgUmDW7ixYUwMDB6ZaNxERoQN0o7l7g5ldC8wlhOdUd1++O8uuXKmwEREphnYf\nNgDuPgc4qrXLrVwJZ52VQoVERGQnHaEbbY+pZSMiUhztfoDA7sg1QKChAfbZBzZvhr32KlHFRETK\nmAYIFMBbb4W7cipoRETSl9mwWbsW+vQpdS1ERLIhs2Gzbp3CRkSkWDIbNmrZiIgUj8JGRERSp7AR\nEZHUKWxERCR1ChsREUldpsOmd+9S10JEJBsyHTZq2YiIFEcmw+b998OjZ89S10REJBsyGTYbNsCB\nB4IV5Io/IiLSkkyGTV0d9OhR6lqIiGRHJsNm0yZ1oYmIFFMmw0YtGxGR4spk2GzapLARESmmTIZN\nXZ260UREiimTYaOWjYhIcWU2bNSyEREpnkyGjQYIiIgUVybDRi0bEZHiymTYqGUjIlJcmQwbtWxE\nRIork2Gjlo2ISHFlMmzUshERKa5Mho1aNiIixdWmsDGzi8zsRTNrMLNhTV673sxqzGy5mZ2
VKB9l\nZivM7BUzG5coP8zM5pvZy2Y2w8wqY3kXM5sZ1/W0mfVvaRvNqa+HDz6A/fZryzsXEZHWaGvLZinw\neeCpZKGZfRy4BPg4cA5wuwUVwG3A2cAxwBfNbEhcbDLwU3c/CqgDrojlVwAb3H0QMAW4JW7j6Fzb\naKnCmzdD9+66l42ISDG1KWzc/WV3rwGaHrovAGa6e727vwbUAMPjo8bdV7n7VmBmnBdgJPC7OD0N\nuDCxrmlx+v44H8DoPNto1ubN0K1bq96miIi0UVrf2fQFVieevxHLmpbXAn3N7ABgo7tvS5Y3XZe7\nNwCbzGz/ZrbRrPfeg332afX7ERGRNqhsaQYzewzonSwCHLjB3R/Kt1iOMid3uHmcv+ky3sK68pXn\nNHHiRADWrIH6+iqgKt+sIiKZVF1dTXV1dSrrbjFs3P3MPVhvLXBo4nk/YA0hIPo3LXf3t82sp5lV\nxNZN4/zJda0xs05AD3ffaGb5tpFTY9j86U+wbNkevCMRkQ6uqqqKqqqq7c8nTZpUsHUXshst2dKY\nBVwaR5IdDgwEFgKLgIFmNsDMugCXAg/GZZ4ELo7TYxPls+Jz4utPtrCNZr3/Puy99568PRER2VMt\ntmyaY2YXAj8HDgT+28wWu/s57r7MzO4DlgFbgavd3YEGM7sWmEsIuqnuviKubjww08xuAp4Hpsby\nqcDdZlYD/I0QUDSzjWYpbEREis924/jc7pnZ9hyaORMeeADuvbfElRIRKXNmhrsX5ESRzF1BQC0b\nEZHiU9iIiEjqMhk2Os9GRKS4Mhk2atmIiBRX5sLmvfcUNiIixZa5sFHLRkSk+BQ2IiKSukyGjQYI\niIgUVybDRi0bEZHiylzYaICAiEjxZS5s1LIRESk+hY2IiKROYSMiIqnLZNhoNJqISHFlLmw0QEBE\npPgyFzbqRhMRKT6FjYiIpC5TYeOusBERKYVMhc3WrVBRAZWVpa6JiEi2ZCps3ntPI9FEREohU2Gj\nLjQRkdJQ2IiISOoUNiIikrpMhY2+sxERKY1MhY0uVSMiUhqZChtdqkZEpDQyFzZq2YiIFF+mwkbd\naCIipdGmsDGzW8xsuZktNrPfmVn3xGvXm1lNfP2sRPkoM1thZq+Y2bhE+WFmNt/MXjazGWZWGcu7\nmNnMuK6nzax/S9vIR91oIiKl0daWzVzgGHcfCtQA1wOY2dHAJcDHgXOA2y2oAG4DzgaOAb5oZkPi\nuiYDP3X3o4A64IpYfgWwwd0HAVOAW5rbRnOVVTeaiEhptCls3P1xd98Wn84H+sXp0cBMd69399cI\nQTQ8PmrcfZW7bwVmAhfEZUYCv4vT04AL4/QF8TnA/XG+5raRl7rRRERKo5Df2XwVmB2n+wKrE6+9\nEcualtcCfc3sAGBjIrhq47w7rcvdG4BNZrZ/M9vIS91oIiKl0eL1j83sMaB3sghw4AZ3fyjOcwOw\n1d1nJOZpyskdbh7nb7qMt7CufOU5TZw4kccfh333herqKqqqqvLNKiKSSdXV1VRXV6ey7hbDxt3P\nbO51MxsLnMuO7i0ILZNDE8/7AWsIAdG/abm7v21mPc2sIrZuGudPrmuNmXUCerj7RjPLt42cJk6c\nyPr1cPTRoJwREdlVVdXOH8QnTZpUsHW3dTTaKOA6YLS7f5h4aRZwaRxJdjgwEFgILAIGmtkAM+sC\nXAo8GJd5Erg4To9NlM+Kz4mvP9nCNvJSN5qISGm09TZiPwe6AI/FgWDz3f1qd19mZvcBy4CtwNXu\n7kCDmV1LGMVWAUx19xVxXeOBmWZ2E/A8MDWWTwXuNrMa4G+EgKKZbeSl0WgiIqVhLRyfOwQzc3dn\n9Gi48koYPbrUNRIRKX9mhrs3e0rJ7srUFQTUjSYiUhqZCxt1o4mIFF+mwubdd2G//UpdCxGR7MlU\n2NTVQa9epa6FiEj2ZC5sevQodS1ERLInM6PRtm519to
Ltm6FikxFrIjIntFotD2weTN0766gEREp\nhcwceuvqoGfPUtdCRCSbFDYiIpI6hY2IiKROYSMiIqlT2IiISOoUNiIikrrMhM2mTQobEZFSyUzY\nqGUjIlI6mQmbjRsVNiIipZKZsFm3Dg4+uNS1EBHJpkyFTe/epa6FiEg2ZSps+vQpdS1ERLIpM1d9\n7tzZ2bIFOncudW1ERNoHXfV5D3TvrqARESmVzISNvq8RESkdhY2IiKROYSMiIqlT2IiISOoUNiIi\nkjqFjYiIpE5hIyIiqWtT2JjZv5jZC2b2vJnNMbM+idduNbMaM1tsZkMT5WPN7BUze9nMxiTKh5nZ\nkvjalER5LzObG+d/1Mx6tLSNXBQ2IiKl09aWzS3ufry7nwA8DEwAMLNzgSPdfRBwFXBnLO8F/Ag4\nCTgZmJAIjzuAK919MDDYzM6O5eOBx939KOBJ4Pq4rnNybSOfgw5q4zsVEZE91qawcfd3E0/3BbbF\n6dHA9DjPAqCHmfUGzgbmuvsmd68D5gKjYouom7svjMtPBy6M0xcA0+L0tPi8sTzXNnLq1m2P36aI\niLRRZVtXYGY/BsYAdcDpsbgvsDoxW20sa1r+RqK8Nsf8AL3dfR2Au681s8YbBeRb17pc9dx331a9\nLRERKaAWWzZm9lj8LqXxsTT+PB/A3W909/7Ab4BvNC7WdDWA5yinhfJmq9aaZbp0aWFtIiKSmhZb\nNu5+5m6uawbw38BEQsvk0MRr/YA1sbyqSfm8ZuYHWGtmvd19XexueyuWN7fMLiZNmrh9uqqqiqqq\nqnyziohkUnV1NdXV1amsu023GDCzge6+Mk5/AzjV3S+JAwSucffzzGwEMMXdR8QBAs8AwwitqmeA\nE929zswWEFpGiwiDDW519zlmNhnY4O6TzWw80NPdx+fbRp56ehZupSAiUkiFvMVAW7+zudnMBhMG\nBqwC/g+Au882s3PNbCWwBbg8lm80s5sIIePApDhQAOBq4C6gKzDb3efE8snAfWb2VeB14OLmtiEi\nIuUnMzdPy8L7FBEpJN08TURE2hWFjYiIpE5hIyIiqVPYiIhI6hQ2IiKSOoWNiIikTmEjIiKpU9iI\niEjqFDYiIpI6hY2IiKROYSMiIqlT2IiISOoUNiIikjqFjYiIpE5hIyIiqVPYiIhI6hQ2IiKSOoWN\niIikTmEjIiKpU9iIiEjqFDYiIpI6hY2IiKROYSMiIqlT2IiISOoUNiIikjqFjYiIpE5hIyIiqVPY\niIhI6goSNmb2fTPbZmb7J8puNbMaM1tsZkMT5WPN7BUze9nMxiTKh5nZkvjalER5LzObG+d/1Mx6\ntLQNEREpL20OGzPrB5wBrEqUnQMc6e6DgKuAO2N5L+BHwEnAycCERHjcAVzp7oOBwWZ2diwfDzzu\n7kcBTwLXN7eN9qy6urrUVdgtqmdhtYd6toc6gupZzgrRsvm/wA+alF0ATAdw9wVADzPrDZwNzHX3\nTe5eB8wFRplZH6Cbuy+My08HLkysa1qcnhafN7eNdqu9/AGqnoXVHurZHuoIqmc5a1PYmNn5wGp3\nX9rkpb7A6sTz2ljWtPyNRHltjvkBerv7OgB3XwscnGcbbySWERGRMlLZ0gxm9hiQbDEY4MCNwA+B\nM3MtluO55yinhfJmq7YHy4iISCm4+x49gE8Aa4G/AH8FtgKvEVoedwJfSMy7ghBYlwJ3JsrvBL4A\n9AGWJ8ovBe6I08sJrRuS8+XbRp66uh566KGHHq1/7GlGNH202LLJx91fJBz8ATCzvwLD3H2jmc0C\nrgHuNbMRQJ27rzOzR4F/jYMCKgitovHuXmdmm81sOLAIGAPcGlc9C7gMmBx/Ppgo32UbeeqaqxUk\nIiJFssdhk4MTu7bcfbaZnWtmK4EtwOWxfKOZ3QQ8E+efFAcKAFwN3AV0BWa7+5xYPhm4z8y+CrwO\nXNzcNkREpPxY7GY
SERFJTYe/goCZjTKzFfFk0XElrEc/M3vSzJaZ2VIz+2YsL8uTVs2swsyei12i\nmNlhZjY/1nOGmVXG8i5mNjPW82kz61/EOvYws9+a2XIze8nMTi7H/Wlm3zGzF+NJy7+J+6zk+9PM\npprZOjNbkihr9f7Ld6J2yvW8Jf7eF5vZ78yse+K162M9l5vZWYnyVI8FueqZeK3NJ76nWUcz+0bc\nN0vN7OZEeeH2ZaG+/CnHByFMVwIDgM7AYmBIierSBxgap/cDXgaGELoJr4vl44Cb4/Q5wMNx+mRg\nfpHr+x3g18Cs+Pxe4OI4fQdwVZz+OnB7nP4CMLOIdbwLuDxOVwI9ym1/AocQBtF0SezHseWwP4FP\nA0OBJYmyVu0/oBfwatz3PRuni1DPM4CKOH0z8JM4fTTwfPx7OCz+/1sxjgW56hnL+wFzCAOp9i/l\n/syzL6sI5zxWxucHxp8fL+S+TP2frZQPYATwSOL5eGBcqesV6/KH+A+zfRQdzY+22z4qrwh16wc8\nFv8IG8NmfeKfe/t+jf9EJ8fpTsD6ItWxG/BqjvKy2p+EsFkVDyKVhIEtZwJvlcP+jAeM5IGnVfuP\nxMjRWH5Hcr606tnktQuBu+P0Tv/jwCOEg3lRjgW56gn8FjiWncOmZPszx+/8XmBkjvkKui87ejda\nvpNLS8rMDiN8uphPeZ602nhVCAcwswOAje6+Lb6e3I/b6+nuDUBdsqsgRUcAb5vZr2J33/8zs30o\ns/3p7muAnxIGt7wBbAKeI4yeLKf92ejg3dx/LZ2oXUxfBWbH6eZOHC/6scAKd+J7mgYDn4nduvPM\n7MQ8dWzTvuzoYVN2J36a2X7A/cC33P3dZupTkrqb2XnAOndfnKiD5aiPJ17baRUUZx9XAsOAX7j7\nMMKIxPHNbLtU+7Mn4dJKAwitnH0JXSj56lKq/dmS1p6oXRRmdgOw1d1nJOqVqz5Fr6eZ7Q3cAEzI\n9XKO56Xan5VAT3cfAVxHaIk11ilXXfaojh09bGqB5Bes/YA1JaoL8Uvg+wlN/sbzhdZZvKabhWvE\nvRXLa4FDE4sXq+6nAKPN7C/ADGAkMIVw7bnGv5dkXbbX08w6Ad3dfWMR6llL+MT4THz+O0L4lNv+\nPAP4i7tviC2VB4BPAT3LbH82au3+K9n/mJmNBc4FvpQoLqd6Hkn4ruMFC+ch9gOeM7ODy6yeq4Hf\nA7j7IqAh9mbkq8se1bGjh80iYKCZDTCzLoT+0FklrM8vgWXu/rNEWeNJq7DrSatjAKyFk1YLyd1/\n6O793f35d16AAAABgElEQVQIwv560t2/AswjnuNE+II7Wc+xcfpiwpW5Uxf3xWozGxyLPgu8RJnt\nT0L32Qgz62pmlqhnuezPpq3W1u6/R4EzLYwM7EX4PurRtOtpZqMIn8JHu/uHTep/aRzVdzgwEFhI\n8Y4F2+vp7i+6ex93P8LdDyccpE9w97co7f5s+jv/A+Hvkvj/1MXd/xbr+IWC7ctCf0FWbg9gFGHk\nVw3hagWlqscpQANh5MbzhH77UcD+wOOxjo8RmrONy9xGGPXxAuHqDMWu82nsGCBwOLAAeIXwhWLn\nWL4XcF/cv/OBw4pYv+PjH/5iwiezHuW4PwndKMuBJYQrl3cuh/0J3EP4RPohIRQvJwxkaNX+I4RS\nTXwvY4pUzxrCwIvn4uP2xPzXx3ouB85KlKd6LMhVzyav/4U4QKBU+zPPvqwE7gaWEk64Py2NfamT\nOkVEJHUdvRtNRETKgMJGRERSp7AREZHUKWxERCR1ChsREUmdwkZERFKnsBERkdQpbEREJHX/H9xm\nfnEa0OlRAAAAAElFTkSuQmCC\n", 174 | "text/plain": [ 175 | "" 176 | ] 177 | }, 178 | "metadata": {}, 179 | "output_type": "display_data" 180 | 
}, 181 | { 182 | "data": { 183 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYwAAAEACAYAAACgS0HpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmUXVWd77+/SlJDKpVUJZCBBEISYqBRQUBU2tYCxAYU\n8WHbKu1yQhoFfC61HVDXS2LbonaDQ+PwGnmC3SooLgWRlqixRG0JQxgkIUlBEpIQMpBKzXPVfn/8\n7vbue2qfc/aZ7j237u+zVq2qOnc4+567z/7u7++3B1JKQRAEQRDCqKt0AQRBEITqQARDEARBcEIE\nQxAEQXBCBEMQBEFwQgRDEARBcEIEQxAEQXAiFcEgoluI6CARPWEcW0tE+4hoc+HnQuOx64iok4ie\nIqLXG8cvJKJtRLSDiD6ZRtkEQRCEdKA05mEQ0asB9AP4nlLqpYVjawH0KaVu9Dz3FAA/APByAMsA\n/BrAagAEYAeA8wHsB/AQgLcrpbYlLqAgCIKQmJlpvIlS6g9EtNzyEFmOXQrgdqXUOIDdRNQJ4OzC\nczuVUs8CABHdXniuCIYgCEIOyDqHcQ0RPUZE3yGieYVjSwHsNZ7zXOGY9/i+wjFBEAQhB2QpGN8E\nsEopdTqAAwBuKBy3uQ4VcFwQBEHIAamEpGwopQ4b/94M4OeFv/cBON54bBk4Z0EATrAcnwIRiZAI\ngiDEQCll65w7kabDIBgugYgWG49dBuDJwt93A3g7EdUT0QoAJwF4EJzkPomIlhNRPYC3F55rRSkl\nP0ph7dq1FS9DXn7kWsi1kGsR/JOUVBwGEf0AQDuABUS0B8BaAOcS0ekAJgHsBnAVACilthLRjwBs\nBTAG4GrFn2SCiK4FsAEsZLcopZ5Ko3yCIAhCctIaJXW55fB3A55/PYDrLcd/CWBNGmUSBEEQ0kVm\nelc57e3tlS5CbpBrUUSuRRG5FumRysS9ckNEqhrLLQiCUEmICConSW9BEARhGiOCIQiCIDghgiEI\ngiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIgOCGCIQiC\nIDghgiEIgiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIg\nOCGCIQiCIDghgiEIgiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4\nIYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIgOCGCIQiCIDghgiEIgiA4IYIhCIIgOJGKYBDRLUR0\nkIieMI61EdEGItpORPcR0Tzjsa8TUScRPUZEpxvH301EOwqveVcaZRMEQRDSIS2H8V0Af+s59ikA\nv1ZKrQGwEcB1AEBEFwFYpZRaDeAqAN8uHG8D8H8AvBzAKwCsNUVGEARBqCypCIZS6g8AjnoOXwrg\ntsLftxX+18e/V3jdJgDziGgRWHA2KKV6lFLdADYAuDCN8gmCIAjJyTKHsVApdRAAlFIHACwsHF8K\nYK/xvH2FY97jzxWOVS1jY8C+fZUuhSAIQjrMrMA5yfK/shxH4biVdevW/eXv9vZ2tLe3p1C0dNm4\nEbjxRuC++ypdEkEQapGOjg50dHSk9n5ZCsZBIlqklDpIRIsBHCoc3wfgeON5ywDsLxxv9xz/rd+b\nm4KRVwYGgN7eSpdCEIRaxduZXr9+faL3SzMkRSh1CXcDeE/h7/cAuMs4/i4AIKJXAuguhK7uA3AB\nEc0rJMAvKByrWoaGgP7+SpdCEAQhHVJxGET0A7A7WEBEewCsBfBFAD8movcB2APgrQCglLqXiC4m\noqcBDAB4b+H4USL6ZwAPg0NR6wvJ76pleJhdhiAIwnQgFcFQSl3u89DrfJ5/rc/xWwHcmkaZ8oAI\nhiA
I0wmZ6Z0hQ0MiGIIgTB9EMDJEO4zJyUqXRBDi09EBjI9XuhRCHhDByJDhYf49NFTZcghCEq64\nAti6tdKlEPKACEaGaKGQsJRQzfT0ACMjlS6FkAdEMDJEOwwRDKFaUYoFY3S00iUR8oAIRoZohyFz\nMYRqZWiI8xfiMARABCNTxGEI1U5PD/8WwRAAEYxMEcEQqh0RDMFEBCNDhoaAGTNEMITqRQuG5DAE\nQAQjU4aHgQULJIchVC/iMAQTEYwMGR4GjjlGHIbgzuhoviZ6imAIJiIYGTI0xA5DBENw5corgXvu\nqXQpiohgCCYiGBkiDkOIypEjQHeO1mgWwRBMRDAyRAuG5DAEV4aH89U4S9JbMBHByBAJSQlRGRrK\nn2C0tOSrTELlEMFIgc9+1p6olJCUEJU8CsbChfkqk1A5RDASMj4O/Mu/2MNOelitCIbgyvBwvsI/\nIhiCiQhGQvr6+LdXFCYmgLExYP58yWEI7uTVYeRJxITKIYKRkN5e/j04WHp8ZARoaADmzBGHIbiT\nR8E49th8lUmoHCIYCdEOwysYw8NAUxPQ3CyCIbiTx1FSEpISNCIYCdEOwysKQ0NAY6MIhhCNPDoM\nEQxBI4KREL+Q1PAwC8acOZLDENyYmOBcQV7yBXrzJMlhCBoRjIT4Jb1rNST12GOVLkH1onvxeenN\nDw8DRMDcufkpk1BZRDAS4ucwajEkNTICnHEGjw4ToqN3aMxL49zTA8ybB9TX56dMQmURwUiIXw5D\nO4ymJr7ZJibKX7ZyMzTEYYyurkqXpDrJq2A0NFSmTPv3S+cjb4hgJCRolFRjI1v6WnEZusE7cqSy\n5ahW9A6NeckXmIJRzjIND/PqCSeeCPzqV+U7rxCOCEZCenuBujr/UVKACIbghjgM4Le/BV76UmDb\nNuCcc2TASN4QwUhIby+PIvGbhwGIYAjARz4CPPRQ8HPyKhjlymH09gKXXALccANw553A0qX5uRYC\nI4KRkL4+YPFi/5AUUDuCoa+BCMZUHn8c2Lkz+DnDw/lKMJfbYRw4wPfSJZfw/5XKnQj+iGAkpLeX\nK3lQSKpW5mJMV4ehVPJBC0ND4Z2GoSGgtbV8+YI3vCH4uyp3DuPwYXbrGhGM/CGCkZDeXmDJEglJ\nAdNXML79bQ4pJWFw0E0w5s0rXyPZ0QEcOuT/eLkdxqFDvG6VRgQjf4hgJESHpGzDamstJKUFY7oN\nqz10CLj33mTv4eIwhofZYZSjkRwcDBexSgiG6TAaG0Uw8oYIRkJ0SMpv4h5QOyvWDg0Bs2ZNP4cx\nMAA88wywa1f89xgcDA9LltNhvPBCsVx+dHeXN+l9+HByh7F5M7B+fbrlEoqIYCTEL4fhDUnVSg5j\n6dLpKRh1dcBvfhP/PVxzGPPmlS9fAAQLhjlKanSUczlZ4nUYcQTjz38GNmxIt1xCERGMhPiNkqrV\neRjLlk0/wejvB/76r4Ff/zr+e7jkMMoZktKCERaSam1lsZw1K/tZ12kkvbu6gIMH0yuTUtOvPich\nc8Egot1E9DgRPUpEDxaOtRHRBiLaTkT3EdE84/lfJ6JOInqMiE7PunxJGBnhCtXWJjkMYPoKxsAA\ncOml7DBse7eHMTnJ9cElJFVuwXBxGEB58hhpJL27uoIT+VF56CHgTW9K7/2qnXI4jEkA7Uqplyml\nzi4c+xSAXyul1gDYCOA6ACCiiwCsUkqtBnAVgG+XoXyx6e3llTybm4NHSdVSDkOHpLIOX5STgQHg\n5JN5u90nnoj+er3kh0tIau5c7snHEaYouOQwTMEoRx4jLYfR11ccgJFGmaZbBygJ5RAMspznUgC3\nFf6+rfC/Pv49AFBKbQIwj4gWlaGMsejrA1pa7IJRqyGptjYOX0ynnM3AAH+Hr3tdvLCUbrxcQlJN\nTeUL/wDTz2EcPcq/0wpLdXfzdRCYcgiGAnAfET1ERO8vHFuklDoIA
EqpAwB0v2IpgL3Ga58rHMsl\n2mHMnh0ekppODagfQ0Pc4C1YUB29su7uYgMTRFLB0I2yS0iqqak8jfPhw/w9+YmY3jzJFIwsk/GT\nk1xnjjmmeCyuwwDSC0v19BRXpBbKIxjnKKXOAnAxgGuI6G/AImKDLMdyG9wwBWNwsDQMU6sT96pJ\nML76VeDGG8OfNzDAYcX2duCPf4zeiLk6jHIKxgsvAMuX+zsMvXmS7vRkXaajR/ka19cXj8UVjPnz\n03MYPT18jWSZdWZm1icoOAgopQ4T0c8AnA3gIBEtUkodJKLFAHR/YB+A442XLwOw3/a+69at+8vf\n7e3taG9vT7/wIeiQ1IwZXNFNkajVeRjVJBiHD5c2UH5oh9HWBvzVXwF/+hOLhyuDg26dBu1Ky+Uw\nggTDdBdA9jkMb/4CiC8Yp5ySrmAAfK/Pn5/Oe5aTjo4OdHR0pPZ+mQoGEc0GUKeU6ieiZgCvB7Ae\nwN0A3gPgS4XfdxVecjeAawDcQUSvBNCtQ1deTMGoFNphAEWXoQWjVkdJlUsw+vqA3/0OeOMb479H\nV1fx+wtCCwZQDEtFEYyhIY7NuzoMPe8hSw4fBl7+cv8yeQUjaxHz5i/invPoUeDcc9MNSenf1SgY\n3s70+oSzGrMOSS0C8AciehTAAwB+rpTaABaKC4hoO4DzAXwRAJRS9wLYRURPA/i/AK7OuHyJMAXD\nKwq1OnGvqYlvrKyXB3nwweQzeru6gpO+Gq9gRN3UZ2iIY/N5y2GceGI0wchSxNJwGJOTXO41a9J3\nGJLHYDJ1GEqpXQCmzKVQSnUBeJ3Pa67NskxpokNSQNFhaGp1lFS5HEZ3d/Jr2tXF4cIgxsaA8XFu\nvADgxS/mZUKiMDjIgjE0xI1anU83rVwhqfFxbgCXLnUPSVWDw+jp4XvtuOOAhx9Op1ymwxBkpnci\nXB1GreQwdEiuXILh4g6CcHEY2l1QYThGHPEfGuI60NgYPD+gXA7jyBHOx7S0TK8chk54L1qUbkhq\n7lxxGBoRjATYchiaas9h7NkDPPJItNdUo8MIew8zHAXw5xsZiTaxTgtpWGiyXDkMvcifbTi4phod\nhhaMhQvTnYdxwgniMDQiGAkwQ1LeyXvekFS15TB+/nMg6riCcgtGEocxMeH2Hl7BIOLPGOXcQ0Pc\nOIc5zXKFpF54oSgYUUJSec9hHD1adBhp5jCOP14chkYEIwFeh+EXkqqv5x5pNY3lHh7Ov8Pwzn2J\n+nrATTC8eY6ojtF0GEGvK1dI6vBhzqnYVijQVLPDWLCAyz8+nrxcPT3V5TDe/37gD3/I7v1FMBLg\nzWHom09X1JmFIQVE1ZfHGBkBnn8e2G+dBWNH96STCMbICNDZGf483eDHXTOoq4u/n6gOAwhuaG3o\n6+IakiqHYER1GNWSw2hr43lR8+cXlz+Jy8QEf//LliVzGEqlu7ba9u3Axo32xx54ANi3L71zeRHB\nSIB3lJQWBDMcpam2PIZeMC+Ky0jDYfz0p8DVDoOpdY/PdVisl66u4FFC5mu9ghEU+7ehHYZrSEpy\nGPHOqR0GkE5Yqq+Pv7PW1mQO47rrgPPOS8+l/PCHwNe/PvW4UrzJV5btjAhGAvyS3mY4SlNteYyR\nEW684ghGayt/1jghgYcfdnMN2mGE3RxPPQW85jVTj3d1cc+x3A4jaO2mcjkMncPIS0hqYoLzD+Y6\nUuY5XXvnXsFIOlJKX4N58+I7jK4u4D/+g8NaaU0o3LLFPrT78OHi1rtZIYKRAL9hteYIKU21OYyR\nEZ4J7CoYOj8zaxbPM2htjTd5L6pghN0cR48Czz479bjpMIIapP7+9BxGUB0YG+PrNnNm+XIYes9s\n24ivcia9u7r4XDM9s8JmzOBr4trx0ElvIJ2RUvoazJ0b3x1885u8l8qtt/K+Gq9+NbB7d7JybdkC\n7Nw5td7qLYTFYeQUv4l7tpBUH
nMY/f3Axz5mf2x4mHeZc50ApXvHmvnzo4elJid5T2YdDguiuzt4\ntVXN8DCXwzvgoKuLe9lhsXk/hxHlu3TJYZidjHLlMIJGfJUzh+Hdy9skyrVIOySl9zSP6zCGhoCb\nbgI+/nG+1uvWAR/4AHDZZfHLNDrKYlFfP/XziWDkGKW4EpnDak2HYQtJ5U0wnnoKuO02+2MjI8Dq\n1dzQuiS+vYIRJ4/x9NN87VwFwyUHod2KNxSgG5egxC+QTkjKJYdhXr+scxg6JAX4f/5yhqS8e3mb\nRBWMtjb+O82QVFyHceutwNln84KVmve8p9iwx2HHDl40cs2aqWGpXbvY4UtIKocMDnJlnjWL//fm\nMGwhqbzlMHbt8r8ZdQ7jzDPdwlI2wYgaknr4YeCss8IFY3ycr/WiRe6C4e2NJRGMqCEplxyGef3K\n5TAAf/Ert2Ck7TDSDEnFcRgTE8C//RvwyU+WHp83jyMTcXdU3LIFOPVUYNUqu2CcfLI4jFxihqOA\n0huvWkZJ7drl3zhr0UsiGFEdxiOPcBgsTDC0s3MJ8+n3OnCg9HgUwbDNw7C95pln7GPgXWZ6lysk\npRQ7DJ1gjuIwsnI9tiG15nkrFZLq6eFcXByH8ZOfAIsXc302mTGD61PcnIgpGDt3lj62ezc/Jg6j\nTGzfDnz/+27PNRPeQGmv0xaSymMOY9cu7q3bkoojI3yznnWWWx4jLcF49avDk97d3XwjhzX2ulxA\neRzGL34BfOc79jKEzfQul8Po6SnOJgf8d4tUqrTTk2UOIw2HoVRp0jvtkFRvb7S5FDfdBPzTP9kf\na2uLv5qzFoyVK+0O49RTxWGUjY0bgSuusI+q8eIVDLPXWS2jpHQs1XZDDg/zzVouhzE5CTz6KHDO\nOcUGyw/d83O5plowkjgM16R3X589dOEySqpcOQwzfwHY3ZJuKMnY/zJLEUvDYQwNFZP4QLohqYYG\nHq3lklvTPPcc8NKX2h+bP99ta2AbfiGpiQlg717Ol4jDKBPd3VzhvHFHG96QlMvEvTzmMAD7jaBz\nGCec4Jb4TioYnZ18Ix17LA+vDFpGJYrDGB7mPFOaDsMvJNXfbxeMPI2S8o5Isn1+bzgq6zKl4TDM\ncBTAgnH4cPxcAVB6HaLmMQYH+draaGuLJxgjIxx2etGLpgrG/v3FZVHEYZSJ7m7gQx/ifZv/+Mfg\n59pCUmET9/LkMCYneUXa1lZ/wWho4F6bi8tIKhiPPMLhL4AbzqDenBYMV4exfHl8hxFlHkaYw8hD\nSErPwdBEEYw85zDMEVL6dc3Nxfk6cTCvQ9Q8RhaCsX07sGIFf7YlS7i+6Q7Irl38mEsnKgkiGAZH\nj/JQzeuvBz7ykeDeiS0kFeQw8pbD2L+fG9358+2Ns9njPfPM8DyGuT0tEE8wzjyT/3YVDNccxooV\npYIxOcnfdVtbuiEpF4eRhWDoWeIueENSNvHr7i5tfIH85zDM/IUmaVhKz8MAou+JESYYcXIYOhwF\ncEduxYpi4lsLRtYdUxEMA90QXX45fyFBCXBbSKqachi6gunZvl60wwC455+1w3j44aJgNDWlJxjD\nw7wVqdlw9PXxa2fNip/0tr0mSQ7DrDNRcxgdHcDf/Z3bc70hKVt4TV9fk7znMLwhKSD5SClvSMrV\nYZirHtiI6zBMwQBKw1LiMCqA7lnV1QFf+Qrwmc/4PzfIYeRtLanf/W5qA2wKhp/D0IJx5pk8AzuI\nMMG46y7/66kT3qbDCOoxRw1JnXhiqcMwG5e4w2r9QlJ9faXHJieL+aCgOuDiMPzCpHv2uI8Icslh\nlFMwxsf5fN7GPup5/QTD77qMj4fn5fTgCiCawwhyF0D8pHeQYOzezfVcHEYZMW+UV72KlwmemLA/\nNyiHkaeQ1O7dwAUXAPfdV3pcC0ZDQ3DSG+A9kg8dCh655Lc0iA6X6NyQjc5OjqvrGz7tkNSSJXz
t\ndcMTVTCiJL37+0vrjBbeurpkOQylgNe+1u7aDh2aKlR+2HIYtpCUTTDCXM8VV0SfxXzkCH8XM2bY\nH08iGEEhqY0bgTe/Ofg94zqMMMGI6zC2bp0qGN6QVNRJpVERwTA4erR4oxAFN1zekFRDQ3FOQ56S\n3p/7HH+mTZtKj4c5DDMkNWsWf56gRskrGLNncyM5MADccANfK7+4rRmOAtJNeg8Pc1nM8ERSwQhK\negOlLsK8Lq4hKVsjOTLCQmRrAA8d8u/93nFHaWfBZVhtHIcxOsoh3KiCEZS/cDmvxpv0BoJDUocO\nAY8/Hjwaz5v0TsthxMlhDA+zk1y9unjMnIuh7+c4WwhHQQTDwJvsa2ryD414HQZRsfHJSw5j2zbg\nnnuAr30tumB4P0Nra/CIE53YNVmwAHjySeCrXwW+9S3/m2Tz5uiCMW+eu8NoaiptPHSvFkg/6Q2U\nNixm4+EakrLlMPT5ogrGffcBH/xgsdGNG5IKS3o//jg/7up0NEH5CyBZ0jsoJHXkCF/jLVvsj09M\n8HXR4chKO4zt21kg6uuLx3RIanSU68Xxx3MnrbExuzyGCEYBpUpHRQDRBAMo9tbyMg9j7VpejfaC\nC7gXb4ZKggRDKa6EZuV0EQyvq1qwALjmGuDKK1kQ/ARjzx6u/JqwpHeUiXva7S1eXMxjuDoM7Ri1\n09IEJb2PPba08TavS2Oj/8z6sJCUrjve4cEAN4oDA/bwaW8vN47f/jb/7yoYUedh6A5JVMFI02FE\nCUnp0J5fbk4vP1NXaCErncPw5i8Azlns3cthqeOOKy4PH3VxzCiIYBQYHOQG0mwko4SkgGKoIg9L\ngzz6KPD73wPXXssx62OP5V4KwGJw4AD3SGyjpEZHufLVGbWjrS2eYOzbxzuOzZ7NDZpNgF94gZ+r\ncU16uzqMxsZ4ISntLswZz0Bw0nvpUn+HQeT/2rCQVJDD0MdsjXVPDy+r/YUv8HUz15EC0sthbNrE\n7xVHMNJwGFFHSR05wj12P8HwzkWptMOwCUZDA3eE7r+fO3+aLKMZIhgFzPyFJqrDSCskdcUVwMUX\nA+9+N69H42ebg/jsZ4FPf7oYTjn77GIvcM8ebthmzbKLopnw1rS2Bldym2CcdRZw4418nYj8e1ZH\njkwVDNcchmtIKo7DsE3aA+znHR3luPExx5Q2mt7r4lcPwhxGWEhqxgx7D7inh5dbecMbgPXruZx+\ni2aar4maw9i0iXc2jCoYBw9yw+5HVqOkjhxh5+03XNzrstLOYUQVjCeemCoYAIveb37DbkOT5dBa\nEYwCtslKQQ2XX0hqYCB4tVqXBcx++lPgXe8C2tt5jwi/PSv86OzkntOVVxaPveIVRcHQ4SjAPkrK\nTHhr4oSkvvxl4B/+ofj//Pn2sNSRI6W93iijpFxDUosWRRcMW/4CKHYkzMRifz83xN4lJLyNx5w5\n9tBkWA5Dv8YrGEpxo7h8ub2x1vX0c58Dbr65uHGSJo0cxpEjfG3PPruyguG9f4NCUl1dwPnnc0Ns\nCxF6RTNNhzF3Ltct150EDx3iVZDPO2/qY6tWsWCIw8iIoSHgooumHrfdJEEOwy8kpR2Gt/GcOZN7\ngWGVX2/M9Ja3AO99LwvHjh3Br/Hy2GMsEGaj7ycYtsbZnIOhiSMYXmyCoZfbdnUYk5PceM6dGy0k\ntXhxvJCUdw4GUEwsmnWjr4+f6+2JujoMl5BUXd3UBrCnh99/4UJ/hzFvHu9f/qEPTd03O415GA8+\nyG6ytTWeYCxe7P94kqT3nDlcv2wCfeQI3wNLl/LgEC/ekFSaDqOujt/bddmSm2/miZm2uSqrVhU/\ni0YcRoocOQL88pdTe3BRBSPIYdhCUoB/79JE51L0LNEXvSi6YGzZArz4xaXHTj+dcxiDg+GCYQtJ\nxclheLEJxsAAi6l5vjBnN2cO33SuE/fSdhi212mH0dIS7DD
ihqT6+3khSK9g6ByAX4NmNnyf/Sxw\nyy2lj9vK4+cwxsbswzU3beIOSUtLdME4cCC5wxgb48/gvR+J+NrYwlI6DHrGGfY8RpY5DMA9LDU2\nxiMMr73W/rgeLCIOIyP0TfXCC6XHbTkMv4ZrcrJ0yJ1GNyK2kBTgtuJlb29pRV21iht4vwmENp58\ncmq8s7GRj23e7CYYNocRNYfhxSYY3nAUEDxKymzM6uv5ugSNpTdHSZkOQzuauILhvSm143RxGGEh\nKT+HsWrV1FFSQYIxNsYdI3NYrzl8GZj6+cfG+Jp56zZRUTS8JBGMNEJSuk7UWVozvzyGFgy/VQyy\ndBiAu2D87GecpzjtNPvjK1fyb3EYGaErtLcS2XIYfg6jv784Mc3EdBi2xtPFhvb0lPaUdLjBZY8O\njc1hAMWwlEtIypb0zsJheMNRukx+zs4UDD3qKOjmMENSaToM73n7++0hKVsOIywk5TcPY+XKqTPu\nDx70Fwztgr2jvEy8n9+2F4bGlsdQikNScQRD51+SCoYt4a2x5TFGR/l6t7Sww7AlvsvhMFwm7/37\nv3Mo0Y+TTuK8lBnWE4eRIvqmsgmGa0jKFo4CgkdJAW6VzuswgGhhKXPNfC9+gmGbVVyuHIZ3hJQu\nU5DDMK9PUINvruM0dy43FIODpQnSJCGpuA4jLCRlC//09xeXYzd7ptpheENhwNTOh8vnsN0HGlvj\n/fTTLIJLlkQXjJ4e/qxB9cZFMPTKwzZsDkNP3CRiwXjssamhNq9gtLTwd+Ayg9pFMFzmYjz+OM+x\nCFrCpLWV52KYnVdxGCkS5DBcQ1JhghEUknIRDO97RxEMc818L694BS9E2N9f7JHYRknZkt5Z5TBs\nIakgwfCOXglyGOaeHkT8mZ95pjicGEg3JKUdhtlo2nIYYSEpHf4xXYZOwHvnFpghKW9jbet82D6H\n+fmjCsYDD3C9AqILRlg4yu+cXsIchvdeN0OSbW3cQ+/sLH2Ot2MyYwZ/jy6Tb9MKSd10E/CBD/iv\neqvx3qviMFIkimD4OQzbCCnALSQVJhi2XmEUwbBN8NGcdBI3QsuXF0MOWc7D8BIlJOWSwwCCh9Z6\ny7RoEfDUU6WNS5x5GIA9JOXiMFxCUsDUhlKXxczFAMWQji0kZdsIyYuekKdzZGGC4Q2V6fwFkF/B\n8IakvK7WFpayzUVxzWOkIRhbtgB33gn84z+Gn89LlgsQ1pxg+IWkbLY2rsOoZEjqySft+QuAbevZ\nZ5cmyKIkvYMchncDJRuuISnXpDcQ7DC8jfXixbzip9m4eBtME79htYB/SCqNUVJAsMMwE99BSW+/\nemqi10A/CUqLAAAgAElEQVTTHaMgwbDlMPIuGH4hKbPO2RLfNrFNWzD8chgPPcRzRL7xjeBZ8H7I\n0iAp0tdnt6lpOYz+/mSCkaXDAHjW70knFf/PQ9LbFpJySXoDwb0p7+dYtGiqYHgbTJO4IamkOQxg\nakOpyxIUkorjMLxlihKSGh7m63nGGfx/HgXDdq/bHIaLYLgmvpM4jI4OnpV/8828kVscJCSVIr29\n3GAmEQy/hmT2bH6fWbPsQ/xaW+PlMPSe1C7bcAY5DAD4+MeBz3+++L+rw9CzU2098YkJ+yJ9XtJK\nensFI4rD8Iak9HvYbrA48zBsguGSw3ANSXkFQ4+S8kt6uwiG+VmiCMbOnTwhUH8+vbhi0DBnk7BJ\ne7Zz2ghKeruGpDZvLh19lrXDsCW9/+d/gLe+Fbj9duCSS8LP40dNJb2J6EIi2kZEO4jok2m/f18f\nj2dPkvT2qxDNzVwZbe4CiB+SmjmTw0h67Xs/BgeB554rdRBeZs8uFSTbKClb0ruuzt4oAcUkf9Dw\nTSAbwXDdJxvgxmnHDrtg2G6wNOZheEN1thzG5CSHn4IEI27SOywkBUQTDDNMZiaPAf7+o7iMsEl7\n+pxhgmEuV+8laJSU5th
j+X9zzbZKOIy77gI+/GH7EiBRqBmHQUR1AG4C8LcATgXwDiI6Oc1z9Pba\nBcPWS/FzGIOD/g4jTDCizsPQuISlnnqKn6eXOXbBNekN+Ce+XcJRAH+uwcHSHqhfSCoNh2ELSY2O\npicYtnkYusHUvVWbw/DezFqgTcH15jBsSe/RUT7X/PnJQlKmw4qSw7DdM1EEI62Q1Nat9mHkAAta\nd3fpuk22TsqFFwL//d/F/yuRw9ixAzjllPD3D6OWHMbZADqVUs8qpcYA3A7g0jhvtGuXfXGvvr5i\nSErf1JOTfNzbUPslXwcGgh2GX+MZd1gtwDeEd+ifl7D8hQ3XxQcB/zyGbfMkG0RTe1ZpOIwoISkg\nHcHwS3rr7Vh1I+d1GLaQlC1n5OIwtNjW1SXPYcQJSeVBMEZGeC0ov5nQM2bw922u7OB1RgCvL6cF\nQ++a6R3wkLXD2LHDX/iiUDMOA8BSAHuN//cVjkVCKV66+Pe/n/pYby83HETFi9rXxxfZu6+wX/LV\nr0LMns2VMe2QFODmMMLyFzZck96A/1wMV4cBTA1LxRkl5Z24F5T09g6r1WUwSSskpRsYM3TnFVNb\nSMp2/YKS3nqUlLmXRNxRUsDUkJSfyNgEw3styy0Yf/4zdwCD6p838W2rc+edx6OT+vqKLt8bYs0y\nhzExwTmhoHCyK7XkMGxRcIcFwUvZto3j/XpXLRPtJMxK5NerCgpJ+QlGX18yh5EkJBXHYbgmvYFg\nhxFHMIaHOTzl7ckFjZKKMnHPO4EybYdhS3oDpQ2LzWHEEQwz6a3dsSkYWqTCErdhnyVqDiOuw1Aq\nHcHYvLk4SssPb+LbJhjNzcCrXsVLhftdtzQdxpw5/Ll0ePbZZ7mcrvdREFk6jAjR7rKwD8AJxv/L\nAOy3PXHdunV/+bu9vR3t7e1/+f8Xv+Dftni7Dh1owVi50n+URVDS229jHf06G0lDUuVyGCMj9hsm\naQ4DKBUMfeN6e3JRcxh+ex7YJs3Nnu0uGDovYcMv6Q2UCobLKCnbygDefIEOSTU28k93d3GElH7+\njBmlrirtYbW2HIZe/E7jKhh9fVxeP0HWhAnGI49MXVDRizfxbRMMgMNS997LoxJt1y1Nh0FUvJ8W\nLuR7e82a8Pd2wazPHR0d6OjoSOeNkT/BeAjASUS0HMDzAN4O4B22J5qC4eWeezh5ZGvcdIOcxGH4\n5TDM4YU2koSkFi/mSuBX1t5ejtOak/JciBKSStth+N24QasEewU1yGHYZtwfd5zbnhBAvKQ3EOww\nbCEpWznN3vzEBDea+jk68e3d3lSPlNLPy2KUVFohKRd3YTunl0ce4Z0pgzDvdaX8521cfDHvEHn5\n5fEdhlLu94NOfGvBSCN/AZR2ALyd6fXr1yd671yFpJRSEwCuBbABwBYAtyulnoryHkePsk19y1vs\njZvXYQDphaR04+JXWWbPZgsaNE7dLyRFBKxe7Z/43rqVRdI2/yMI18UHgfRzGLYRUrpMNsHQqwSb\no8DClgbxCt+GDVNdWJpJbyDcYUQNSXn3FteJb5tgmD3grOdhJEl6uwpG0D4co6Nc7/0S3hpzoEBf\nH7+nrX6/6EU8h+qPf4zvMPSIN5f70MxjbN+enmDU1NIgSqlfKqXWKKVWK6W+GPX1990HvPa1vHqm\n12HofSyam90EI+o8jDCHQRTcS9G7ydlmkQPBYak4+QugOErKjH3b5mEA5XMYfklv2/cUZeIewA7M\nGwJLmvSemODy6u/fnBPhl8Mwr7dN2GyCoXEVjKgOY3ycy+IXhkszh+EyaQ+wL8SoefJJ/j7Dwlrm\nve5X5/S5LrqIJ87FdRgu4SiNOVIqbYcxOOi2HXRUcicYSbnnHuCNb7QPW9OJw7q60krkl8OIOg9j\nxgy+qfwEAwiei6FDXd7RWpogwXj66XgVrq6Oe+ym68lqHgbgJhgzZ3Jl9w6LtglGWEgq6
LvQ2ARj\nfDx49rr5Gq8D0A23uby6Rq8C4F1iw3v9zHyBdxFEPVLKu5eEKRh6q18XwdAiph2JX+84zXkYLpP2\nNH5hqc2bw/MXQOm9bhtSa3LxxSxEcR1GHgRjxgz+rvzygEmYVoIxPs5jqd/wBrtg9PYWe++uIako\n8zAAPh7UeAb1UsL2LwgSjOefZ1cVB6+TKtcoKb+QlK1MgL/DcJ3p7YdNMLwi4MV0GN61xfSIJb/w\nhDeP4ReS0r1q7yKILg5jYIDfI2xpbKD4+YPCUbpM5c5h2M6rcUl4A6UhqSCHAQDnnsvnK5fD6Ori\n7//QIU62p0VWQ2unlWA88ADve7xsmT3ebk7Ocw1JRclhANyQhDkMv0oXtn9BUA7jwIH0BKNc8zCC\nbl5XwYgycc8P23sEhaO8rzHnYADFhtvv/N48RtSQlE56m6OkgNL5H67uAognGEqVJ4ehz2vruLkK\nhmtICuBr8drXltdhdHbyaDO/yEIcshpam7dRUonQ4SjAHj6J4zCGhvjmMHuaQZVi9uz4ghHmMI4/\nHti3z/7Y88+7xYRtVMphHDkCnH66W5mA6A4jSUgqTDDM83rzTnPncoMYNDjCHFrrN0rKDEn5OYxj\njy09r26sXRPe5meJIhiDg+ycvNe3XA5jbIxDR371x0Tf60oFrzul+fKX7dehubk4d8LPuUURjPnz\nebe8NMNRGnEYDpiCYQtJ+TkMvxzGzJksFN5Yul8OA0gWkgrrFS5cyBXeNsoqbYfhJxi2HIbLXhga\n15CULRxoawTTcBi2mytoDob5GqWmhqTCHIZLSMrMF9iS3p2dXD/N42YPOIpg6GsYJhhm8tnvnimX\nYGzdyiGcoO9Io0fW9fWFOwyAR13ZwkMuiyvGcRhZCEZWDmPaCMa+fVwJX/5y/l+HT8yRAuaNfcwx\nXHkmJ4NvFFviOyiHkWVIauZM+3LN4+PcCJu9zSh4b8igpHfeQlJZ5zD8MBOLXnHRPf0gh+ESktKN\nsy3p3dk5dXMdUzCyDknZ8hdA+QTDNRyl0R1EF8EIImwB0Tg5jDQn7WnEYYTw8MO8m5xOMM6axRXN\ntP7mTTRrFv/d1RVNMGwjX0yyDEkBPPFsv2fu+6FDfBPEjYG6hqSam7kR8w5xjCIYra38PUxMhAuG\nV6ht6xxlNUoqTDD0uQcGojuMqCEpW9J7cnJqgxvXYcQVjEo6jKiCocN4YaOkwgjb00YcRpVgW1PG\n2yP23ti61xF0o3gbU90b9Bt62Nwc3HgGVTiXXuFxx/GeFyZJwlGAe9JbL2fgLX8UwZgxg7+Dnp50\nRkk1NfmPOc/SYQBFsfImvXXy2a/xcB0l5ReSamoqrlZgkiQkFTWHYZuDAbgJRn8/f18u4STveTWV\nchhhe9tHzWEcPZrupD2NOIwQbILhzWN4G2RdiYJ27PI6jKD8BeAWkvKztGEhKcDuMJIkvAF3hwHY\nb5goggHwjXLoEDdSfp/XJhi2RjBozHnWgqHDYbakd9JRUt55GN7GddGiqYJRjlFSaeQw9KS9sA23\nzPOagjE5CTzxhFvCW2MKRljSOwi/kYKaqA5j1y5223HDyX6IwwjBRTBsDmP//uDZrd7ka1D+AgDW\nrgUuu8z/8aQhqaVLpwpGFg4jSDC8N0wcwXj6aX4vP6dmEww/QfULS1U6JOWawwhbS8pWFptgJBkl\n5Zr0ds1hBM0yjhKOAqYKRm9vMaTsig5J5clhtLXx89escRdPV7JaHmRaCMbzz3OFOuGE0uMuDqOz\nk28svy/MG0sPqxCrV/u7FSBZ0hson8Pwa2htPSzXDZQ08+dz3NYvHAX4j5KyNRJ+N0c5HIYtJBU1\nhxEWkvImvQF/wYjrMKKGpPwcxqxZPDgjaJZxlFne3vMGnTuItEJSttGXJlEEY/Zsvl5ph6OA4Nxe\nEqaFYGh34W30XRzGjh3BN4ktJBWlcfSSZFgtkF0Ow
ztKKmuH0dkZfOPakt5+veasHEZYjF07BW9I\nqrmZy64XS/Riy2GETdzzluWaa3hFA5Osh9W65DCA8LBUUocRdG4/Fi3ijlZ/f/BnDCPNkBQR3wtZ\nCYY4DB/8NlHxNm7mxD2ABWP79uDK5+19h+UwwshilFRSh2HOpJ2c5GG6fhOT0sph7NgRLhguOQyg\ncg7DLyRVV8cN/KFDbjmMsLWkbGU599ypu7PFFYz6ev7OX3gh+DUuOQwge8GI6zC2bQsOg7qQZkgK\n4M+RhWBI0jsAv0XIbA7DG5KK6jDCchhhJA1J+eUw0gpJaXfhF6JL02EEhaTSyGG4lss20soWBvKi\nb0pbUrqlhb8Xl5neYWtJhU0i1MQNSRFxOZ9/PnkOA8ivYIS5WhfSdBgA8OY3A698ZbIy2RCHEYCf\nw3AJSfX15Sck5eIw5s/nMphlSjPpHRSOAvxzGFEFY+/eaA5jbIzL5resfJKQ1MyZ/GPOL0niMIDi\n8iC267JoUekiklHXkgoqz9AQj7qJ4jD0a73b33pxyWEA5RGMqCOdFi1iF5VkhBSQvsO4/vp0Fx3U\niMPw4YUXuAHzbhUJuCW9geCbxBaSSiIYTU1ccW2rb7r0ColYHJ5/nv9XKt2kd1gjm5bDAIIFw5v0\n1tfG5nxsvanxcQ6vuazWCky9wZIkvQEuq5/DuPhi3sP5T3/i/6NO3PODiJ/X3x9dMGbPLi594Uca\nOYyJCZ5DEWVmcxoOo62Nh2DnzWFkhTgMHx59FHjZy+xxSZeJe0Bw5Ys6DyMMv8lvgFtICihNfOu9\nkV0nQdmI4jDSymEA0UJSQQ2grTely+Q6XDGOYPglvYFgh1FfD3zqU8A//3NpWb3PCRol5YcOS0UJ\nSQH8+YP2wgDSyWHcey93bl7yEveypZH0rqvjuQ5JBSNth5EV4jB8eOQRezgKCHcYra0ciggLSUWZ\nh+GCLSw1MeEuRmYeI6m7AKY6jDDBKIfD8I6SChMM2z7ZLuEo8z3iCoZfSMrPYQDAe9/Lk88efjh8\nLSnXkJQ+b29vPIcRNnpIi5hSXAfiCMY3vwlcfbV7uYB0HAbAYSlxGMmoesHwy18ApYJhbs+qIWKX\nERaSSjOHAdgFQ4c1XEZwmCOlkuYvgNIbMmgOBjD1hlEq/DVeXAXDG5LyawBtSe+oIuYVjK6u8AY0\nKOk9d6491KRpaAA+8Qng85+Pvrx5EHPn8lyD0dFo9bS5Ofzz6jL19/Pf9fX25/kJxjPPsEC+7W3u\n5TLPq4krGAsXZu8w0uhQpoE4DA//+Z/8O2ibRlMwzO1ZTcIEI+2kN2AXDNdwFFAqGGk7DJeQlCkY\nw8PccEQZqhg3JOUXYrE5jCSCMTzMjdvJJwe/Ro928hMM/b5+XHklsGkTi5OfYCgV3WHs2+ef7/HD\nxWHoMoWFhPwE41vfYmcVpXNhnlcTJ+kN8H2TdAmOpib+TvwmJorDyCkf/SjvsHfggP84ZrM3YAsb\nALyJ/NKl/udJex4GYBcMlxFSmqVLizmMpENqgehJb7OHFWUvDI1ubKI4jKAQi81hJAlJPfkkz9gP\ne31zMw+6aGiYulKwrmtB16apCfjYx9j9+q0lNTJSnD3tQksLC0aUcBTgJhizZvFggiNHogvG0BBw\n663AVVdFKxeQnsP4138F3vnO6K8z0TlIP5eRF8HIammQqt1x7wtfAC66iDc78VvW2+wNeCftae68\nM7gnlvY8DMDfYbgKhtdhJA1JxXEYehfCqD15gBvD666LNkoqLIehN8PSJHEYeiCFy2sOHrTXKxeH\nAQAf/CDw+ONTR3PpHEaUhLc+r3YYUQjav1xDxN/dwYPBPfyWFmDnztJjd9zB2w+sWhWtXEB6gpHW\nAn86cmG77/IiGFktDVK1gvH+9wO//31w75qo+OV6J+1pwkIpeQ9JHTgQHjoJI0rSWy/truPucQQD\nYMEPK5N53cNyG
GmGpILyYt7zhglGWBmam4vhVRPdSEYJR+nzPvtsPIcR9L2b5TpwILrD+MY3gHXr\nopXJPGfSUVJpEpT4zpNgiMMwIAJuu43tfBCmYASNMfcj7XkYQPKQlBYMpdJJensdRlgoRruMJIIR\npUwAXx+/RsKW4EsSktq8Gbj8crfXHDgAnHLK1MdcHYYfOvzT2xttyPTcuTwpMmqdaG52O08cwdi5\nk13PhRdGK5N5Ti0YExP83lEFMU38QlJjY1w+v8EA5SSrpHfVCgbAohG2y5z+cqOOS9ekPQ8D4Mq+\nb1/psSgOo6WFe/m9vekkvb2jpMJ6mvqaLllSXsE48UT7c9NyGAMD3Eg/+aTbXgt6B8IkDsMPHf45\nejReSCrqlp/XXuuWJ9GCERaSMgVj82YOR8XdEdKsn7pjFfe90sDPYehVm9NeqjwOWrRGR9MVsKpN\neruS1GFkMQ/DNnEvisMAii4jbYfh0jM3b5hyCkbUiXtxHMa2bcCyZW51RTfkSXIYQTQ0cPglqsM4\neDB6D3z58uDBH5r6+ugOwzXE54frkiTlws9h5CUcpcnCZdSEYHR3x3cY5ZqHEbV8xx3HseqjR4OH\np7oQJekNcOPyta/xdc1KMKImvW0T9+LkMFwT3vo1gL1BdxklFYYWjCgOQ583q5BNnJDUdBMMP4eR\nN8HIIo9RE4KR1GHkLekNsGA89hiLRVJ7HlUwbr6Zz/uSlwD33JPNTVKpiXtRGjcXhxF1zoFJQwMP\nYY0akjJ/p01UwVCKr6mrCPud02UNq3IhDmMaowUjicPI2zwMgMMHjzySPBwFRA9JzZnDo15uuw34\nxS/iCbFLmbxLgwRN3Esr6R1FMIIcxty53NAl2XshbkgKyN5huOYw9Gg+l3BX0DnFYUQnC4dR1Ulv\nF1pbOQk4Nhav0uZxHgbADuPHP04+pBaI7jA0553HCWLvznhpEHXiXhpJ7/5+dm2uveGZMzmmbxPM\nBQtYTJNQXx89JJW1YNTXc10NarTnzOHGc3LSfzfMKJg7Qsad5Z0m4jCmMUkdRp5DUrt2VcZhmMyZ\nk96EKL8yAfFXq3Vl9mwWv9bWaOsNNTfbBYMIOP989/exkcRhZBmSAoIFo66Or/3AQLScUNA5xWFE\nR3IYMdBfbhrzMCYnueImTfKmEZI67jj+nXRILRB9WG05qK9nVzg5yT+2JcQ1thsjTkgqirswX5dk\nafkg8pr0BsIbbR2WSprw1ufMk2AEOYyk4eo0yWJ5kJoQjLSS3lH3WPCjsZEbQdsGQa7o8FoaDsNs\nnPMiGETFUER/P1d+v+S+3pN6fLx4LI7DmJiI3rj5OYw0yLNghK07lZVg5CHp7d02QZNHhyEhqYgk\nnbinGy29amgaFcK2iVLUkJQWijQcBlHxpozaM88SnfgO29uBaOrNEUcwgOiN2+zZ2QmGzmFEcTAN\nDfyTVUiqvp7fO2ySX0sLh0x7e+27YUYhbw5DQlLTmKQOo66Ol2kYGUm3QnjDUlFDUg0NHGtPw2EA\nxdBbXhwGUCyTy2ZA3jxGnJAUED0kNWdOvkJSAHDppdH2zI5aJpcGu6UFuP9+njGf1JGbS73nIek9\ndy63JxMTpcfzJhiS9I5B0ol7QDEslWaM0hSMsbHoG94AwBVXpDNKCig2znlzGK6CsWBB6Yq1UR3G\nvHns1nRuyJWbbgJe85por3GloSHa5kmaO+7IruGKIhi/+13ycBTAoci6Og455sFhzJjBn6+3t/R4\n3gSjqhwGEa0lon1EtLnwc6Hx2HVE1ElETxHR643jFxLRNiLaQUSfTKMcLS3ceBw9Gj90oBuurByG\nXkk3ak/sS19KvoOYJs8Ow0XsV67kEIgmqmCsXMlJ76jfwWmnZXe99PvmKZHa0ODWw29pAR56KB3B\n0OcdGcmHYAD2xHfeBCMLh5H1PIwblVI3mgeI6BQAfw/gFADLAPyaiFYDIAA3ATg
fwH4ADxHRXUqp\nbUkKQMSNc9RYsIl2GGluv2gKRtRwVBboGzJPgqGXB3FxGCtWlApGHKeUVRgnLnrRuDwJRn29u8MY\nG0s+pFbjuttfubDlMQYHeQfPvNDczNcrTbIWDFt/7VIAtyulxgHsJqJOAGcXntuplHoWAIjo9sJz\nEwkGwF/u6Gj8Wbe64UrbYdx/PzdSXV2VXa4ZqP6Q1MqVpZv2ZLXGVTnRwp1VjiQOUUJSTU3RV80N\nOu/AAN+Dle5cAbXrMLLOYVxDRI8R0XeISN/ySwHsNZ7zXOGY9/i+wrHEtLUlq2R6tE6aOYzLLgP2\n7AE+8QnebvYlL0nnfeOS15CUyygpwC4YeRG+uOQxJDV7tlsYtKWFw3WuW8uG0dDAOap585Itt5IW\nfg4jT4KRu6VBiOhXAEwjTwAUgM8A+CaAzymlFBF9HsANAN4Pu+tQsIuX8jv3OmP7rvb2drS3t/uW\ns63Nvim9K2bSO60Kcckl/JMXTIeRJ8HQOYyoghF1tdo8kkeHcfXV4ZuWAeycX/Wq9M7rsuhhOakG\nh/G61wFjYx1Yt64jtfdMJBhKqQscn3ozgJ8X/t4H4HjjsWXgnAUBOMFy3Mq6CPs9+k20cUWHpNLM\nYeQN02HkpWduhqTCYsMnngjs3p1sr/G8kccchuuQ1quuchMWVxoaeLOwvAhGNTiMJUuA972vHUD7\nX46tX78+0XtmOUrKnFJ2GYAnC3/fDeDtRFRPRCsAnATgQQAPATiJiJYTUT2Atxeem5jW1vRCUnmq\nEGmS15CUaw5jzhwOgxw4wP9Pl5CUnvFebbjshhkFcRj5IMuk95eJ6HQAkwB2A7gKAJRSW4noRwC2\nAhgDcLVSSgGYIKJrAWwAC9ktSqmn0ihIW1uy2bhZzMPIGw0N+Ut6RxklBRTDUkuWTJ+Q1Jw5+djy\ns9LkTTDa2oCtW0uPiWAkQCn1roDHrgdwveX4LwGkNK6iSBpJbz1KKk/x5DTRS6DkzWG4Jr2B4tDa\nc87Jl/DFpaFh+nZQoqIFIy/DVqshJJUF034/DAA466xkPRNzHkZeKmza5Dkk5TpLXzuM0VEenZNm\nSKQS1NdP3w5KVLRgpDVMNykSkprGnH9+sr0JspiHkTfyPEoqSkjq/vunR8IbEIdhIknvfJCDEc35\nJ4t5GHmjsZHXLSJKb+x8UqIKxooV7DBEMKYfecth1KrDEMFwIIulQfJGYyP3mPIU99fX3XXpFL2e\n1HTIXwDFpLfA12JoKD+C4XUYSrFgTIeOShAiGA7USkiqpyc/4SiAy3T0KOciXMq1bBlw+DDfyNPh\nxl2zhvdNF4rff6WXNtdoh6EKU4tHRjjnVO15szByEnzIN7UwD6OhIZ+CcfCg+zpbM2awaGzbNj0c\nxmmn8Y/gvjVsudA7b+rh29O5bTARh+FALczD0A4jTw1tVMEAOCy1dev0cBhCkbwJBlCaxxDBEP6C\nTr5O9xxGtTsMgAVjyxYRjOlGHgXDzGOIYAh/IYvFB/NGXpPeeoVSV7TDyNPnEJLT0FDc6S4vmA7j\nlluAU0+tbHnKgeQwHKilpHel9+UwaWzkSXhRZumvWAE8/TTw4hdnVy6h/Oh9OPK0TIpe1PS//gu4\n805g06ZKlyh7RDAcqJV5GHkMSQHRHcbEhISkphuuGzeVk7Y2YMMG4PbbgY0bgWOOqXSJskcEw4Gm\nJs5f5Gnp77RpaGBBzNPniysY5muF6UEeBaO1FbjpJuBnP6sdRyuC4UBjI2+jqofSTUd0A1vtDqOt\njZ8vDmN6kUfBeM1rgFNOAd70pkqXpHyIYDjQ1AQcOTJ98xdAvgUj6krDK1eKYEw3liwBVq+udClK\nedvbKl2C8iOC4UBTEye3Tjgh/LnVim6c8xTK0Y1+1ET8ypX5+hxCci66iH+EyiKC4UBjIy8BIA6j\nvMQJSQHAy15WGwlIQSg3IhgO6J5uLQhGnnr
mcQXjM59JvyyCIMjEPSdqQTC0s8iTw9BlSbJboiAI\n6SGC4UB9PY+Omq5zMIB8hqTq6vja52kyoSDUMiIYDhBxgzqdHUYeQ1IAuzsRDEHIByIYjkx3wdB7\nYOfJYQDAD3/IQyoFQag8kvR2pKlpegsGwKKYN4chQykFIT+Iw3CkqWl65zAAFou8OQxBEPKDCIYj\n0z0kBYhgCIIQjAiGI7UQkmpoyF9IShCE/CCC4UgtCIY4DEEQghDBcKSxsTZyGOIwBEHwQwTDEXEY\ngiDUOjKs1pH3vQ847bRKlyJbPvYxXrhPEATBBimlKl2GyBCRqsZyC4IgVBIiglIq9jZwEpISBEEQ\nnBDBEARBEJwQwRAEQRCcEMEQBEEQnBDBEARBEJxIJBhE9HdE9CQRTRDRGZ7HriOiTiJ6iohebxy/\nkIi2EdEOIvqkcfxEInqAiLYT0Q+JSIb8CoIg5IikDuPPAP4XgN+ZB4noFAB/D+AUABcB+CYxdQBu\nAvC3AE4F8A4iOrnwsi8BuEEptQZAN4ArEpatJujo6Kh0EXKDXIsici2KyLVIj0SCoZTarpTqBOAd\n1zgrvukAAATUSURBVHspgNuVUuNKqd0AOgGcXfjpVEo9q5QaA3B74bkAcB6AnxT+vg0sREIIcjMU\nkWtRRK5FEbkW6ZFVDmMpgL3G/88VjnmP7wOwlIgWADiqlJo0jh+XUdkEQRCEGITmCYjoVwAWmYcA\nKACfUUr93O9llmMKdoFShed7XyNTuQVBEHJEKkuDENFvAXxMKbW58P+nACil1JcK//8SwFqwKKxT\nSl3ofR4RHQawSCk1SUSvBLBWKWXdoJOIREwEQRBikGRpkDRHIpmFuBvA94noK+Aw1EkAHgQ7jJOI\naDmA5wG8vfADABsBvBXAHQDeDeAuvxMl+cCCIAhCPJIOq30zEe0F8EoA9xDRfwOAUmorgB8B2Arg\nXgBXK2YCwLUANgDYAk6Mbyu83acAfJSIdgCYD+CWJGUTBEEQ0qUqV6sVBEEQyk9VzfT2m/RXCxDR\nMiLaSERbiejPRPS/C8fbiGhDYcLjfUQ0r9JlLRdEVEdEm4no7sL/NTn5k4jmEdGPC5NktxDRK2q1\nXhDRRwqTiZ8gou8TUX2t1AsiuoWIDhLRE8Yx33pARF8vTK5+jIhOdzlH1QhGyKS/WmAcwEeVUn8F\n4FUAril8/k8B+HVhwuNGANdVsIzl5sPgsKemVid/fg3AvUqpUwCcBmAbarBeENFxAD4E4Ayl1EvB\nOdp3oHbqxXfB7aOJtR4Q0UUAVimlVgO4CsC3XU5QNYKB4El/0x6l1AGl1GOFv/sBPAVgGfga3FZ4\n2m0A3lyZEpYXIloG4GIA3zEO19zkTyJqAfA3SqnvAkBhsmwParReAJgBoLngIpoA7AdwLmqgXiil\n/gDgqOewtx5cahz/XuF1mwDMI6JFCKGaBMM66a9CZakoRHQigNMBPAAeinwQYFEBcGzlSlZWvgLg\n4yjM16nhyZ8rAbxARN8thOf+g4hmowbrhVJqP4AbAOwBTxbuAbAZQHcN1gvNQk89WFg47je5OpBq\nEgy/yYA1BRHNAXAngA8XnEYtXoM3ADhYcFy6XtTq5M+ZAM4A8A2l1BkABsBhiFr47CUQUSu457wc\nLArN4LXsvNTctbEQqz2tJsHYB+AE4/9lYLtZMxRs9p0A/lMppeepHNRWkogWAzhUqfKVkb8G8CYi\n2gngh+BQ1FfBtlrX6VqpH/sA7FVKPVz4/ydgAanFevE6ADuVUl2FIfw/BXAOgNYarBcav3qwD8Dx\nxvOcrks1CcZDKEz6I6J68IS/uytcpnLz/wBsVUp9zTh2N4D3FP4OnPA4XVBKfVopdYJSaiW4HmxU\nSr0TwG/Bkz+B2rkWBwHsJaIXFQ6dD57jVHP1AhyKeiURNRIRoXgtaqleeJ22WQ/eg+JnvxvAuwCg\nsLJGtw5
dBb55Nc3DIKILwSNC6gDcopT6YoWLVDaI6K8B3A9eUl4Vfj4NnkH/I3BvYQ+AtyqluitV\nznJDRK8FL0vzJiJaAR4M0QbgUQDvLAyQmNYQ0Wng5P8sADsBvBec/K25ekFEa8GdiDFwHXg/uPc8\n7esFEf0AQDuABQAOgpdj+hmAH8NSD4joJgAXgsOY79VLOwWeo5oEQxAEQagc1RSSEgRBECqICIYg\nCILghAiGIAiC4IQIhiAIguCECIYgCILghAiGIAiC4IQIhiAIguCECIYgCILgxP8Hj0Usyfk8fncA\nAAAASUVORK5CYII=\n", 184 | "text/plain": [ 185 | "" 186 | ] 187 | }, 188 | "metadata": {}, 189 | "output_type": "display_data" 190 | } 191 | ], 192 | "source": [ 193 | "plot_images(plt, results, (rows, rows), (2, rows))\n", 194 | "plt.figure()\n", 195 | "plt.plot(blslda.loglikelihoods)\n", 196 | "plt.figure()\n", 197 | "plt.plot(np.diff(blslda.loglikelihoods)[-100:])" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": { 204 | "collapsed": true 205 | }, 206 | "outputs": [], 207 | "source": [] 208 | } 209 | ], 210 | "metadata": { 211 | "kernelspec": { 212 | "display_name": "Python 3", 213 | "language": "python", 214 | "name": "python3" 215 | }, 216 | "language_info": { 217 | "codemirror_mode": { 218 | "name": "ipython", 219 | "version": 3 220 | }, 221 | "file_extension": ".py", 222 | "mimetype": "text/x-python", 223 | "name": "python", 224 | "nbconvert_exporter": "python", 225 | "pygments_lexer": "ipython3", 226 | "version": "3.5.1" 227 | }, 228 | "widgets": { 229 | "state": {}, 230 | "version": "1.1.2" 231 | } 232 | }, 233 | "nbformat": 4, 234 | "nbformat_minor": 0 235 | } 236 | -------------------------------------------------------------------------------- /examples/KL_thresh_lda.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# This notebook is used to decide on a tolerable level of corruptableness.\n", 12 | "%matplotlib inline\n", 13 | "\n", 14 | "import numpy as np\n", 15 | "import matplotlib.pyplot as plt\n", 
16 | "import pandas as pd\n", 17 | "\n", 18 | "from scipy.stats import entropy as KL_divergence\n", 19 | "\n", 20 | "from slda.topic_models import LDA\n", 21 | "from modules.helpers import plot_images" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "metadata": { 28 | "collapsed": false 29 | }, 30 | "outputs": [ 31 | { 32 | "name": "stdout", 33 | "output_type": "stream", 34 | "text": [ 35 | "2016-06-17 13:10:37.273754 start iterations\n", 36 | "2016-06-17 13:10:37.526897 0:00:00.253143 elapsed, iter 10, LL -1437405.5555, 4.24% change from last\n", 37 | "2016-06-17 13:10:37.735663 0:00:00.461909 elapsed, iter 20, LL -1292458.5259, 10.08% change from last\n", 38 | "2016-06-17 13:10:37.933441 0:00:00.659687 elapsed, iter 30, LL -1222462.6581, 5.42% change from last\n", 39 | "2016-06-17 13:10:38.126610 0:00:00.852856 elapsed, iter 40, LL -1193247.2897, 2.39% change from last\n", 40 | "2016-06-17 13:10:38.330853 0:00:01.057099 elapsed, iter 50, LL -1177533.2230, 1.32% change from last\n", 41 | "2016-06-17 13:10:38.522471 0:00:01.248717 elapsed, iter 60, LL -1171413.2940, 0.52% change from last\n", 42 | "2016-06-17 13:10:38.724416 0:00:01.450662 elapsed, iter 70, LL -1170943.1427, 0.04% change from last\n", 43 | "2016-06-17 13:10:38.925852 0:00:01.652098 elapsed, iter 80, LL -1164091.9810, 0.59% change from last\n", 44 | "2016-06-17 13:10:39.135559 0:00:01.861805 elapsed, iter 90, LL -1152807.8121, 0.97% change from last\n", 45 | "2016-06-17 13:10:39.331949 0:00:02.058195 elapsed, iter 100, LL -1133648.2206, 1.66% change from last\n", 46 | "2016-06-17 13:10:39.528754 0:00:02.255000 elapsed, iter 110, LL -1114795.9604, 1.66% change from last\n", 47 | "2016-06-17 13:10:39.716154 0:00:02.442400 elapsed, iter 120, LL -1101549.1451, 1.19% change from last\n", 48 | "2016-06-17 13:10:39.910225 0:00:02.636471 elapsed, iter 130, LL -1093958.8392, 0.69% change from last\n", 49 | "2016-06-17 13:10:40.100278 0:00:02.826524 elapsed, iter 140, LL 
-1088794.0874, 0.47% change from last\n", 50 | "2016-06-17 13:10:40.292516 0:00:03.018762 elapsed, iter 150, LL -1083356.9493, 0.50% change from last\n", 51 | "2016-06-17 13:10:40.479587 0:00:03.205833 elapsed, iter 160, LL -1079378.8629, 0.37% change from last\n", 52 | "2016-06-17 13:10:40.674492 0:00:03.400738 elapsed, iter 170, LL -1073994.9560, 0.50% change from last\n", 53 | "2016-06-17 13:10:40.865301 0:00:03.591547 elapsed, iter 180, LL -1069892.1355, 0.38% change from last\n", 54 | "2016-06-17 13:10:41.049652 0:00:03.775898 elapsed, iter 190, LL -1068351.1703, 0.14% change from last\n", 55 | "2016-06-17 13:10:41.232535 0:00:03.958781 elapsed, iter 200, LL -1066679.1082, 0.16% change from last\n", 56 | "2016-06-17 13:10:41.423505 0:00:04.149751 elapsed, iter 210, LL -1063951.6275, 0.26% change from last\n", 57 | "2016-06-17 13:10:41.609765 0:00:04.336011 elapsed, iter 220, LL -1062242.5754, 0.16% change from last\n", 58 | "2016-06-17 13:10:41.801387 0:00:04.527633 elapsed, iter 230, LL -1062294.8319, -0.00% change from last\n", 59 | "2016-06-17 13:10:41.985343 0:00:04.711589 elapsed, iter 240, LL -1062505.4828, -0.02% change from last\n", 60 | "2016-06-17 13:10:42.168113 0:00:04.894359 elapsed, iter 250, LL -1062054.3414, 0.04% change from last\n", 61 | "2016-06-17 13:10:42.370340 0:00:05.096586 elapsed, iter 260, LL -1061741.7024, 0.03% change from last\n", 62 | "2016-06-17 13:10:42.559607 0:00:05.285853 elapsed, iter 270, LL -1061197.7113, 0.05% change from last\n", 63 | "2016-06-17 13:10:42.752415 0:00:05.478661 elapsed, iter 280, LL -1059302.2141, 0.18% change from last\n", 64 | "2016-06-17 13:10:42.948136 0:00:05.674382 elapsed, iter 290, LL -1060328.5761, -0.10% change from last\n", 65 | "2016-06-17 13:10:43.132353 0:00:05.858599 elapsed, iter 300, LL -1060221.5216, 0.01% change from last\n", 66 | "2016-06-17 13:10:43.330924 0:00:06.057170 elapsed, iter 310, LL -1060089.7175, 0.01% change from last\n", 67 | "2016-06-17 13:10:43.522714 0:00:06.248960 
elapsed, iter 320, LL -1060489.0166, -0.04% change from last\n", 68 | "2016-06-17 13:10:43.711419 0:00:06.437665 elapsed, iter 330, LL -1060146.9494, 0.03% change from last\n", 69 | "2016-06-17 13:10:43.895481 0:00:06.621727 elapsed, iter 340, LL -1061146.3669, -0.09% change from last\n", 70 | "2016-06-17 13:10:44.084830 0:00:06.811076 elapsed, iter 350, LL -1059927.6139, 0.11% change from last\n", 71 | "2016-06-17 13:10:44.274399 0:00:07.000645 elapsed, iter 360, LL -1060137.9333, -0.02% change from last\n", 72 | "2016-06-17 13:10:44.458073 0:00:07.184319 elapsed, iter 370, LL -1061000.2644, -0.08% change from last\n", 73 | "2016-06-17 13:10:44.642123 0:00:07.368369 elapsed, iter 380, LL -1059301.8571, 0.16% change from last\n", 74 | "2016-06-17 13:10:44.827708 0:00:07.553954 elapsed, iter 390, LL -1060537.5811, -0.12% change from last\n", 75 | "2016-06-17 13:10:45.020233 0:00:07.746479 elapsed, iter 400, LL -1060460.5574, 0.01% change from last\n", 76 | "2016-06-17 13:10:45.210116 0:00:07.936362 elapsed, iter 410, LL -1059676.7759, 0.07% change from last\n", 77 | "2016-06-17 13:10:45.393776 0:00:08.120022 elapsed, iter 420, LL -1059961.2559, -0.03% change from last\n", 78 | "2016-06-17 13:10:45.582722 0:00:08.308968 elapsed, iter 430, LL -1060266.5682, -0.03% change from last\n", 79 | "2016-06-17 13:10:45.769316 0:00:08.495562 elapsed, iter 440, LL -1060809.5966, -0.05% change from last\n", 80 | "2016-06-17 13:10:45.966623 0:00:08.692869 elapsed, iter 450, LL -1059931.3829, 0.08% change from last\n", 81 | "2016-06-17 13:10:46.152906 0:00:08.879152 elapsed, iter 460, LL -1058578.6618, 0.13% change from last\n", 82 | "2016-06-17 13:10:46.346322 0:00:09.072568 elapsed, iter 470, LL -1058678.2547, -0.01% change from last\n", 83 | "2016-06-17 13:10:46.541890 0:00:09.268136 elapsed, iter 480, LL -1059360.5098, -0.06% change from last\n", 84 | "2016-06-17 13:10:46.727676 0:00:09.453922 elapsed, iter 490, LL -1057946.9908, 0.13% change from last\n", 85 | "2016-06-17 
13:10:46.915303 0:00:09.641549 elapsed, iter 500, LL -1057706.1739, 0.02% change from last\n", 86 | "2016-06-17 13:10:47.106396 0:00:09.832642 elapsed, iter 510, LL -1058117.3462, -0.04% change from last\n", 87 | "2016-06-17 13:10:47.294672 0:00:10.020918 elapsed, iter 520, LL -1058547.8565, -0.04% change from last\n", 88 | "2016-06-17 13:10:47.478729 0:00:10.204975 elapsed, iter 530, LL -1058680.3533, -0.01% change from last\n", 89 | "2016-06-17 13:10:47.660106 0:00:10.386352 elapsed, iter 540, LL -1058470.9536, 0.02% change from last\n", 90 | "2016-06-17 13:10:47.846131 0:00:10.572377 elapsed, iter 550, LL -1057969.2569, 0.05% change from last\n", 91 | "2016-06-17 13:10:48.032943 0:00:10.759189 elapsed, iter 560, LL -1059088.4917, -0.11% change from last\n", 92 | "2016-06-17 13:10:48.219986 0:00:10.946232 elapsed, iter 570, LL -1057865.2123, 0.12% change from last\n", 93 | "2016-06-17 13:10:48.403181 0:00:11.129427 elapsed, iter 580, LL -1058113.3617, -0.02% change from last\n", 94 | "2016-06-17 13:10:48.585760 0:00:11.312006 elapsed, iter 590, LL -1057428.5906, 0.06% change from last\n", 95 | "2016-06-17 13:10:48.768048 0:00:11.494294 elapsed, iter 600, LL -1058393.4613, -0.09% change from last\n", 96 | "2016-06-17 13:10:48.954911 0:00:11.681157 elapsed, iter 610, LL -1057858.4402, 0.05% change from last\n", 97 | "2016-06-17 13:10:49.138742 0:00:11.864988 elapsed, iter 620, LL -1057761.7893, 0.01% change from last\n", 98 | "2016-06-17 13:10:49.327808 0:00:12.054054 elapsed, iter 630, LL -1057108.6607, 0.06% change from last\n", 99 | "2016-06-17 13:10:49.518715 0:00:12.244961 elapsed, iter 640, LL -1057604.9099, -0.05% change from last\n", 100 | "2016-06-17 13:10:49.705813 0:00:12.432059 elapsed, iter 650, LL -1057226.3066, 0.04% change from last\n", 101 | "2016-06-17 13:10:49.893396 0:00:12.619642 elapsed, iter 660, LL -1057383.6062, -0.01% change from last\n", 102 | "2016-06-17 13:10:50.085391 0:00:12.811637 elapsed, iter 670, LL -1058325.1338, -0.09% change 
from last\n", 103 | "2016-06-17 13:10:50.270424 0:00:12.996670 elapsed, iter 680, LL -1057206.0105, 0.11% change from last\n", 104 | "2016-06-17 13:10:50.463753 0:00:13.189999 elapsed, iter 690, LL -1058307.3640, -0.10% change from last\n", 105 | "2016-06-17 13:10:50.653514 0:00:13.379760 elapsed, iter 700, LL -1058013.3241, 0.03% change from last\n", 106 | "2016-06-17 13:10:50.835588 0:00:13.561834 elapsed, iter 710, LL -1058354.0128, -0.03% change from last\n", 107 | "2016-06-17 13:10:51.029948 0:00:13.756194 elapsed, iter 720, LL -1057606.1315, 0.07% change from last\n", 108 | "2016-06-17 13:10:51.215566 0:00:13.941812 elapsed, iter 730, LL -1057033.2762, 0.05% change from last\n", 109 | "2016-06-17 13:10:51.405754 0:00:14.132000 elapsed, iter 740, LL -1058343.9157, -0.12% change from last\n", 110 | "2016-06-17 13:10:51.599527 0:00:14.325773 elapsed, iter 750, LL -1057374.9404, 0.09% change from last\n", 111 | "2016-06-17 13:10:51.783697 0:00:14.509943 elapsed, iter 760, LL -1057780.2156, -0.04% change from last\n", 112 | "2016-06-17 13:10:51.968802 0:00:14.695048 elapsed, iter 770, LL -1057663.2874, 0.01% change from last\n", 113 | "2016-06-17 13:10:52.159400 0:00:14.885646 elapsed, iter 780, LL -1057816.3689, -0.01% change from last\n", 114 | "2016-06-17 13:10:52.347056 0:00:15.073302 elapsed, iter 790, LL -1057470.6286, 0.03% change from last\n", 115 | "2016-06-17 13:10:52.551647 0:00:15.277893 elapsed, iter 800, LL -1057798.7536, -0.03% change from last\n", 116 | "2016-06-17 13:10:52.741805 0:00:15.468051 elapsed, iter 810, LL -1057335.5568, 0.04% change from last\n", 117 | "2016-06-17 13:10:52.942231 0:00:15.668477 elapsed, iter 820, LL -1057682.2412, -0.03% change from last\n", 118 | "2016-06-17 13:10:53.125723 0:00:15.851969 elapsed, iter 830, LL -1057825.5602, -0.01% change from last\n", 119 | "2016-06-17 13:10:53.309812 0:00:16.036058 elapsed, iter 840, LL -1057707.4948, 0.01% change from last\n", 120 | "2016-06-17 13:10:53.501474 0:00:16.227720 
elapsed, iter 850, LL -1057344.9374, 0.03% change from last\n", 121 | "2016-06-17 13:10:53.686120 0:00:16.412366 elapsed, iter 860, LL -1057506.0158, -0.02% change from last\n", 122 | "2016-06-17 13:10:53.877212 0:00:16.603458 elapsed, iter 870, LL -1057615.3388, -0.01% change from last\n", 123 | "2016-06-17 13:10:54.062415 0:00:16.788661 elapsed, iter 880, LL -1057345.2976, 0.03% change from last\n", 124 | "2016-06-17 13:10:54.251166 0:00:16.977412 elapsed, iter 890, LL -1057865.7333, -0.05% change from last\n", 125 | "2016-06-17 13:10:54.441907 0:00:17.168153 elapsed, iter 900, LL -1057560.5180, 0.03% change from last\n", 126 | "2016-06-17 13:10:54.632236 0:00:17.358482 elapsed, iter 910, LL -1057530.8174, 0.00% change from last\n", 127 | "2016-06-17 13:10:54.829072 0:00:17.555318 elapsed, iter 920, LL -1057892.4606, -0.03% change from last\n", 128 | "2016-06-17 13:10:55.014171 0:00:17.740417 elapsed, iter 930, LL -1057639.1125, 0.02% change from last\n", 129 | "2016-06-17 13:10:55.214619 0:00:17.940865 elapsed, iter 940, LL -1057511.4740, 0.01% change from last\n", 130 | "2016-06-17 13:10:55.402414 0:00:18.128660 elapsed, iter 950, LL -1057484.9104, 0.00% change from last\n", 131 | "2016-06-17 13:10:55.601927 0:00:18.328173 elapsed, iter 960, LL -1057782.1699, -0.03% change from last\n", 132 | "2016-06-17 13:10:55.811131 0:00:18.537377 elapsed, iter 970, LL -1057643.0236, 0.01% change from last\n", 133 | "2016-06-17 13:10:56.006725 0:00:18.732971 elapsed, iter 980, LL -1057466.6467, 0.02% change from last\n", 134 | "2016-06-17 13:10:56.205204 0:00:18.931450 elapsed, iter 990, LL -1058018.8458, -0.05% change from last\n", 135 | "2016-06-17 13:10:56.393372 0:00:19.119618 elapsed, iter 1000, LL -1058122.0383, -0.01% change from last\n", 136 | "2016-06-17 13:10:56.583853 0:00:19.310099 elapsed, iter 1010, LL -1057756.1571, 0.03% change from last\n", 137 | "2016-06-17 13:10:56.771178 0:00:19.497424 elapsed, iter 1020, LL -1057741.7515, 0.00% change from last\n", 138 
| "2016-06-17 13:10:56.955418 0:00:19.681664 elapsed, iter 1030, LL -1057600.3687, 0.01% change from last\n", 139 | "2016-06-17 13:10:57.138217 0:00:19.864463 elapsed, iter 1040, LL -1058270.1875, -0.06% change from last\n", 140 | "2016-06-17 13:10:57.352184 0:00:20.078430 elapsed, iter 1050, LL -1057786.3114, 0.05% change from last\n", 141 | "2016-06-17 13:10:57.576510 0:00:20.302756 elapsed, iter 1060, LL -1058285.5017, -0.05% change from last\n", 142 | "2016-06-17 13:10:57.796982 0:00:20.523228 elapsed, iter 1070, LL -1057385.9683, 0.08% change from last\n", 143 | "2016-06-17 13:10:58.013104 0:00:20.739350 elapsed, iter 1080, LL -1058210.3118, -0.08% change from last\n", 144 | "2016-06-17 13:10:58.211885 0:00:20.938131 elapsed, iter 1090, LL -1057908.6873, 0.03% change from last\n", 145 | "2016-06-17 13:10:58.416276 0:00:21.142522 elapsed, iter 1100, LL -1058468.8849, -0.05% change from last\n", 146 | "2016-06-17 13:10:58.603322 0:00:21.329568 elapsed, iter 1110, LL -1057619.2204, 0.08% change from last\n", 147 | "2016-06-17 13:10:58.796025 0:00:21.522271 elapsed, iter 1120, LL -1057785.1044, -0.02% change from last\n", 148 | "2016-06-17 13:10:58.981232 0:00:21.707478 elapsed, iter 1130, LL -1057289.2216, 0.05% change from last\n", 149 | "2016-06-17 13:10:59.163764 0:00:21.890010 elapsed, iter 1140, LL -1057307.1718, -0.00% change from last\n", 150 | "2016-06-17 13:10:59.348753 0:00:22.074999 elapsed, iter 1150, LL -1057839.2465, -0.05% change from last\n", 151 | "2016-06-17 13:10:59.530934 0:00:22.257180 elapsed, iter 1160, LL -1057301.1392, 0.05% change from last\n", 152 | "2016-06-17 13:10:59.718168 0:00:22.444414 elapsed, iter 1170, LL -1057275.5159, 0.00% change from last\n", 153 | "2016-06-17 13:10:59.908703 0:00:22.634949 elapsed, iter 1180, LL -1056935.5136, 0.03% change from last\n", 154 | "2016-06-17 13:11:00.111827 0:00:22.838073 elapsed, iter 1190, LL -1057510.6013, -0.05% change from last\n", 155 | "2016-06-17 13:11:00.308798 0:00:23.035044 elapsed, 
iter 1200, LL -1057643.9358, -0.01% change from last\n", 156 | "2016-06-17 13:11:00.498339 0:00:23.224585 elapsed, iter 1210, LL -1057890.1881, -0.02% change from last\n", 157 | "2016-06-17 13:11:00.686906 0:00:23.413152 elapsed, iter 1220, LL -1057591.6045, 0.03% change from last\n", 158 | "2016-06-17 13:11:00.869978 0:00:23.596224 elapsed, iter 1230, LL -1057327.5614, 0.02% change from last\n", 159 | "2016-06-17 13:11:01.059223 0:00:23.785469 elapsed, iter 1240, LL -1057853.5008, -0.05% change from last\n", 160 | "2016-06-17 13:11:01.244951 0:00:23.971197 elapsed, iter 1250, LL -1057975.9690, -0.01% change from last\n", 161 | "2016-06-17 13:11:01.429801 0:00:24.156047 elapsed, iter 1260, LL -1057603.7133, 0.04% change from last\n", 162 | "2016-06-17 13:11:01.612419 0:00:24.338665 elapsed, iter 1270, LL -1057793.9136, -0.02% change from last\n", 163 | "2016-06-17 13:11:01.800438 0:00:24.526684 elapsed, iter 1280, LL -1057272.9601, 0.05% change from last\n", 164 | "2016-06-17 13:11:01.985682 0:00:24.711928 elapsed, iter 1290, LL -1058053.1538, -0.07% change from last\n", 165 | "2016-06-17 13:11:02.167643 0:00:24.893889 elapsed, iter 1300, LL -1057930.8225, 0.01% change from last\n", 166 | "2016-06-17 13:11:02.349811 0:00:25.076057 elapsed, iter 1310, LL -1057663.4197, 0.03% change from last\n", 167 | "2016-06-17 13:11:02.532825 0:00:25.259071 elapsed, iter 1320, LL -1057976.6743, -0.03% change from last\n", 168 | "2016-06-17 13:11:02.721613 0:00:25.447859 elapsed, iter 1330, LL -1057031.1312, 0.09% change from last\n", 169 | "2016-06-17 13:11:02.903822 0:00:25.630068 elapsed, iter 1340, LL -1057909.5857, -0.08% change from last\n", 170 | "2016-06-17 13:11:03.086480 0:00:25.812726 elapsed, iter 1350, LL -1057181.3712, 0.07% change from last\n", 171 | "2016-06-17 13:11:03.269645 0:00:25.995891 elapsed, iter 1360, LL -1058250.4331, -0.10% change from last\n", 172 | "2016-06-17 13:11:03.452580 0:00:26.178826 elapsed, iter 1370, LL -1057827.3525, 0.04% change from 
last\n", 173 | "2016-06-17 13:11:03.635448 0:00:26.361694 elapsed, iter 1380, LL -1057821.7964, 0.00% change from last\n", 174 | "2016-06-17 13:11:03.820471 0:00:26.546717 elapsed, iter 1390, LL -1058027.4480, -0.02% change from last\n", 175 | "2016-06-17 13:11:04.004303 0:00:26.730549 elapsed, iter 1400, LL -1057609.2496, 0.04% change from last\n", 176 | "2016-06-17 13:11:04.186923 0:00:26.913169 elapsed, iter 1410, LL -1057853.6963, -0.02% change from last\n", 177 | "2016-06-17 13:11:04.368835 0:00:27.095081 elapsed, iter 1420, LL -1057926.2057, -0.01% change from last\n", 178 | "2016-06-17 13:11:04.551367 0:00:27.277613 elapsed, iter 1430, LL -1057933.3259, -0.00% change from last\n", 179 | "2016-06-17 13:11:04.737879 0:00:27.464125 elapsed, iter 1440, LL -1058137.8651, -0.02% change from last\n", 180 | "2016-06-17 13:11:04.919822 0:00:27.646068 elapsed, iter 1450, LL -1057619.8346, 0.05% change from last\n", 181 | "2016-06-17 13:11:05.102256 0:00:27.828502 elapsed, iter 1460, LL -1058086.0081, -0.04% change from last\n", 182 | "2016-06-17 13:11:05.285646 0:00:28.011892 elapsed, iter 1470, LL -1058390.1582, -0.03% change from last\n", 183 | "2016-06-17 13:11:05.473943 0:00:28.200189 elapsed, iter 1480, LL -1057295.1406, 0.10% change from last\n", 184 | "2016-06-17 13:11:05.656719 0:00:28.382965 elapsed, iter 1490, LL -1057538.1868, -0.02% change from last\n", 185 | "2016-06-17 13:11:05.850708 0:00:28.576954 elapsed, iter 1500, LL -1057974.6099, -0.04% change from last\n", 186 | "2016-06-17 13:11:06.044180 0:00:28.770426 elapsed, iter 1510, LL -1057416.3535, 0.05% change from last\n", 187 | "2016-06-17 13:11:06.226593 0:00:28.952839 elapsed, iter 1520, LL -1057762.2577, -0.03% change from last\n", 188 | "2016-06-17 13:11:06.414366 0:00:29.140612 elapsed, iter 1530, LL -1057713.0372, 0.00% change from last\n", 189 | "2016-06-17 13:11:06.596794 0:00:29.323040 elapsed, iter 1540, LL -1057331.7771, 0.04% change from last\n", 190 | "2016-06-17 13:11:06.784614 
0:00:29.510860 elapsed, iter 1550, LL -1057761.4479, -0.04% change from last\n", 191 | "2016-06-17 13:11:06.969252 0:00:29.695498 elapsed, iter 1560, LL -1057854.2256, -0.01% change from last\n", 192 | "2016-06-17 13:11:07.157597 0:00:29.883843 elapsed, iter 1570, LL -1057997.7047, -0.01% change from last\n", 193 | "2016-06-17 13:11:07.341740 0:00:30.067986 elapsed, iter 1580, LL -1057323.5061, 0.06% change from last\n", 194 | "2016-06-17 13:11:07.525324 0:00:30.251570 elapsed, iter 1590, LL -1057882.7724, -0.05% change from last\n", 195 | "2016-06-17 13:11:07.709046 0:00:30.435292 elapsed, iter 1600, LL -1057557.3565, 0.03% change from last\n", 196 | "2016-06-17 13:11:07.892641 0:00:30.618887 elapsed, iter 1610, LL -1058021.6806, -0.04% change from last\n", 197 | "2016-06-17 13:11:08.077684 0:00:30.803930 elapsed, iter 1620, LL -1057482.9917, 0.05% change from last\n", 198 | "2016-06-17 13:11:08.260967 0:00:30.987213 elapsed, iter 1630, LL -1057416.4411, 0.01% change from last\n", 199 | "2016-06-17 13:11:08.447840 0:00:31.174086 elapsed, iter 1640, LL -1057848.0435, -0.04% change from last\n", 200 | "2016-06-17 13:11:08.630185 0:00:31.356431 elapsed, iter 1650, LL -1057683.2289, 0.02% change from last\n", 201 | "2016-06-17 13:11:08.814836 0:00:31.541082 elapsed, iter 1660, LL -1057552.1824, 0.01% change from last\n", 202 | "2016-06-17 13:11:09.000564 0:00:31.726810 elapsed, iter 1670, LL -1057806.1368, -0.02% change from last\n", 203 | "2016-06-17 13:11:09.187316 0:00:31.913562 elapsed, iter 1680, LL -1057260.3276, 0.05% change from last\n", 204 | "2016-06-17 13:11:09.369329 0:00:32.095575 elapsed, iter 1690, LL -1057668.3761, -0.04% change from last\n", 205 | "2016-06-17 13:11:09.556560 0:00:32.282806 elapsed, iter 1700, LL -1058174.3369, -0.05% change from last\n", 206 | "2016-06-17 13:11:09.739722 0:00:32.465968 elapsed, iter 1710, LL -1056998.4681, 0.11% change from last\n", 207 | "2016-06-17 13:11:09.927147 0:00:32.653393 elapsed, iter 1720, LL -1057991.7070, 
-0.09% change from last\n", 208 | "2016-06-17 13:11:10.113610 0:00:32.839856 elapsed, iter 1730, LL -1057342.3064, 0.06% change from last\n", 209 | "2016-06-17 13:11:10.294833 0:00:33.021079 elapsed, iter 1740, LL -1058239.4359, -0.08% change from last\n", 210 | "2016-06-17 13:11:10.478141 0:00:33.204387 elapsed, iter 1750, LL -1057713.8061, 0.05% change from last\n", 211 | "2016-06-17 13:11:10.663564 0:00:33.389810 elapsed, iter 1760, LL -1057527.9539, 0.02% change from last\n", 212 | "2016-06-17 13:11:10.848634 0:00:33.574880 elapsed, iter 1770, LL -1057879.7333, -0.03% change from last\n", 213 | "2016-06-17 13:11:11.034800 0:00:33.761046 elapsed, iter 1780, LL -1057921.9414, -0.00% change from last\n", 214 | "2016-06-17 13:11:11.217707 0:00:33.943953 elapsed, iter 1790, LL -1057664.7590, 0.02% change from last\n", 215 | "2016-06-17 13:11:11.399538 0:00:34.125784 elapsed, iter 1800, LL -1057632.6469, 0.00% change from last\n", 216 | "2016-06-17 13:11:11.584036 0:00:34.310282 elapsed, iter 1810, LL -1057753.8580, -0.01% change from last\n", 217 | "2016-06-17 13:11:11.767065 0:00:34.493311 elapsed, iter 1820, LL -1057705.6573, 0.00% change from last\n", 218 | "2016-06-17 13:11:11.952836 0:00:34.679082 elapsed, iter 1830, LL -1057373.1562, 0.03% change from last\n", 219 | "2016-06-17 13:11:12.134437 0:00:34.860683 elapsed, iter 1840, LL -1057597.1580, -0.02% change from last\n", 220 | "2016-06-17 13:11:12.317176 0:00:35.043422 elapsed, iter 1850, LL -1056923.8004, 0.06% change from last\n", 221 | "2016-06-17 13:11:12.501502 0:00:35.227748 elapsed, iter 1860, LL -1057823.8369, -0.09% change from last\n", 222 | "2016-06-17 13:11:12.684613 0:00:35.410859 elapsed, iter 1870, LL -1057896.4055, -0.01% change from last\n", 223 | "2016-06-17 13:11:12.869461 0:00:35.595707 elapsed, iter 1880, LL -1057176.8540, 0.07% change from last\n", 224 | "2016-06-17 13:11:13.054705 0:00:35.780951 elapsed, iter 1890, LL -1058206.9325, -0.10% change from last\n", 225 | "2016-06-17 
13:11:13.236649 0:00:35.962895 elapsed, iter 1900, LL -1057581.9471, 0.06% change from last\n", 226 | "2016-06-17 13:11:13.418828 0:00:36.145074 elapsed, iter 1910, LL -1057696.6917, -0.01% change from last\n", 227 | "2016-06-17 13:11:13.600831 0:00:36.327077 elapsed, iter 1920, LL -1058241.3655, -0.05% change from last\n", 228 | "2016-06-17 13:11:13.784223 0:00:36.510469 elapsed, iter 1930, LL -1057885.3465, 0.03% change from last\n", 229 | "2016-06-17 13:11:13.968371 0:00:36.694617 elapsed, iter 1940, LL -1057912.9225, -0.00% change from last\n", 230 | "2016-06-17 13:11:14.151257 0:00:36.877503 elapsed, iter 1950, LL -1058170.6172, -0.02% change from last\n", 231 | "2016-06-17 13:11:14.333455 0:00:37.059701 elapsed, iter 1960, LL -1057754.7908, 0.04% change from last\n", 232 | "2016-06-17 13:11:14.516926 0:00:37.243172 elapsed, iter 1970, LL -1058450.5486, -0.07% change from last\n", 233 | "2016-06-17 13:11:14.701658 0:00:37.427904 elapsed, iter 1980, LL -1057317.3581, 0.11% change from last\n", 234 | "2016-06-17 13:11:14.883850 0:00:37.610096 elapsed, iter 1990, LL -1057309.4540, 0.00% change from last\n" 235 | ] 236 | } 237 | ], 238 | "source": [ 239 | "# Generate topics\n", 240 | "# We assume a vocabulary of 'rows'^2 terms, and create 'rows'*2 \"topics\",\n", 241 | "# where each topic assigns exactly 'rows' consecutive terms equal probability.\n", 242 | "rows = 3\n", 243 | "V = rows * rows\n", 244 | "K = rows * 2\n", 245 | "N = K * K\n", 246 | "D = 10000\n", 247 | "seed = 42\n", 248 | "topics = []\n", 249 | "topic_base = np.concatenate((np.ones((1, rows)) * (1/rows),\n", 250 | " np.zeros((rows-1, rows))), axis=0).ravel()\n", 251 | "for i in range(rows):\n", 252 | " topics.append(np.roll(topic_base, i * rows))\n", 253 | "topic_base = np.concatenate((np.ones((rows, 1)) * (1/rows),\n", 254 | " np.zeros((rows, rows-1))), axis=1).ravel()\n", 255 | "for i in range(rows):\n", 256 | " topics.append(np.roll(topic_base, i))\n", 257 | "topics = np.array(topics)\n", 258 | 
"\n", 259 | "# Generate documents from topics\n", 260 | "# We generate D documents from these V topics by sampling D topic\n", 261 | "# distributions, one for each document, from a Dirichlet distribution with\n", 262 | "# parameter α=(1,…,1)\n", 263 | "alpha = np.ones(K)\n", 264 | "np.random.seed(seed)\n", 265 | "thetas = np.random.dirichlet(alpha, size=D)\n", 266 | "topic_assignments = np.array([np.random.choice(range(K), size=N, p=theta)\n", 267 | " for theta in thetas])\n", 268 | "word_assignments = np.array([[np.random.choice(range(V), size=1,\n", 269 | " p=topics[topic_assignments[d, n]])[0]\n", 270 | " for n in range(N)] for d in range(D)])\n", 271 | "doc_term_matrix = np.array([np.histogram(word_assignments[d], bins=V,\n", 272 | " range=(0, V - 1))[0] for d in range(D)])\n", 273 | "\n", 274 | "# Generate responses\n", 275 | "# Choose prameter values\n", 276 | "np.random.seed(seed)\n", 277 | "\n", 278 | "# Estimate parameters\n", 279 | "_K = K\n", 280 | "_alpha = alpha\n", 281 | "_beta = np.repeat(0.01, V)\n", 282 | "n_iter = 2000\n", 283 | "lda = LDA(_K, _alpha, _beta, n_iter, seed=42)\n", 284 | "\n", 285 | "lda.fit(doc_term_matrix)\n", 286 | "results = lda.phi" 287 | ] 288 | }, 289 | { 290 | "cell_type": "code", 291 | "execution_count": 3, 292 | "metadata": { 293 | "collapsed": false 294 | }, 295 | "outputs": [ 296 | { 297 | "name": "stdout", 298 | "output_type": "stream", 299 | "text": [ 300 | "5.34768078599e-05\n", 301 | "0.000173033500443\n", 302 | "0.000304934534738\n", 303 | "0.000311130904051\n", 304 | "0.000231199587745\n", 305 | "1.03023470842e-05\n" 306 | ] 307 | } 308 | ], 309 | "source": [ 310 | "for res in results:\n", 311 | " minimized_KL = 1\n", 312 | " for topic in topics:\n", 313 | " KL = KL_divergence(topic, res)\n", 314 | " if KL < minimized_KL:\n", 315 | " minimized_KL = KL\n", 316 | " print(minimized_KL)" 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "execution_count": 4, 322 | "metadata": { 323 | "collapsed": false 324 | }, 
325 | "outputs": [ 326 | { 327 | "data": { 328 | "text/plain": [ 329 | "[]" 330 | ] 331 | }, 332 | "execution_count": 4, 333 | "metadata": {}, 334 | "output_type": "execute_result" 335 | }, 336 | { 337 | "data": { 338 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYEAAAEHCAYAAABIsPrhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHWpJREFUeJzt3X2wHXWd5/H3hwTYiWCEWQgWEHSRB2F1ImyFUCR7s6VA\nwBliTVGDllU8bJWybGbHKqtWHgSJDBj1L4dhZ1ln0QG2LJ2hBggga6Ccm8BahiwhgEogzPBkiNEt\nCBICGMJ3/+i+187Jebzd53Sf7s+r6lTOQ3f/fvd+Tud7+5zu308RgZmZNdN+ZXfAzMzK4yJgZtZg\nLgJmZg3mImBm1mAuAmZmDeYiYGbWYC4CZmYN5iJQIEmHSLpT0k5Jz0n6TNl9snwkrZC0QdJbkr5T\ndn+sGJIOkPQ/JT0v6TVJGyUtK7tfZZhddgdq5m+At4DDgFOA+yRtioinyu2W5bAV+EvgbOAPSu6L\nFWc28CKwJCJekvRJ4O8l/duIeLHkvo2UfMVwMSTNAV4FToqIf06fuxXYGhFXldo5y03SXwJHRsR/\nLLsvNhySHgdWRsSdZfdllPxxUHGOB3ZPFYDU48DJJfXHzPokaR5wHPDzsvsyai4CxTkI+G3Lc78F\nDi6hL2bWJ0mzgf8F/F1EPFN2f0bNRaA4O4H3tjw3F3i9hL6YWR8kiaQAvA38l5K7UwoXgeI8A8yW\ndGzmuT+igYeXZmPkFuBfA38aEXvK7kwZXAQKEhG7gH8ErpM0R9Ji4E+A28vtmeUhaZakfwXMIiny\nB0qaVXa/LD9JNwMnAudFxO/K7k9ZXASKtQKYA/ya5BDzP/n00LF3NbALuBz4bHr/y6X2yHKTNB/4\nPLAA2C7pdUm/beK1PblOEZV0CPAD4BjgeeDPIuK1Nss9D7wGvEtyBs3CGTdqQ+dc68vZWqu8RwJX\nAA9GxAnAj4ErOyz3LrA0Ij7mN9NYcK715WxtL3mLwHLg1vT+rcCnOiynAtqy0XGu9eVsbS95Qz48\nIrYDRMSvgMM7LBfAA+kYLJ/L2aYNn3OtL2dre+k5dpCkB4B52adI3iBXt1m80xcMZ0TENkmHkbyx\nnoqIhwfurRXGudaXs7VB9CwCEXFmp9ckbZc0LyK2SzqC5KyYdtvYlv77G0l3AguBtm8oSR7MqFx3\nQ5JDRMi51spUtoXts861OiJCM1kv78dBq4GL0/sXkb7JstJz5g9K778HOAv4Wc52h+qdd97Jdbvm\nmmtyr18htcnVpl2c/utsC3LllVfy+uuvz/iWd/088haBbwBnSnoa+DjwdQBJ75d0b7rMPOBhSY8B\nPwXuiYg1Odu1EXCuteV91qblmk8gIl4BPtHm+W3AH6f3nyO5IMPGTHoaYfaxc62BiPA+a9N8CtgQ\nTExMlLq+mY3WkiVLSl0/j8pNKlOFL5reeeedsrvA7NnlT/o20y+a2qlCrpZwrsXL+7l8XgcffHBp\nXwybmdkYcxEwM2swFwEzswZzETAzazAXATOzBnMRMDNrsEKKgKRlkjZLekbS5R2WuVHSFkmbJPlC\nlDHgXOvL2dqU3EVA0n7ATcDZwMnAZySd2LLMOcCxEXEccClwc952bSScaw15n7WsIo4EFgJbIuKF\niNgNfJ9k4oqs5cBtABGxHpgraR5Wdc61nrzP2rQiisCRwEuZx79Mn+u2zNY2y1j1ONd68j5r08of\nm8DMzAby0EMP8dBDDxWyrSKKwFZgfubxUelzrcsc3WMZqx7nWk
/eZ8fckiVL9hp0btWqVTPeVhEf\nB20APiTpGEkHAJ8mmWwmazVwIYCkRcCOqXlOrdKcaz15n7VpuY8EImKPpD8H1pAUlVsi4ilJlyYv\nx7cj4oeSzpX0LPAGcEnedm0knGsNeZ+1LA8l3YaHkk54yOF6cq7F81DSZmY2llwEzMwazEXAzKzB\nXATMzBrMRcDMrMFcBMzMGsxFwMyswUYyn4CkCUk7JG1Mb1cX0a4Nl3OtL2drU3JfkZQZm/zjwMvA\nBkl3R8TmlkXXRcR5eduzkXKuNeR91rJGNZ8AQGFXKdrIONd68j5r00Y1nwDA6ek0dfdJOqmAdm34\nnGs9eZ+1aaMaoOZRYH5E7EqnrbsLOH5EbQ9s1qxZZXdhXAyU67XXXjt9f+nSpSxdunToHczauXPn\nSNtr58ADDxxpe2vXrmXt2rXTj6+//vp+Vx2rfbZsc+bMGWl7k5OTe+WaR+4B5NJhZldGxLL08RUk\nIxF+o8s6zwGnRsQrbV4rfUCqKgyqJ1XiSPxHReZa9u+1iUWg1QEHHABwOgXts1XYX6tgz549pbY/\na9asUgeQ6zk2eXZuUkkLSYrPPv9RWOU413ryPmvTRjKfAHC+pMuA3cCbwAV527WRcK415H3Wsjyf\nQBtV+J1U4eOgosedL/v36o+Dko+DPJ9A8Zr+cZCZmY0pFwEzswZzETAzazAXATOzBnMRMDNrMBcB\nM7MGcxEwM2uwouYTuEXSdklPdFnmRklb0gGpFhTRrg2Xc60n52pZRR0JfBc4u9OL6QBUx0bEccCl\nwM0FtWvD5VzrybnatEKKQEQ8DLzaZZHlwG3psuuBudmxSayanGs9OVfLGtV3Aq3jl2+l/fjlNl6c\naz051wbxF8NmZg02qklltgJHZx4flT5n422gXFeuXDl9v4xJZZqodVKZPnl/rbhKTSozvSHpA8A9\nEfGRNq+dC6yIiE+mk9B8KyIWddhO6aMSlj3aJVRnFNEicy379+pRRH8/imid9tcqGOdRRAs5EpD0\nPWAp8IeSXgSuBQ4gHZs8In4o6VxJzwJvAJcU0a4Nl3OtJ+dqWZ5PoI0q/E6qciRQ1LZ8JJCoypFA\nUdurwv5aBeN8JOAvhs3MGsxFwMyswVwEzMwazEXAzKzBXATMzBrMRcDMrMFcBMzMGsxFwMyswUYy\nqYykCUk7JG1Mb1cX0a4Nl3OtJ+dqWUUNIPdd4K9JxyDvYF1EnFdQezYazrWenKtNG9WkMgDlj4Ng\nA3Gu9eRcLWuU3wmcns5Xep+kk0bYrg2Xc60n59oQo5pP4FFgfkTsSucvvQs4fkRtD+x3v/td2V0Y\nFwPlWoVB8awvY7W/VsF++432HJvJyUkmJycL2VaR8wkcQzI++Uf7WPY54NSIeKXNa6WPSvj222+X\n3YXSR5uE6fkEapOrJZxr8coeIVdSJUYRFR0+R8xOUi1pIUnx2ecNZZXkXOvJuRowokllgPMlXQbs\nBt4ELiiiXRsu51pPztWyPKlMG/44KOHJR+rJuRav7P9Hq/JxkJmZjRkXATOzBnMRMDNrMBcBM7MG\ncxEwM2swFwEzswZzETAza7DcRUDSUZJ+LOnnkp6U9BcdlrtR0pZ0UKoFedu14XOu9eR91rKKuGL4\nHeCLEbFJ0kHAo5LWRMTmqQXSQaiOjYjjJJ0G3AwsKqBtGy7nWk/eZ21a7iOBiPhVRGxK7+8EngKO\nbFlsOekEFhGxHpibHZ/Eqsm51pP3Wcsq9DsBSR8AFgDrW146Engp83gr+77prKKca305WyusCKSH\nlXcAX0j/urAacK715WwNihtFdDbJm+n2iLi7zSJbgaMzj49Kn7MKc6715WzHW+UmlZF0G/D/IuKL\nHV4/F1gREZ+UtAj4VkS0/ZKpCqMSehTRabdTo1wtkU4qU8g+61wT4zyKaO4iIOkMYB3wJBDp7Srg\nGH4/PjmSbgKWAW8Al0TExg
7bK/1N5SIw7V1qlKtNW0xB+6xzTTS6CBStCm8qF4GEx52vJ+davLL/\nH/V8AmZmNiMuAmZmDeYiYGbWYC4CZmYN5iJgZtZgLgJmZg3mImBm1mAuAmZmDTaSSWUkTUjaIWlj\ners6b7tVtnbt2lLXL4pzrSfvs8XLO45PUeMAzUQRRwJTE1ScDJwOrJB0Ypvl1kXEKent+gLarax1\n69aVun6BnGs9eZ8tWKOLQJ8TVAAUdqm6jYZzrSfvs5Y1qkllAE5P5yq9T9JJRbZrw+Vc68vZGhFR\nyA04CPi/wPIOr81J758DPNNlO+FbNW7OtZ63IvfZsn8W3/bNddBbUfMJzAbuBe6PiL/qY/nngFMj\n4pXcjdvQONf6crY2paiPg74D/KLTmyk7QbWkhSRDWPvNVH3Otb6crQEFTC+ZTirzWeBJSY+RHJq0\nTlBxvqTLgN3Am8AFedu14XKu9eVsLatyk8qYmdnolHrFsKRDJK2R9LSkH0ma22G55yU9LukxSY+k\nzy2TtFnSM5Iu77DejZK2pGc4LGh5rev6vS6WkXSLpO2Snujy83Vrv+v6fbTf84KfPvowtIuGZprt\nuOfazzaGna1zda4D5VrU2UEzPKPoG8CX0vuXA1/vsNy/AIdkHu8HPEty+Lo/sAk4sWWdc4D70vun\nAT8dcP0JYHWXvi8mObXuiQ6vd2y/z/V7tX8EsCBzJsfTg/wOBthG134UmW0dcq1Cts7VuQ6Sadlj\nBy0Hbk3v3wp8qsNyYu+jloXAloh4ISJ2A99Pt9W67dsAImI9MFe//7Krn/Wn2m0rIh4GXu30eo/2\n+1m/V/v9XPDTqw/DvGhoJtmOfa59bqNXH3Jl61z35Vw7K7sIHB4R2yH5AYHDOywXwAOSNkj6HMkP\n/lLm9V+y7y+jdZmtmWX6WR/yXSzTrf1+9dW+Ol/w03cfumyj7360GDhb4CKakWvffcibrXPtW2Nz\nzX12UC+SHgCyFVUkb5B2n1V1+pb6jIjYJukw4AHgH4rtZVuPAvMjYpekc4C7gONH0O5A7Us6CLgD\n+EL618HAemyjYz+GkO0jQMfPbAtSdq599yFvts7VuXbqQ9bQjwQi4syI+Gjm9pH039XA9qlDHUlH\nAL/usI1t6b+/Ae4k+etjfmaRo0gqZtZW4OgOy2zttX5E7IyIXen9+4H9JR3a7WeVdLukbZJ2kHx+\neGGPPnbUT/tKLvi5A7g9Iu5us5luv4O+ttGtH0PI9kHghG797fEzDSXXKZKOI/nMdlWPPnY1imyd\na1/766SkNyX9Fjgb+Psefeyq6rl2UvbHQauBi9P7FwH7/FCS5qSVD0nvAc4C7gc+JOkYSQcAn063\n1brtC9P1FgE7pg5jgQ291ld/F8uIvT9/WwV8MCLeB3wJuFjSx9q032n9QdvvesEP3X8HfW2jz350\navvi9H6/2Z4EHFzBXKfcRPI57LEd2u9nG6PK1rn2zjWA/xwR7yX5buPxDu1328agfaherjHg2QFF\n3oBDSf5KeBpYA7wvff79wL3p/Q+SnA3wGPAkcEX6/LJ0vS2Z5y4FPp/Z/k0kZxU8DpzS0nbX9YEV\nwM/Sdn8CnNay/veAl4G3gReBS1rWPwF4A9jWof1e6/dq/wxgT+Z3szH9mQb5HfTcRq9+FJ1thXP9\nNskXkl8BNndqvwrZOte+c30a+Lt+2q9zrr5YrGCS/hvJX0p/QBLSv4/08MzGk6T3kvw1+h+AzwHH\nRsSF3deyqpP0TyRHKSIpCFdHRDVmdBqhsj8Oqp2IWEFyDu9i4B9J/mqw8XYd8LcR8XLZHbFCfQn4\nNyRn3/wtcI+kD5bbpdFzERiCSPyE5Auey8ruj82ckis2PwF8q+y+WLEiYkNEvBERuyPiNuD/AOeW\n3a9RG/opog03m/RLRBtbEyRXqr4oSSRHebMknRQR/67crlnBgpldQDfWfCRQEEmHSbpA0nsk
7Sfp\nbJKzGB4su2+Wy/8gKeQLgD8CbiYZh/+sMjtl+UiaK+ksSQdKmiXps8AS4H+X3bdR85FAcYLko5//\nTlJcXyC5mOO+UntluUTEW8BbU48l7QTeCo+tP+72B64nOYtvD8kZX8sj4tlSe1WCXGcHSToE+AHJ\n4fLzwJ9FxGttlnseeA14F9gdEQtn3KgNnXOtL2drrfJ+HHQF8GBEnAD8GLiyw3LvAksj4mN+M40F\n51pfztb2krcIzHQUUKs251pfztb2kjfkmY4CatXmXOvL2dpeen4xPIQRBR+Q9FQkY3O3a8+XMFeA\npIiIqdPlnGt95crWuVZHZn8dSM8iEBFndnpNyVRr8yJie78jCkq6k2SSiLb/WQC89to+31P1bdWq\nVVx5ZaePOftb/5prrpnx+gA33HADX/7yl3Otf8MNN+Tqw8qVK1m5cuWM1v3whz/M5s2bgf5Hiuwn\n1zwnIeT5eaYkp/nbMPZZG195Pw6a6SigP8vZrg3Reeedl33oXOvn4vRfZ2u5i8A3gDMlPQ18HPg6\ngKT3S7o3XWYe8LCkx4CfAvdExJqc7doQXX55Mo+3c60t77M2LdfFYukFM59o8/w24I/T+8+RXG05\nEosXLy51fYAlS5aUuj7A0qVLZ7zuoYcmc1CkpxFOKzPXPD+P7S0iKrXPWrkqN5S0pMjznUARZs8u\n/0LqOXPmlNq+pBl/0dRhe1H2e83fCSSKzrWobVk+M83V5wGbmTWYi4CZWYO5CJiZNZiLgJlZg7kI\nmJk1mIuAmVmDFVIEJC2TtFnSM5Iu77DMjZK2SNqUzttqFedc68vZ2pTcRUDSfsBNwNnAycBnJJ3Y\nssw5wLERcRxwKckUfVZ9zrWGvM9aVhFHAguBLRHxQkTsBr5PMmZ51nLgNoCIWA/MlTQPqzrnWk/e\nZ21aEUXgSOClzONfps91W2Zrm2WsepxrPXmftWn+YtjMrMGKGCRnKzA/8/io9LnWZY7uscy0VatW\nTd9fvHhxIQOqWXeTk5NMTk62Pl1ortn5AJYuXepB4cpT+D5r4yv3AHKSZgFTw9JuAx4BPhMRT2WW\nORdYERGflLQI+FZELOqwPQ8gRzUGkAP+mQJz9QBylTGbgvZZDyBXHUObWayPhvdI+nNgDcnHS7dE\nxFOSLk1ejm9HxA8lnSvpWeAN4JK87dpIONca8j5rWR5Kug0fCXgo6TrzUNL15KGkzcxsYC4CZmYN\n5iJgZtZgLgJmZg3mImBm1mAuAmZmDeYiYGbWYC4CZmYNNpJJZSRNSNohaWN6u7qIdm24nGt9OVub\nkvvS2MwEFR8HXgY2SLo7Ija3LLouIs7L256NlHOtIe+zljWqSWUAfM3++HGu9eR91qYVMUhOuwkq\nFrZZ7nRJm0iGo/2vEfGLThucO3duAd2yAhSa686dOwvuns1Q4fusja9RjZT2KDA/Inalc5feBRw/\norZteAbK9Wtf+9r0/SVLlnieiGrzPtsQI5lUJiJ2Zu7fL+lvJB0aEa8U0L4NT6G5XnXVVcPppQ3K\n+6xNK+I7gQ3AhyQdI+kA4NPA6uwC2QmqJS0kGcLab6bqc6715H3Wpo1kUhngfEmXAbuBN4EL8rZr\nI+Fca8j7rGVVclKZsvtgiaInH3n99deL2tyMHHzwwaW2XxWeVKaePKmMmZkNzEXAzKzBXATMzBrM\nRcDMrMFcBMzMGsxFwMyswVwEzMwarKj5BG6RtF3SE12WuVHSFkmbJC0ool0bLudaT87Vsoo6Evgu\ncHanF9MBqI6NiOOAS4GbC2rXhsu51pNztWmFFIGIeBh4tcsiy4Hb0mXXA3OzY5NYNTnXenKuljWq\n7wRaxy/fmj5n48251pNzbRB/MWxm1mCjmlRmK3B05vE+45fbWBooV08qMza8vzZIkUVAdJ6TdDWw\nAviBpEXAjojYXmDbNjyF5epJZSrF+6sBBRUBSd8DlgJ/
KOlF4FrgANKxySPih5LOlfQs8AZwSRHt\n2nA513pyrpbl+QSsI88nUE+eT6CePJ+AmZkNzEXAzKzBXATMzBrMRcDMrMFcBMzMGsxFwMyswVwE\nzMwazEXAzKzBRjKpjKQJSTskbUxvVxfRrg2Xc60n52pZRY0d9F3gr0nHIO9gXUScV1B7NhrOtZ6c\nq00b1aQy0HmwKqso51pPztWyRvmdwOnpfKX3STpphO3acDnXenKuDTGq+QQeBeZHxK50/tK7gONH\n1LYNz0C5fvOb35y+PzExwcTExPB7aDMxUK5f+cpXpu9PTEywdOnSoXewavbbb7Tn2ExOTjI5OTn9\n+Ktf/eqMt1XYKKKSjgHuiYiP9rHsc8CpEfFKm9c8KmFFRISKzHX37t3D6Gbf9t9//1Lbr4qic92z\nZ88wujlWRl0EWkmqxCiiHSepyE5SLWkhSfHZ5w1lleRc68m5GjCiSWWA8yVdBuwG3gQuKKJdGy7n\nWk/O1bI8qYx1VPTkI/44qBqKztUfB/njIDMzG1MuAmZmDeYiYGbWYC4CZmYN5iJgZtZgLgJmZg3m\nImBm1mC5i4CkoyT9WNLPJT0p6S86LHejpC3poFQL8rZrw+dc68n7rGUVccXwO8AXI2KTpIOARyWt\niYjNUwukg1AdGxHHSToNuBlYVEDbNlzOtZ68z9q03EcCEfGriNiU3t8JPAUc2bLYctIJLCJiPTA3\nOz6JVZNzrSfvs5ZV6HcCkj4ALADWt7x0JPBS5vFW9n3TWUU51/pytlbYfALpYeUdwBfSvy6sBorM\n9brrrpu+7/kEyldUttmx7Js6n8Cotc4nkEchA8hJmg3cC9wfEX/V5vWbgX+KiB+kjzcDExGxvc2y\nHkCuOvanwFw9gFw1pPMJFLLPegC5hAeQg+8Av2j3ZkqtBi4EkLQI2NHuPwqrHOdaX87WgAKOBCSd\nAawDngQivV0FHMPvxydH0k3AMuAN4JKI2Nhhez4SqI53KTBXHwlUxmIK2md9JJAY5yMBzydgHXk+\ngXryfALFG+ci4CuGzcwazEXAzKzBXATMzBrMRcDMrMFcBMzMGsxFwMyswVwEzMwabCTzCUiakLRD\n0sb0dnXedm34nGs9eZ+1rCKOBKbGJj8ZOB1YIenENsuti4hT0tv1BbRrw1eZXNeuXTusTTdRZfbZ\nIgZBy7uNuvRhpkY1nwBAYVcp2mhUKVcXgeJUaZ8tIte82yiiD40uAlldxiYHOD2dpu4+SScV2a4N\nl3OtL2dro5pP4FFgfkTsSqetuws4vqi2bXica305W4MRzSfQZvnngFMj4pU2r3kAueroOp9AK+c6\nHvqZT6BVp2yda3XMdAC5oo4Euo5NLmne1FjkkhaSFJ99/qOAYkc4tHwk3YZzratC9lnnOv5yF4F0\nPoHPAk9Keoz2Y5OfL+kyYDfwJnBB3nZtuJxrfTlby6rcfAJmZjY6pV4xLOkQSWskPS3pR5Lmdlju\neUmPS3pM0iPpc8skbZb0jKTLO6x3o6Qt6RkOC1pe67p+r4tlJN0iabukJ7r8fN3a77p+H+33vOCn\njz4M7aKhmWY77rn2s41hZ+tcnetAuUZEaTfgG8CX0vuXA1/vsNy/AIdkHu8HPEty+Lo/sAk4sWWd\nc4D70vunAT8dcP0JYHWXvi8mObXuiQ6vd2y/z/V7tX8EsCC9fxDw9CC/gwG20bUfRWZbh1yrkK1z\nda6DZFr22EHLgVvT+7cCn+qwnNj7qGUhsCUiXoiI3cD30221bvs2gIhYD8yVNG+A9afabSsiHgZe\n7fR6j/b7Wb9X+/1c8NOrD8O8aGgm2Y59rn1uo1cfcmXrXPflXDsruwgcHukZCBHxK+DwDssF8ICk\nDZI+R/KDv5R5/Zfs+8toXWZrZpl+1od8F8t0a79ffbWvzhf89N2HLtvoux8tBs4WuIhm5Np3H/Jm\n61z71thcC7tYrBNJ
DwDZiiqSN0i7z6o6fUt9RkRsk3QY8ADwD8X2sq2yL5bpq311v+CnLz220bEf\nQ8j2EaDjZ7YFKTvXvvuQN1vn6lw79SFr6EcCEXFmRHw0c/tI+u9qYPvUoY6kI4Bfd9jGtvTf3wB3\nkvz1MT+zyFEkFTNrK3B0h2W29lo/InZGxK70/v3A/pIO7ffn7tF+T/20r+SCnzuA2yPi7pn0odc2\nuvVjCNk+CJzQrb89fqbK59pvH/Jm61yda7+/h7I/DloNXJzevwjY54eSNCetfEh6D3AWcD/wIUnH\nSDoA+HS6rdZtX5iutwjYMXUYC2zotX7280B1vlhGdP78rVv7Pdfvs/2uF/z02YeeFw310Y9ObV+c\n3u8325OAg2uQa9dtjChb5+pc+8s1Bjw7oMgbcCjJXwlPA2uA96XPvx+4N73/QZKzAR4DngSuSJ9f\nlq63JfPcpcDnM9u/ieSsgseBU1ra7ro+sAL4WdruT4DTWtb/HvAy8DbwInDJgO13Xb+P9s8A9mR+\nNxvTn2mQPvTcRq9+FJ3tuOdahWydq3MdJFdfLGZm1mBlfxxkZmYlchEwM2swFwEzswZzETAzazAX\nATOzBnMRMDNrMBcBM7MGcxEwM2uw/w/D++wgFj9towAAAABJRU5ErkJggg==\n", 339 | "text/plain": [ 340 | "" 341 | ] 342 | }, 343 | "metadata": {}, 344 | "output_type": "display_data" 345 | }, 346 | { 347 | "data": { 348 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaIAAAEACAYAAADx33KKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XuUVeWd5vHvA4h4AeQS0XAJoKLijUAQEk1STQYRuzNi\nRjuu6QyYJmvs1nTnMkm3Gae5dJLOaNo1RJNo94y9BFe3pKO2txBFW8q2O4Igdy9Qmhi5iBegiKBc\nhN/88b7otqyiKM6p2gX1fNaqxT7vu/d73r3PPvthX87eigjMzMzK0qnsDpiZWcfmIDIzs1I5iMzM\nrFQOIjMzK5WDyMzMSuUgMjOzUlUURJIul7Ra0l5JIxvUfUdSnaTnJV1UKB8paaWktZJmFcq7Spqb\np3lK0qBC3ZQ8/hpJkwvlgyUtzHV3SepSqLs5t7Vc0ohK5tPMzFpPpXtEq4DLgCeKhZLOBP4QOBOY\nCPxUknL1rcDUiBgGDJM0IZdPBbZExGnALODG3FYvYBowGhgDTJfUM09zA3BTbqs+t4GkicApua2r\ngdsqnE8zM2slFQVRRKyJiDpADaouBeZGxLsR8TJQB5wv6SSge0QszuPNASYVppmdh+8GxuXhCcD8\niNgWEfXAfODiXDcOuCcPz27Q1pzcx0VAT0n9KplXMzNrHa11jqg/sK7wekMu6w+sL5Svz2UfmCYi\n9gLbJPVuqi1JfYCtEbHvQG01eH8zM2tnujQ3gqRHgeLehIAAro+IB1urY3x4L+tQxzEzs3as2SCK\niPGH0O4GYGDh9YBc1lR5cZqNkjoDPSJii6QNQE2DaRZExGZJPSV1yntFjbXV2Pt8gCTfbM/M7BBE\nRFV2Bqp5aK7YoQeAK/OVcEOAU4GnI2IT6ZDb+fnihcnA/YVppuThK4DH8/AjwPgcOr2A8bkMYEEe\nlzxtsa3JAJLGAvUR8VpTHY8I/1Xpb/r06aX34Uj68/L0smyvf9XU7B7RgUiaBNwC9AUekrQ8IiZG\nxHOS/hl4DtgDXBPv9/xa4A6gGzAvIh7O5bcDd0qqAzYDVwJExFZJ3wWWkA4Jzox00QLAdcDcXL8s\nt0FEzJN0iaQXgR3AlyuZTzMzaz0VBVFE3Afc10TdD4AfNFL+DHBOI+W7SJd8N9bWHaTwalj+G9Il
\n3Y1N89Wme25mZu2F76xgVVVTU1N2F44oXp7V42XZfqnax/oON5Kioy8DM7OWkkS0w4sVzMzMWsxB\nZGZmpXIQmZlZqRxEZmZWKgeRmZmVykFkZmalchCZmVmpHERmZlYqB5GZmZXKQWRmZqVyEJmZWakc\nRGZmVioHkZmZlcpBZGZmparowXh2+IqATZvg5JNbPt2//RusXw/DhsE558Cvfw2/+AWsWAETJsC7\n78Ijj8Dpp8OgQbB3L+zbB2ecAb/5DezYAZ/+NKxcCf/xH3DKKfDmm7BmDUyalMZ/7DF4+GH4whfg\n3nuhvh6+8Q1YvBjOPDO116kTfOYz8C//Au+8k/rz9tuwezfs3Am9esGDD8LHP57qjzoKPvIRqKuD\nESNSv4cMgW3b4MUX4dxz07S9esFbb6W+DhmShs84I83r1q2wejW8/DKMHg2vvZba/O1vU93JJ8Oe\nPfD662l+xoyBVatSO6efnvr8yiuwaxe8+mpaPh/7WJqfW25Jry+7LM3vmDFpWXXuDD/5Serzueem\n/gwfDm+8AZs3p3F+9Ss48cT0vrt3p+Xy29/C0KHp9fbt6b1ffDF9jlu3wqhRMHAg3HNPeo/jj4cB\nA9K8LVwIX/oSfPSjsGwZXHBBmu9zz4VnnknrQa9ead5POCF9jqNHw6mnpn698gr07p0+13fegZNO\ngu99L733978P69al9WTIkDT81ltw7LHp81u6NC3rs89O71Nfn+bxjTegT580/h/9ESxfntr+3e/S\ncq+rS5/1U0+l6VauTOvTG29A9+7Qvz88/XRaZv36wWmnwQsvpLaHDUvz060b9OiRlsdTT6X+H388\nPPtsGv7kJ99f7599NvX1U5+CY45J0zz4YPpsXn899a9bNxg7Fvr2Tcv1zTfTMn7rLejSJX0un/tc\n+j698EJqt2vX9H369KfTerdrV+r/xo1pvd65M30uRx+dluHevenz/NGP4NJL0zLs1y+t76NGpene\nfjt9Py+4IH2fTjoJvvKVtM7165fWu1690ndhwIDUl2OOSdP17ZvWoa1bobY29ffoo6u3LQI/j+iw\neB7Rxo1phe7ePa28998PP/gBSDB+fFpptm+HcePSBmT37vTFPOqoFDZnnJFWqH/6p/RF3b49fdl+\n+csUHLt2pRXsK1+B445LK+Zjj6Uvy3nnpS/6vn1w992pzT17PtzHvn3Tl2zYsPRFWr069XnnzrRC\n9+iR+tu1a9pIFZ18clq577orfamuuCJ9wX73u7Tx3rgR7rsvlW/YkDao/frBQw+lL9bQoWmDsHRp\n2ijuD6PPfz6933nnpQ0lpI1qz55w8cVp4z18eGqzri7Ng5Q26Cee+H6YLVmS5vnNN+Gzn31//Ndf\nTyFy8smp3/s3YgMGwIIFqS+9e8Pjj6dg6NMn9a1r17TxHzQozdM776T3ffrp1O+lS1Nfzz03hdXw\n4fDEE+8vr498JJUddVQKhaFDU9v79qWNxTPPwFVXwR13pPEvvBAGD4ZFi1K///AP07wNHQpbtqTP\neffu9H4vvQT/+q9p4/+tb72/bnTtmjaSRx2VluGAAen9duxIG6Xjj0+v3303/T36aNpYDhqUNtif\n/3xaJhFpw3zssWlZbdmSlu1ZZ6XPY82aNP3w4Wld7NIlfXaf/WzaWA8Zkj6Huro0D+eckzbaNTXv\nf2YLFqQN7VlnpeX6xhvpPdeuTRvhT3wiLaMTTkhh1LVrCtBXX03vefLJaYO8Y0danu+8k5bvs8+m\n8XbsSH1//fX0fqeemtadE05IIXfssWk+1q5NfR03Ln3nhg9P34vNm9N7vPRS+k6PGZOW3bJlaX1f\ntCh9R44+On12e/ak/xStW5c+z0mTUqiuXAlPPpk+t+HD03f9xRdTgI8eDc8/nz7bHj3S+HV1qe/v\nvAPPPZfGe+WV1P64cakva9emvu7cmbYbzz+fpu/UKY23b19a
pzt3hocfrt7ziBxE7SCI9uxJG6Yt\nW9JewIknpg3WSy+ljVNDRx0FX/xi+mKceCKcf3764nfrljbQCxemL/OIEWkjsmNHWqH79UvvsWBB\n+l/37/1e+vK/8kr699pr398oHXNMWvl6907/s+vUKa3sX/xi+iLvV1eX9mi6dElf+oOxd2/awB17\nbFUWX4eyZ0/6/M0gBfvBfu+qrZoPxnMQtYMg+va34W//Nu32f+5z6fDTBRekwPjVr+Dv/z4djlqz\nJoXJ5z6XgsHMrCwOoioqK4g2bkyHa156Kb1euTIdZjAzOxz4UeGHueuuS8dsX3oJZs1K5xwcQmbW\nUfmquTY2fTrccEO6WuWrXy27N2Zm5fOhuTY8NLdqVTq/89xz6SobM7PDlQ/NHYbmz0+XWf7VXzmE\nzMyKHERt4NZb0+91/uZv4E//tOzemJm1Lz5H1IoiYNq09Ivyf/7n9ONFMzP7IJ8jasVzRPffn34F\nfe+96QekZmZHinZzjkjS5ZJWS9oraWShvLekxyW9JenmBtOMlLRS0lpJswrlXSXNlVQn6SlJgwp1\nU/L4ayRNLpQPlrQw190lqUuh7ubc1nJJIyqZz0Px1lvwZ38GN97oEDIzO5BKzxGtAi4DnmhQvhP4\nX8D/aGSaW4GpETEMGCZpQi6fCmyJiNOAWcCNAJJ6AdOA0cAYYLqknnmaG4Cbclv1uQ0kTQROyW1d\nDdxW4Xy2yI4d6R5wf/AH6a4JZmbWtIqCKCLWREQdoAblb0fEr4BdxXJJJwHdI2JxLpoDTMrDlwKz\n8/DdwLg8PAGYHxHbIqIemA9cnOvGAffk4dkN2pqT+7II6CmpXyXz2hLTpr1/N1wzMzuwtr5qrj+w\nvvB6fS7bX7cOICL2Atsk9S6WZxuA/pL6AFsjYt+B2ipOU8X5aNLtt8PPf54u1/bNKc3MmtfsVXOS\nHgWKexMCArg+Ih5srY7RYC+rgnHaTER61srdd6fHA5iZWfOaDaKIGF/F99sADCy8HpDLinUbJXUG\nekTEFkkbgJoG0yyIiM2SekrqlPeKGmursff5kBkzZrw3XFNTQ01NTVOjHtCTT6ZHG4wefUiTm5m1\nW7W1tdTW1rZK21W5fFvSAuBbEfFMg/IpwCci4s8KZQuBPwcWA78Abo6IhyVdA5wdEddIuhKYFBFX\n5osVlgAjSYcSlwCjIqJe0s+AeyPiZ5JuBVZExG2SLgGujYjflzQWmBURY5voe9Uu3/7hD9PDu3xu\nyMyOdO3mMRCSJgG3AH1JV60tj4iJue43QHega667KCJekDQKuAPoBsyLiK/l8Y8G7gQ+DmwGroyI\nl3PdVcD1pEOC34uIObl8CDAX6AUsA74UEXty3Y9JFzXsAL4cEUubmIeqBNG776Yngf7wh3DJJRU3\nZ2bWrrWbIDoSVCuIFiyAr389Pc20rCcmmpm1lXbzg1Z73113pcdoO4TMzFrGe0RV2CPatw8++tH0\nWO+hQ6vUMTOzdsx7RO3MggXp0Q5DhpTdEzOzw4+DqAr+/d/h85/3YTkzs0PhIKqCJ5+ET32q7F6Y\nmR2efI6ownNEb78N/frBxo3QvXsVO2Zm1o75HFE7UlcHgwY5hMzMDpWDqEJr18Jpp5XdCzOzw5eD\nqEKPPQYXXFB2L8zMDl8Oogo9/TRceGHZvTAzO3z5YoUKLlbYsgUGD4bNm/3sITPrWHyxQjvx5JMw\ndqxDyMysEg6iCjzzDIwZU3YvzMwObw6iCqxYAeecU3YvzMwObz5HdIjniOrr0w1OX3gBTjyxFTpm\nZtaO+RxRO/D443D++Q4hM7NKOYgO0fz5cNFFZffCzOzw16XsDhyOdu6E+++HRx8tuydmZoc/7xEd\ngp//HM44A84+u+yemJkd/rxH1EK7d8N3vgM//nHZPTEzOzL4qrkWXjX3jW/A6tU+LGdmHVs1r5rz\nHtFBiIDt22HGDJg1C1au
LLtHZmZHDgdRM/bsSQ++27o1vX7gAf+I1cysmhxEzfjJT1II/fCHMHEi\nnHVW2T0yMzuy+BzRAc4RRaSnr953H4wa1cYdMzNrx3xnhTby0ENw9NEwcmTZPTEzO3I5iA5g7lz4\nkz8BVSXzzcysMT4018ShuV274PjjYdMm6NOnhI6ZmbVjPjTXBp55Jt05wSFkZta6KgoiSZdLWi1p\nr6SRhfL/JGmJpBWSFkv6vULdSEkrJa2VNKtQ3lXSXEl1kp6SNKhQNyWPv0bS5EL5YEkLc91dkroU\n6m7ObS2XNKKl87Z4MXzyky2dyszMWqrSPaJVwGXAEw3K3wD+ICLOA64C7izU3QpMjYhhwDBJE3L5\nVGBLRJwGzAJuBJDUC5gGjAbGANMl9czT3ADclNuqz20gaSJwSm7rauC2ls7YihUwosXxZWZmLVVR\nEEXEmoioA9SgfEVEbMrDzwLdJB0l6SSge0QszqPOASbl4UuB2Xn4bmBcHp4AzI+IbRFRD8wHLs51\n44B78vDsBm3Nye+/COgpqV9L5m35cjjvvJZMYWZmh6LVzxFJuhxYGhF7gP7A+kL1+lxG/ncdQETs\nBbZJ6l0szzYA/SX1AbZGxL4DtVWc5mD7vGdPevKq765tZtb6mr2zgqRHgeLehIAAro+IB5uZ9izg\nB8D4Q+jbwVyNUZUrNmbMmPHecE1NDX371jBwIBx3XDVaNzM7/NXW1lJbW9sqbTcbRBFxKCGCpAHA\nvcB/i4iXc/EGYGBhtAG5rFi3UVJnoEdEbJG0AahpMM2CiNgsqaekTnmvqLG2GnufDykGEcA//qPP\nD5mZFdXU1FBTU/Pe65kzZ1at7Woemntv7yRfTPAQ8JcRsXB/eT5vtE3S+ZIETAbuz9UPAFPy8BXA\n43n4EWB8Dp1epL2rR3LdgjwuedpiW5NzX8YC9RHx2sHOyIoVPj9kZtZWKr18e5KkdcBY4CFJv8xV\nXwVOAaZJWiZpqaS+ue5a4HZgLVAXEQ/n8tuBvpLqgK8D1wFExFbgu8ASYBEwM1+0QB7nm5LWAr1z\nG0TEPOA3kl4E/g64piXz5SAyM2s7vrNCI3dWOP30dKPTM88sqVNmZu2c76zQyl59FU4+uexemJl1\nDA6iBnbsSJdv9+zZ/LhmZlY5B1ED+/eGfMdtM7O24SBqwIflzMzaloOoAQeRmVnbchA14CAyM2tb\nDqIGHERmZm3LQdTAa69Bvxbdp9vMzCrhIGrgzTehb9/mxzMzs+pwEDWwebMfD25m1pYcRA04iMzM\n2paDqAEHkZlZ2/JNTws3Pd23D7p2hZ07oUuzT2oyM+u4fNPTVlJfD8cf7xAyM2tLDqICH5YzM2t7\nDqICB5GZWdtzEBU4iMzM2p6DqMBBZGbW9hxEBZs3Q+/eZffCzKxjcRAVvPUW9OhRdi/MzDoWB1HB\njh3p8m0zM2s7DqKCHTvguOPK7oWZWcfiICpwEJmZtT0HUcH27T40Z2bW1hxEBd4jMjNrew6iAl+s\nYGbW9hxEBdu3e4/IzKytOYgKfGjOzKztVRREki6XtFrSXkkjC+WjJS0r/E0q1I2UtFLSWkmzCuVd\nJc2VVCfpKUmDCnVT8vhrJE0ulA+WtDDX3SWpS6Hu5tzWckkjDmZ+fGjOzKztVbpHtAq4DHiikfJR\nEfFxYCLwd5L2v9etwNSIGAYMkzQhl08FtkTEacAs4EYASb2AacBoYAwwXVLPPM0NwE25rfrcBpIm\nAqfktq4GbjuYmfEekZlZ26soiCJiTUTUAWpQvjMi9uWXxwD7ACSdBHSPiMW5bg6wf2/pUmB2Hr4b\nGJeHJwDzI2JbRNQD84GLc9044J48PLtBW3NyXxYBPSX1a25+fI7IzKzttdo5IknnS1oNrAD+JAdT\nf2B9YbT1uYz87zqAiNgLbJPUu1iebQD6S+oDbC0EXqNtFac5UH/37IGI9KhwMzNrO80+FF
vSo0Bx\nb0JAANdHxINNTRcRTwNnSzodmCPply3s28E8C70qz0ufMWMGO3dC587wxBM11NTUVKNZM7MjRm1t\nLbW1ta3SdrNBFBHjK3mDiFgjaTtwNmnPZGChekAuo1C3UVJnoEdEbJG0AahpMM2CiNgsqaekTnmv\nqLG2GnufD5kxYwbr18Odd4IzyMzsw2pqPvif9JkzZ1at7Woemntv7yRfzdY5D38MOB14OSI2kQ65\nnS9JwGTg/jzZA8CUPHwF8HgefgQYn0OnFzA+lwEsyOOSpy22NTm//1igPiJeO1Dnd+6EY45p+Uyb\nmVllmt0jOpB8WfYtQF/gIUnLI2IicCFwnaTdpAsV/jQituTJrgXuALoB8yLi4Vx+O3CnpDpgM3Al\nQERslfRdYAnpkODMfNECwHXA3Fy/LLdBRMyTdImkF4EdwJebm5ddu3x+yMysDIqIsvtQKkkRESxb\nBn/8x7BsWdk9MjNr/yQREVU5T+87K2S7dsHRR5fdCzOzjsdBlDmIzMzK4SDKHERmZuVwEGW+WMHM\nrBwOomz3bu8RmZmVwUGU+dCcmVk5HESZg8jMrBwOoszniMzMyuEgyrxHZGZWDgdR5iAyMyuHgyjz\nVXNmZuVwEGU+R2RmVg4HUeZDc2Zm5XAQZQ4iM7NyOIgyB5GZWTkcRNnu3T5HZGZWBgdR5j0iM7Ny\nOIgyB5GZWTkcRJkPzZmZlcNBlHmPyMysHA6izEFkZlYOB1HmW/yYmZXDQZT5Fj9mZuVwEGU+NGdm\nVg4HUeZDc2Zm5XAQZT40Z2ZWDgdR5kNzZmblcBBlPjRnZlaOioJI0uWSVkvaK2lkI/WDJL0l6ZuF\nspGSVkpaK2lWobyrpLmS6iQ9JWlQoW5KHn+NpMmF8sGSFua6uyR1KdTdnNtaLmlEc/PiQ3NmZuWo\ndI9oFXAZ8EQT9TcB8xqU3QpMjYhhwDBJE3L5VGBLRJwGzAJuBJDUC5gGjAbGANMl9czT3ADclNuq\nz20gaSJwSm7rauC25mbEh+bMzMpRURBFxJqIqAPUsE7SpcCvgWcLZScB3SNicS6aA0zKw5cCs/Pw\n3cC4PDwBmB8R2yKiHpgPXJzrxgH35OHZDdqak/u4COgpqV9T87F3L+zbB126NDWGmZm1llY5RyTp\nOOAvgJl8MKT6A+sLr9fnsv116wAiYi+wTVLvYnm2AegvqQ+wNSL2Hait4jRN9Xf/DU/1oTg1M7PW\n1uw+gKRHgeLehIAAro+IB5uYbAbwfyLibR361v1gJqxKdMycOYMImDEDampqqKmpqUazZmZHjNra\nWmpra1ul7WaDKCLGH0K7Y4D/IulGoBewV9JO4F5gYGG8AaS9FfK/A4GNkjoDPSJii6QNQE2DaRZE\nxGZJPSV1yntFjbXV2Pt8yDe+MYN/+IcURGZm9mEN/5M+c+bMqrVdzUNz7+2dRMRnImJoRAwlXXjw\nNxHx04jYRDrkdr7SrtJk4P482QPAlDx8BfB4Hn4EGJ9DpxcwPpcBLMjjkqcttjUZQNJYoD4iXmuq\n434WkZlZeSq9fHuSpHXAWOAhSb88iMmuBW4H1gJ1EfFwLr8d6CupDvg6cB1ARGwFvgssARYBM/NF\nC+RxvilpLdA7t0FEzAN+I+lF4O+Aaw7UoZ07oVu3g5xpMzOrKkVE2X0olaRYvjyYPBlWrCi7N2Zm\nhwdJRERVztP7zgrA22/DsceW3Qszs47JQYSDyMysTA4iHERmZmVyEOEgMjMrk4MIeOcdB5GZWVkc\nRHiPyMysTA4iYMcOOOaYsnthZtYxOYiALVugd++ye2Fm1jE5iIDNm6FPn7J7YWbWMTmIcBCZmZXJ\nQQS8+aaDyMysLA4iYNMm6Nfk81vNzKw1+aanUnTrFrz5Jhx3XNm9MTM7PPimp1V23HEOITOzsjiI\ngAsvLLsHZmYdl4MI+Mxnyu6BmVnH5SACTj+97B6YmX
VcDiJgwICye2Bm1nE5iID+/cvugZlZx+Ug\nAk44oewemJl1XA4ioEuXsntgZtZxOYjMzKxUDiIzMyuVg8jMzErlIDIzs1I5iMzMrFQOIjMzK5WD\nyMzMSlVREEm6XNJqSXsljSyUf0zS25KW5r+fFupGSlopaa2kWYXyrpLmSqqT9JSkQYW6KXn8NZIm\nF8oHS1qY6+6S1KVQd3Nua7mkEZXMp5mZtZ5K94hWAZcBTzRS92JEjMx/1xTKbwWmRsQwYJikCbl8\nKrAlIk4DZgE3AkjqBUwDRgNjgOmSeuZpbgBuym3V5zaQNBE4Jbd1NXBbhfNpZmatpKIgiog1EVEH\nNPaUvg+VSToJ6B4Ri3PRHGBSHr4UmJ2H7wbG5eEJwPyI2BYR9cB84OJcNw64Jw/PbtDWnNzHRUBP\nSX4YuJlZO9Sa54gG58NyCyTtf/Rcf2B9YZz1uWx/3TqAiNgLbJPUu1iebQD6S+oDbI2IfQdqqzhN\ndWbLzMyqqdm7rEl6FCjuTQgI4PqIeLCJyTYCgyJiaz53dJ+k4S3s28E8C70qz0s3M7PyNBtEETG+\npY1GxB5gax5eKuklYBhpz2RgYdQBuYxC3UZJnYEeEbFF0gagpsE0CyJis6SekjrlvaLG2mrsfT5k\nxowZ7w3X1NRQU1PT1KhmZh1SbW0ttbW1rdK2IqLyRqQFwLci4pn8ui/pwoN9koaSLmY4JyLqJS0E\n/hxYDPwCuDkiHpZ0DXB2RFwj6UpgUkRcmS9WWAKMJB1KXAKMym39DLg3In4m6VZgRUTcJukS4NqI\n+H1JY4FZETG2ib5HNZaBmVlHIomIqMpRqYqCSNIk4BagL+mqteURMVHSF4C/BnYD+4BpETEvTzMK\nuAPoBsyLiK/l8qOBO4GPA5uBKyPi5Vx3FXA96ZDg9yJiTi4fAswFegHLgC/lvTEk/Zh0UcMO4MsR\nsbSJeXAQmZm1ULsJoiOBg8jMrOWqGUS+s4KZmZXKQWRmZqVyEJmZWakcRGZmVioHkZmZlcpBZGZm\npXIQmZlZqRxEZmZWKgeRmZmVykFkZmalchCZmVmpHERmZlYqB5GZmZXKQWRmZqVyEJmZWakcRGZm\nVioHkZmZlcpBZGZmpXIQmZlZqRxEZmZWKgeRmZmVykFkZmalchCZmVmpHERmZlYqB5GZmZXKQWRm\nZqVyEJmZWakcRGZmVqqKgkjS5ZJWS9oraWSDunMl/SrXr5DUNZePlLRS0lpJswrjd5U0V1KdpKck\nDSrUTcnjr5E0uVA+WNLCXHeXpC6FuptzW8sljahkPs3MrPVUuke0CrgMeKJYKKkzcCfw3yPibKAG\n2JOrbwWmRsQwYJikCbl8KrAlIk4DZgE35rZ6AdOA0cAYYLqknnmaG4Cbclv1uQ0kTQROyW1dDdxW\n4XzaQaqtrS27C0cUL8/q8bJsvyoKoohYExF1gBpUXQSsiIjVebytERGSTgK6R8TiPN4cYFIevhSY\nnYfvBsbl4QnA/IjYFhH1wHzg4lw3DrgnD89u0Nac/N6LgJ6S+lUyr3Zw/GWvLi/P6vGybL9a6xzR\nMABJD0taIunbubw/sL4w3vpctr9uHUBE7AW2SepdLM82AP0l9QG2RsS+A7VVnKYaM2ZmZtXVpbkR\nJD0KFPcmBARwfUQ8eIB2LwA+AewE/lXSEuB3Lehbw72sQx3HzMzasWaDKCLGH0K764F/i4itAJLm\nASOBfwQGFsYbQNpbIf87ENiYzzH1iIgtkjaQzjEVp1kQEZsl9ZTUKe8VNdZWY+/zIZLzrJpmzpxZ\ndheOKF6e1eNl2T41G0QtUNyaPwJ8W1I34F3gs6SLCjZJ2ibpfGAxMBm4OU/zADAFWARcATxeaOv7\n+QKFTsB44LpctyCP+7M87f2Ftq4FfiZpLFAfEa811umIcAqZmZVIEXHoE0uTgFuAvqSr1pZHxMRc\n91+B/wnsA34REd
/J5aOAO4BuwLyI+FouP5p0pd3Hgc3AlRHxcq67CriedEjwexExJ5cPAeYCvYBl\nwJciYk+u+zHpooYdwJcjYukhz6iZmbWaioLIzMysUh36zgqSLpb0Qv5B7F+W3Z/DgaSX8w+Ul0l6\nOpf1kjS9xRc+AAAC5ElEQVQ//+D4kcLvvJD0nfzD4uclXVRez9sHSbdLek3SykJZi5dfUz8M72ia\nWJ7TJa2XtDT/XVyo8/JsgqQBkh6X9KykVZL+PJe3/voZER3yjxTCLwIfA44ClgNnlN2v9v4H/Bro\n1aDsBuAv8vBfAv87Dw8nHTLtAgzOy1tlz0PJy+9CYASwspLlRzqXOjoPzwMmlD1v7Wh5Tge+2ci4\nZ3p5HnBZngSMyMPHA2uAM9pi/ezIe0TnA3UR8dtI55Xmkn4IawcmPrwnXfwxcvGHxf8ZmBsR70Y6\n31dHWu4dVkT8O7C1QXGLll8zPwzvUJpYntD4TzsuxcuzSRGxKSKW5+HtwPOkK45bff3syEHU8Eev\nxR/EWtMCeFTSYklfyWX9Il+VGBGbgBNzuX9YfHBObOHyO9APwy35ar7P5P8rHEry8jxIkgaT9jQX\n0vLvd4uXZ0cOIjs0F0TESOAS4FpJnyaFU5GvgKmMl19lfgoMjYgRwCbgppL7c1iRdDzpNmtfy3tG\nrf797shBtAEYVHh9wB+9WhIRr+Z/3wDuIx1qe23/vfzybvnrefQW/bC4A2vp8vNyPYCIeCPyyQng\n//L+4WAvz2bkJxjcDdwZEft/l9nq62dHDqLFwKmSPqb0iIorST+EtSZIOjb/bwlJx5FubruKtNyu\nyqM1/GHxlUqP+BgCnAo83aadbp/EB89htGj55cMj2ySdr3RbkMmFaTqiDyzPvLHc7wvA6jzs5dm8\nfwCei4gfFcpaf/0s+0qNkq8SuZh0ZUgdcF3Z/Wnvf8AQ0tWFy0gBdF0u7w08lpflfOCEwjTfIV1N\n8zxwUdnzUPYf8E/ARmAX8ArwZdIPslu0/IBR+TOoA35U9ny1s+U5B1iZ19X7SOc4vDybX5YXAHsL\n3/GleRvZ4u93S5enf9BqZmal6siH5szMrB1wEJmZWakcRGZmVioHkZmZlcpBZGZmpXIQmZlZqRxE\nZmZWKgeRmZmV6v8DGF+ZHU5dqMQAAAAASUVORK5CYII=\n", 349 | "text/plain": [ 350 | "" 351 | ] 352 | }, 353 | "metadata": {}, 354 | "output_type": "display_data" 355 | }, 356 | { 357 | "data": { 358 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYwAAAEACAYAAACgS0HpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXu4HVdZ/7/vSc7JOUlOktO0zb2l95vSNtUW6AMcUUr5\nqVB5BCkKBarVBxSk/tQWwTb4QxQeLEq5WMFysVIRUQpiKRVCC/QSWpK2pKTB3pKQS3NyP/fL+v3x\n7uVeZ/ZaM2tm1sye2ef9PE+eJPvss/fs2TPrO9/v+641pJSCIAiCICTR1e4NEARBEOqBCIYgCILg\nhQiGIAiC4IUIhiAIguCFCIYgCILghQiGIAiC4EUQwSCiTxPRXiJ6xHhsgIjuIqJtRPQNIlpq/Ox6\nItpORI8T0WXG4+uJ6BEieoKIPhJi2wRBEIQwhHIYtwJ4ReSx6wDcrZQ6C8C3AFwPAER0LoDXATgH\nwCsBfJyIqPE7nwBwtVLqTABnElH0NQVBEIQ2EUQwlFLfBXAw8vCrAXy28e/PArii8e9XAbhdKTWl\nlHoawHYAFxPRSgD9SqlNjed9zvgdQRAEoc0UWcM4USm1FwCUUnsAnNh4fA2AHcbzdjUeWwNgp/H4\nzsZjgiAIQgUos+gta5AIgiDUmPkFvvZeIlqhlNrbiJv2NR7fBWCd8by1jcdcj7dARCI+giAIGVBK\nUfKz7IR0GNT4o7kDwJsb/74KwFeMx19PRD1EdAqA0wE82IitDhPRxY0i+JuM32lBKSV/lMINN9zQ\n9m2oyh/ZF7IvZF/E/8lLEIdBRP8MYBDAciJ6FsANAP4KwL8S0VsBPAPujIJSaisRfRHAVgCTAN6m\nmp/k7QA+A6AXwNeVUneG2D5BEAQhP0EEQyn1BsePfsnx/A8A+IDl8YcA/GyIbRIEQRDCIjO9a87g\n4GC7N6EyyL5oIvuiieyLcFCIXKtsiEjVcbsFQRDaCRFBVaToLQiCIHQwIhiCIAiCFyIYgiAIghci\nGIIgCIIXIhiCIAiCFyIYgiAIghciGIIgCIIXIhiCIAiCFyIYgiAIghciGIIgCIIXIhiCIAiCFyIY\ngiAIghciGIIgCIIXIhiCIAiCFyIYgjBH+fu/B265pd1bIdSJIHfcEwShfvzP/wDd3e3eCqFOiGAI\nwhxlbKzdWyDUDREMQZijjI4C8+a1eyuEOiE1DEGYo4yOApOT7d4KoU6IYAjCHGV0FJiYaPdWCHVC\nBEMQ5ijiMIS0iGAIwhxlbEwEQ0iHCIYgzFHEYQhpEcEQhDmKCIaQFhEMQZijiGAIaRHBqAh/+ZfA\n0aPt3gphLiFdUkJaRDAqwi23ADt3tnsrhLmEFL2FtIhgVISJCbnaE8pFIikhLSIYFWFiAhgfb/dW\nCHMJEQwhLSIYFUEchlAmk5PA9LQIhpAOEYyKIIIhlMnoKP8tgiGkQQSjAiglkZRQLnppc7lIqR/T\n08Ab3tCe9xbBqABTU03REIQyEIdRX4aHgS98AZiZKf+9RTAqgBYKEQyhLEZHgd5eEYw6MjLCf7fj\nuxPBqABaKPJEUuPjcn9mwZ/RUWDJEhGMOjI8zH+3I8IWwagAIRzGzp3A9deH2R6h8xHBqC/aYbQj\nkRDBKJht25KfE0IwxseBgwfbk2sK9UMEo75ohyGC0YG89KXAT38a/5xQkZRSwOHD2V9DmDuMjbFg\nSN2sfmiHIZFUBzIy0rwicBHKYQDAgQPZX0Mojk2bgL/+63ZvRRNxGPVFHEYHMzra7Hl3IYLR+fzw\nh8D3vtfurWgyOgr093NPv1Lt3hohDVLD6FCmpviP7nl3ESqSAkQwqsrQUPKFQ5mMjgJ9fcD8+eIy\nQvGRj5Sz4rR0SXUo+gv1FQxxGJ1LVQWju1sEIxSf+hTwnvcU/z7iMDoUPUAkCYYe7EUwOpehoWot\n/TI21j7BeOYZ4N57y33PMhgbA26/HXjssWLfR2oYHYoWCnEYQ
pUdRtkDz223ATffXO57lsHoKPD2\ntwN/9mfFvo90SXUoeoDwLXpLDaNzqbJglO0wtmwBDh0q9z2T2LiRO9nyMDoKXHstsHlzsQ0OHe0w\niOhpItpCRD8kogcbjw0Q0V1EtI2IvkFES43nX09E24nocSK6rOjtKxLfSCqUw+jtFcGoKlWLpPRa\nUj09IhgAcMcdwFe/mu81xsaAZcuADRuA664rrvus02sYMwAGlVIXKqUubjx2HYC7lVJnAfgWgOsB\ngIjOBfA6AOcAeCWAjxMRlbCNhZBGMIjyC8aqVSIYVWX/fnEYAF8dP/FEsYLx8z8PHD2a7ncmJljU\ns6JUsy70xjfyqgv/+Z/ZXy+OTu+SIsv7vBrAZxv//iyAKxr/fhWA25VSU0qppwFsB3AxakqaGsai\nRfkjKRGMajIzwwNIlQSjXUXvxx4DVq4sTjCmpoAf/ADYsyfd701OsqhnZXyc92VXFzBvHnDVVcDd\nd2d/vThGRvi761SHoQB8k4g2EdFvNx5boZTaCwBKqT0ATmw8vgbADuN3dzUeqyVpHEZ/v/0A2LUL\neOUrk99LBKO6HDrEg0mVBKNdRe8tW4DBQd4nRUQ2Bw/y32ndQl6HMTbGEZ9m8eLiHMDwMDAw0B7B\nmF/Ce1yqlNpNRCcAuIuItoFFxCT1oXPjjTf+778HBwcxODiYZxsLIU3Re/Fi+wGwbx/w+ON+77Vq\nVWe2K9adoSFg9Wrgqad4kKxCyNquSGrLFuCSS4Avf7npckKiL5jSuoW8DkPvT01vb/KFYlZGRlgw\nfARp48aN2LhxY7D3LlwwlFK7G38/R0T/AY6Y9hLRCqXUXiJaCWBf4+m7AKwzfn1t47EWTMGoKmkd\nhu0AGBvzO/BMh1GVQUlghoaAE07gWcATE8CCBflfc/Nm4H3v44E3C7roXbZgbN4MvPa1XBw+dKg4\nwcjiMPIKhukwenuLc5RpHEb0YnrDhg253rvQSIqIFhLR4sa/FwG4DMCjAO4A8ObG064C8JXGv+8A\n8Hoi6iGiUwCcDuDBIrexSHxrGOPj7khqfNxfMPr7eTA6diz9tgrFMTQELF8edhB5+mlgx47EpznR\nV8RldknNzACPPgqcfz4LRhErK2uhyOIwhoayx2RRt9TXV7xgtKPoXbTDWAHg34lINd7rNqXUXUT0\nAwBfJKK3AngG3BkFpdRWIvoigK0AJgG8Tan6Lo02NsZr9fg4jMWL7Z0daRzGggXAccfxVVZ/f7Zt\nFsKjBWPBgnAn+YEDzfbKLLSj6P3UUzzQDQw0HUZo8jiMsTHep4sWpX/fdkRSHVfDUEo9BeACy+MH\nAPyS43c+AOADRW5XWei+bF/BcEVSehHD+THfVlQwTj4537YL4di/P7zDOHAg34DUjhrGli3sLoBi\nBaO3N5vDAFhosghGtOhdlUgqNDLTu0DGxviL9Sl6x0VSgF+sZQqGUB2KiKRCCkZZA8/mzcULxtAQ\ncPrp2RwGkL2OEXUYRUZSaYreoRHBKJCxMR7AfR2G7cRNs4ChCEY1KUIwDh7MF0m1o+i9ZQtwQSNv\nWLq0OIdx5pnZHEZfXzjBKMphzMzwey1dKg4jN1/7Wr5Oh9CMjvKVQJ5IShxG/RkaAo4/PnwNI4TD\nKLPoXVYkdeaZ2RzGqlXZ52LYIqkiahj6fTp54l5pfOhD+RcQc6FU+hNLR1JlO4w8E5CqzIEDwMc+\nFu71Dh3iFs+iKSqSmpzk2lYWyq5hHDzI++HUU/n/RUZSWR3G6tXVdxjDw8DChSz0Eknl5Nix4nbi\n3XcDV1yR/DyTNEVvVw1DIqkmTzwBfPSj4V5vzx5epbRoihIMINtVrBaZ7u7yBOORR4DnP5+XzgCK\ndRhnnNGcj+RLXodRVg1Dd3H19IjDyM3wcHGC8eyzvGRxmoMwTQ1Dit7JTEw0l34IwfBwca2PJrpL\nKmQkpfdDlu03J5mVJRhmH
AUUKxirVvG+PnLE//cmJ/n3sjqMsrqktMNYsEAEIzfHjhW3E/fv54lG\n//M//r+jaxhJB874OF81TEy0CpI4jCZaMELNzNGCUeRMH6WKcxj9/dkK3+bVcFldUmUJxtAQnwPH\nH9/qFiYnga1b7b+nHUbISKqIixHTYUgklZMiHYY+kNLUSNLUMFwdK2kdxvLlnS0Yk5P5uoNMhoe5\n66TIK+yREV6mZeHCcIIxNcUXR6tWZXcYpmCU4TCefRZ43vOa/y9ipvfkJH+nS5fyeRAd/P/7v4Fr\nrnH/bshIqrsbmJ7OXmNyYdYwxGHkpMgaxv79nI3+4Af+v5NGMHp67AeBOIwm+rsNFUvp+woUGUtp\ndwGwYIQ4Pg8d4kFx0aL8glFWl5S+KNIU4TAOHeLX7eqyO4xnn3Xv/7wOIxpJEfE+Dj0eaYchkVRO\nJiZYzYsSjOeeAy6/PL1g6H7pmRn380zBiG6/CEYTfYKEFoxQjsWGbqkF+PsJ4TAOHuQLkb6+bNtu\nrntUlsOYmOD30hQhGDqOAuwOY8cO9yAbuksKKCaWki6pQOgF94p0GK94BfDDH7LV9GF0tFmgihso\ntGDYrhr0rVfTCkZ9V+ByU5RglOkwQgjGgQP8PS9cWJ+itz7GNUUIxoEDzX1tcxhxghGiS8p0GEAx\nhW/pkgpE0bct3L+f+7tPPJHbO30wJ9n4CIYrkvKJtbRg9PWxHS6j+6dsRDAYLRh9ffWpYUQFo7eX\nL7xCDqh6vwDpHcbEBKcBSuV3bZoiWmulSyoQZTiM448Hfu7n/AvfpmDEndhxkdT4uN9cDi0YQOfG\nUqEFQw8MSftWKf+LhCi6pRYI11ZrCkZduqSigkEUvvBtRlJpHMb0NH/H8+fzd5XFZbgiqSIdhkRS\nOSjSYUxMNLsvfv7n/esYaQXDdtXg4zB0J4ZezbZTBaNdRe/77wcuznhn+SIchq5h5Imk2lH0NgUD\nCB9LmZFU1GEoxYJhGx8mJ5vbdvzx2eoY0aI3UHwNQxxGDop0GPqk7+pih5FGMPr6kg8cfRc2VySV\n5DBMdwF0rmC0K5K65RaeBJalLlTFSKpdRe8yBMPlMPbv5wsr2yA7OdksyGcVjLIdhkRSOdEnfxE7\nUcdRAHDhhTwJyae/WhfCkk7s8fH4SCrJYcwlwejvL1cwDh8G/v3fgXnzsg3ORbTVhoik2l30BsIL\nRlyX1I4dwCmn2McHc9tCRlJF1jAkkspJkQ7DFIwlS4CTTgJ+9KPk30tb9I6LpOIGhrIFY2rKnT2/\n5z3Af/1XMe87MQGsXBlWMLq64oXgttuAyy7j78B2R8QkTMEI1VbbCUVvoNhIKuowduwATjutOIfh\niqTEYVQUrbxFCMZzzzUFA/CrY+hZnj096YretrbaqkVSd9zhnjH70EPAAw8U874TE8CKFWEFY/ly\n975ViuOoa65hZ5PlXunmPIyQNYy8bbVVEYyQRW9bl5SOEZ99lh3GzExrW7y5bbZiuQ/tmIchgpGD\nY8f4YCnKYZxwQvP/PnUMPX9Cz/jM2iXlU/QuWzAOHQJ277b/bGgo3XpbaRgfDy8Yxx/v3rc/+AHX\nLl72Mvc915MoqoaRZ+JeFbqkgGIjqb4+bgLRseOOHcC6dXyeRAXSdBi2dlwf2tElZVt7rmg6SjCW\nLy8+kgL8BMPMieMEQ6nmLFhXJFU1hzE66j6p9u8HfvKTYt63CIcRJxi33AL8zu9wbJXVYRTdVpu3\n6J23S+qHP0z+TLptdd682Y8XGUkBswf/HTs4SrZdlEUdRqhIqsgaRlcX78/Qa1Ul0TGCMTxcnMOI\nRlIXXMA1jLgrM/MAirOmU1N8JdTV5Y6kkpZIL1swRkbaJxihaxguwTh6FPjSl4C3vIX/73IYP/kJ\n8M1v2l/fXAwP6MyZ3r/7u8DXvx7/HD0gE81+vMguKWB2vKQdhu0cizqMKkdS2mEAybHUzAz
wJ3+S\n7ULHRccIRpkOY+FCYO3a+OjFFIy4Kw3z6qYukdTICJ9U0fWxJiZ4O0dHw69Eql9fO4wQVjxOML7y\nFeAlL2GBAlgwbCfe3XcDn/mM/fV1dKRvGhRCMJQKH0nlEYz9+4HNm+OfY4ujgLD39dbivGRJ87Go\nw3AJRgiHUdbSIMPDswUjbrz7wheAe+9tPj8EHSMYRTqMaA0D4JVrt293/45p++OiA/NgjUZSSlWz\n6D0ywmIRPdl1hnzaacXUMcbHeUBI6mzyJU4wfvpT/o41/f12h3HkiHtQMOsXQJi22uHhZkddFbqk\nhoaSBcOcGGcS0mHoyYxdxoimHcb0NNfc1qxJdhhZit66wcU8B4FiIykgvlNqfBx473uBv/qrVmeX\nh44RjDIdBpAsGL41DD1pD2g9mCcnOadMWsa6HYIBtF6J6Y6g008vJpbS4jowECaWihOM4WF2FRqX\nwzhyxP3dRAUjRFutGbu0u0tqcpI/f1aHEVIwonEU0IyXdu/m79nViRidh5HWYeg0ITowF1n0BuIj\nqb//e+Ccc4CXvjTs+3eMYGiHUUTHR7SGAfBChHHrC0UjKdeJrSftAa0HgO/SIlURDC2sdRCM6Wl+\nPVfcZ1p/ILvDMI+bEAOIOTC2u0vq4EEeYA8ejD/eyhAMs0NKo+MlHUcByQ5j0SI+NtIIsS2OAopt\nqwXckdSRI8D73w984ANh3xvoIMEoqq1WqWwOw7foHY2kzO3XrblpBWPxYn6sqJmgcYKxfDlHUlUX\nDH3iuQbdqGCEcBguwfjWt4CnnvLb7qhgZO2S0sdmni4pLYjnnx/vMspyGOa+BpoOwxQMW4xjbh9R\n+ljKtlItEN5h6JV0kyKpD3+Yb8Xw/OeHe29NxwhGUTWM4WGOhfSXpPFxGGlrGDaH4ZNVRwWDiPdF\nqG6iKCMjPGjHRVJF1DB0fBdKMBYtcu/bEA7DbKkF3G21n/wkcNddftuts3qg/ZGUFsQLL6yGYPg6\njOh3YDoMIH0sZeuQAsLXMCYmuEajt9X2WfbuBW6+GXjf+8K9r0nHCEZRNQybuwD4ABwackcC0RqG\nb5eUKRjaYfT2xt+1LyoYQLGx1MgIf/6yIykd35UhGMeOlecwjh3z/zxFRFJ5BeOCC7IJxsKF/N4h\nYmRbJKUdxrPP8hwMILmGAcR3Sm3eDDz88OzH4iKpkIJh1i8A+2f5zneAF7949v3TQ9IxglGUw7DV\nLwBW+rjoxbeGERdJaYdBFF8wtQnGwECxgnHSSXaHsXw5txwfONCcZRuK0JFUksMwi96uiXtpuqT0\ndxhtCT52zP9KO0QkVRXBCHlPDFsklaWGoX/PFUl96UvAv/zL7MfiIqmQNQyzfgHYI6mREd6nRdEx\nglGkw4i21GrOOMMdS2URDFfRO+k1bIKxdCkPZkXgEgztMLq6+ArnySfDvq8pGHnFUAuGK9ax1TBc\nkZTrezHjI4AnaNpm5x49mk0wqhBJHXcccO65fOHk46KjhIql4rqkkgQjun1xkdTYWKvAuSKpvA7j\nnntmXxjZHEZ0vDNrHEXQMYKhZ9TaFhfLgyuSAuIL3+ZVh2/R2xZJaSFIKxhLltiv3I4dA17wgnwn\n6ehovGAAxdQxynYY0RpGWocRPcEBu1NME0mZIqQHpLSTGKNF76yRkHYYvb38fbtWcC5DMEJ1Senf\nczmM8fHW88q2LAiQv4Zx443AN77R/H/UYdg+iwiGB1NT/KX39oZbr0cTJxhxhe+sDiMaSYV2GPv2\n8Wqy73+//bV8SIqkgGLqGCGL3now9xWMLA7Dd0G6rJGUXk4m7aAUOpIC4gvfZTmMaCS1cCFfQB48\n2JyxX7bDyBNJHTnC9RdN9ALEFUmJYCSg82ad9YcUjOeei4+kXA4jTdFbD/bRA0AXvfVrpHUYNsE4\ncoRrDLfemt0BJEVSQDGC4Vv0fvJJ4PLL418rhMPQ/fp
TU3ZXazt5bbO9sxa9gWyxVBGCEVfHiBOM\nUMuD2CIp3SK7enVzBrhtfLA5DJdgjI+3nldFRVJHjrA70tgchkRSGTA7WkLfWKRoh5E0cS9PJGUT\njMOH+b4A73oXcN119tdLwsdhFDEXwzeSuvde4Pvfj38tny6p6EzvqMM4epSFxDUw+DiMmZnsDgNI\n3yk1NcXvqQfIdgtGkZEUwNun4yjAv0vKFUnZHEZRkdThw7MFQxxGIMyOltC3LowTjJUr3QvtZZ2H\nESqSctUwDh/mq7prr+Vo6rvftb+mi5kZ3q5Vq1iQdAE3ujJrO2sYDzzAg3nc/SviBEMpP4dx5Ajv\nZ9fAYBOMaA1DD/a+g2a0kJ62U0pvk17GIpRgnH8+37rY1vrdrkgK4HM3STCiDuO449LVMIp0GGYk\nJTWMQEQdRlmCQcQDoy2WyjrTO1TR21XD0ILR18dLB1x7rXt+hw39uebPn92tpK/wtPU/+WRewC/k\nd+ErGA8+yN/NT3/qfo4WhO5u/vxm51J0ghTAn3lycvbzDh9mwXB9vyMjyQ7j2DEeNH1X4LU5jDSC\nEb0a1veoyNIoYgrGccfx92KbsV60YExO8r42V6rVLF/enIMB+DmMuLXbXDWM0EuDTEzwe8U5DNvF\ncVRUQtMRgmE6jDJrGIA7lkqz+GARbbVxNQztAq68kg+u/n7g1FO5e+rTn7a/h8YcBM2sNzrnoLub\nr+yefjr+9XzRN5oyBcM2wI6NAY8/zje58hEM2x0Ro+4C4OdFJ+/5OIykGoZuB+/uTo6W9CDS3998\nbOHCdJGU7Wo4S6eUUq3f+QUX8A2VbNtdpGDo5d5tq7Kee+7sJTJ8HEacMxgfZ+dqXmQVsTTI0aO8\nb44ebX6/vvMwRDASaJfDANyF7xD3wyii6K2vigG+iv72t4E9e/gmQFdcAdx5p/09NOYBaQqGbT+F\nrGNMTfHVcFcX7wvXEuebNwNnn83v7SMYgJ9gAK11DC0YritJn0jq6FF+XZ+BU8dR5sCYNZIyyRJL\nHTvGv2deWbs6pZIEI+/EPVccBXBr6pVXNv/v4zDiBnrdxmxeOBSxNMjhw7xv1q4Fdu7kx3xmeotg\neFCUw5ie5pPUVkzTuBxGdB7G+Lj9ijgukkpT9I5a4qQahoaIr1hPOw246KLkQcslGNGVWYGwdYzo\nSe2KpR54ALj4Yr73wa5d7teLE4xowVtjcxg63osODNPTPAhHhdwWSfX3N2OpKA891LyatR2L7RKM\nqLsAuBtp377W55bhMOLOUZMQDgOYfW65Iqn581vjTl/0xci6dc1YSrqkAlGUwzh0iL+0+fPdz/Fx\nGHFLe5Q90zsqGCauQcskzmFEB5A0rbVJdRRfwXjwQRaM1avDO4zoAoRxDkMPIrZ7JEQjqcWL+fNE\nB06lgEsvBT7/ef6/jl5MQkRSoQTDNdAWLRj33gucdZbfc23jQ1qH0dU1WzBckRRR9lhKX4ysW9cs\nfEuXVCCKchhJ9QuAHcb27a3uIXrV4YotojdQikZSPg7DdCIanxpGFNugFSVNJOVqCIjyne9w/SSO\nNIJxySXpHYY56MZFUr41DFv9ArA7DB1JRT/PkSPsVK67jgco25V03qI3EFYwbOdekYJx7Bhw003A\n//2/fs/3cRiuNb8AfvyEE2afW65ICsguGDo6PumkeIchgpGBohxGUv0C4JOGqHVOQvTEdJ3YSZFU\nkTWMKD4OwxwIkyKppHuGaO65B9i0CXjsMfdzTGEF7IJx4ABHImedVZzDcAmGzWHYBhFXDcMm1vv2\ncbfZr/wKZ/EhBKNIh+HjoqPkFYxPfpLvKnfeeX7P96lhzJtnX/ML4HPtxBP9Iikgex3DFknJWlKB\niDqMUBP3fAQDsA+MUZvqOnDiJu7lKXovWsTvFx0I4iKppUv553HtnWkiqVNP5YM9aTC6/34uVP/T\nP7mfY+4nwC4YDz7
IdZh58/LVMLIUvaPfra2lFoivYdgE48QTgb/8S+C221hYo4LRri6pUJHU0qXZ\nl3kZGeGbBb33vf6/4xIM02EA7s8yNtYqGK5ISr9OltZafWyddFIzkpIuqUAU5TB8IinALRhpHUbI\nmd5EfMBFJ6/FCUZ3N7+PbZE9TZpIqqeHB+64u8kpxYXqm27iQdFVy/CJpHT9AuCJhbt3u8Uvqegd\nwmG4IilXDSP6ebRgnHAC8Od/zi3P0RpG1RxG2khq8WLenr17070/wPetftGLgJ/9Wf/fcUVS0e1z\nCYZ2GEVHUmYNI85hiGBkwOxqCTnT29dhrF7desBHbapvJBVqpjdgj6XiahhAciwVFQw9I9Y2gADJ\nsdSTT/JnvPxyvnq+5x7783wF45JL+N99fXxMuNYESnIYri4pX4eRNpKKcxgA8Hu/x/MJohcwRQnG\nxz/uXn0WCOcwiPhzbdmSvO0mo6PAhz6Uzl0AYRzGihWtDiN0JKWjY1301qsPxHVJzczYOyZD0hGC\nYZ78ZdcwAPvqliEcRp6Z3oBdMOJqGEBy4dsUDPNzu/ZV0q1sH3igOcj/1m+5Y6mkGoZSsx0GEF/H\nME++MmsYaYre+/Y1BWL+fGDjRuCtb539nCyRlE/R++abgcFB+0Q8wL52U5aiN8CC8cgjiZv+v0xN\ncUx38cU8WTANrsUHfRzG1FTz9sfRGkYRDmPJEr64mzePz8mkLin93XYVOKp3hGCYDiN0JOUjGLbV\nLaO5ZlyXVBFttUDrXIyZGb6ijRMMH4fhmult21dJDsMUjDe8Afjyl5NrPUCrYDz9NA98a9Y0H4ur\nY5iiEF3xtegaRjSS6u93F721wwD4OdHXzNIl5eMwhoe5O+vyy7nGFCVU0RvwF4zxceAf/oHrXRs3\nssNISx6Hoc/H6HkVV/TOU8PQSYCOpaLHZfSzFB1HARUUDCK6nIh+TERPENGf+vxOUQ4jesK6sK1u\nmcVhzJ/PV8p6XZ88RW+gdS7GsWN8QOn1g2wkda2YB+XSpfz/kRF+bVvUlUYw1qwB1q8Hvva11ucl\nRVJRdwG4HYZSs6/WinIYadtqo/v9ueeSj78QkVRPT6tgjIwAb3wj8I//CPzqrwL33Tf757bZ1Vki\nKcBfMC68EPi3f+Ol+e+9l4+ttOSpYejzLHpeJRW98zgMoBlL2Yre5lg35wSDiLoA3AzgFQDOA3Al\nEZ2d9Ht5HMattwLvfrf9Z/v2cV6ZRDSSmplpPQhdWWb0ZDIP6DxFb6A1kkqqXwDpIiki/uxPPMG/\nZ7PCep7G55uJAAAgAElEQVSKa7sffZTXfdL81m81J6qZJAnGv/wLRygmLocxOsqvpYWziC6pLG21\nrqJ3HKEm7kUHUb0PfvmXgRtuAD72sdk/D1X0Bni9p23b4gvvk5N8nP3XfwEvfrH7eUn4tNUC8Q5D\ndxNq4iKpvDUMoDkXI6noPecEA8DFALYrpZ5RSk0CuB3Aq5N+KavD2LYNePvb3fdO2LvX32GYgqEH\nep81f6LZfFQwkhyGTZw0UcFIql8A6YreAH/2bdvc0d3JJ/NaVbaTZssWvko0T4LXvIbXt4rWXuIE\n49vf5qz9mmtm/47LYURPPN+lQdI4jDRttT5FbxdFFL31Evb6eZde2lrLSFv0jkY+JgsX8nGybZv7\nOeZN0vLgM3EPsEdJ+ry2CUZcJBXCYehIKm7i3lwUjDUAjAV9sbPxWCxZHMbkJF/NvvWtPKBFUcov\nEgBaIymbRfWJpKLb71P01r9vO5GiWWtcS60mTSQF8Gf/8Y/di7/Nn8+DgW1NKTOOMrfZdj8CV9F7\nehr4wz8EPvjB1n3uchhRB5HFYegbHy1e7HYYvm21SfMw4sgiGElF72jx9LzzuDVaO5mpqeay7NHP\nliWSApJjKVerc1ryOAwdEUfPq6LmYZg1jKee4v1ungPtiKRiVkmqNjfeeOP//vvgw
UEsWjQIwH/i\n3l/8BXegbNjA/f9RDh7knW+LeqIMDPABND3NMYetzc514ESLuWkdhiuOAlqzVh/BGBiIX5I8rcMA\nmnWM6GzcBx4AXvay1ufbbjvqKnp/6lP8mX7911tfx+UwsgqG6TD01d68eflmemvRWbqUxUgfQ3rh\nS5cQa9JGUj5Fb1v0cfbZPKC/4AVcv1i2rDWC1AOYUrMvYHwFY8sWbnyw4fpO0pLGYdgiqWgNQ6li\nlwYBOJLato2/a3O/+jiMjRs3YuPGjek3wEHVBGMXAON2J1jbeKwFUzA++MF0DuP73wduuYVt9nHH\n8cEYHeR96xcAn+BLlvAJfvzxdsHwdRjmQeBT9I4TjCVLZl/Z+9QwkiKp6JXz8cdzwXn9evfvuFpr\nH3gAuP761sdttx2N7id9gr7nPcBdd9kdVpEOw4wM0tQwos/VNYyuLhakI0dYDIeG+LuIW/jStu1J\n+ERStpvwXHghny8veIF7zo1efn5qavYA7CsYn/iE++euuTFp8Vl8EIh3GGYkNTnJx57re8q7NAjA\nDmPbttn3QQH8BGNwcBCDRnFvw4YN6TfGoGqR1CYApxPRyUTUA+D1AO6I+4Xp6dlXTUkT90ZGgKuu\n4olJq1bxl71iRevEO9/6hcasY9hsv2/R2zygzaK37aobSBaMLA4jbSTl6zBMhoZYlM+2tDTYBkHb\nST0wwPfwuPBC+/ueeCKLn61lNKtgaIdhntBpaxi2SEp/Hi3Wvh16RXRJRR0GwBcEuo7hEgzAf0CO\n0s5IKovD0EvoxMVR+nXSRlLj4xx56jFk7Vq7GMz5Liml1DSA3wdwF4AfAbhdKfV43O/onaTtcZLD\neO97uSvnNa9pPrZyZatgpHEYwOw6Rp4aRshIylbDKKLoPTKSXjAefJC/B1uLr00cbYPOu98NvP/9\n7vedN48H3WiNKkkwfIrePg4jqa02erFj1jHSCEaIiXvmIOpyGA8/zP+OEwzbvvARjJNP5n3rMzM/\nDyFqGLqhZWwsPo5yvU4Seq6Uds29vXwsRD9/O4reVYukoJS6E4Dn6vatB1KcYNx3H/DP/8ytnCYr\nVrQOKmkdhtlaGzKSMoveIyOt+XDoGkaWojcQn7XbBOP++1sL3hpXJBX9nH/wB+731KxezbHUunXN\nx/I6DKWSHYZPDUMPzPpiJ4tguJynC59IyuYwzj8f2LqVn5dWMFxdfCZEvCbUo48Cv/ALrT8PFUmF\ncBhA89yK65ACsq2TZbuwW7eu9eIqmqbMOYeRhejVoEswxsa4I+rv/q71anjlylbByOIw4gTDZ6a3\nuf0zM7MHyXnzOCeNHuztiKTMAUfvyziHsW4dF0r11fn0NDca/Mqv2J9vG4CjRW9f1qxpLXzbBMPn\nfhjz5/PAMjaW7DB82mqjx24ZkZRP0dvmMBYtYhfw+OPFRFIAi5IrlqpSlxTQjKV8Iqm0DsM8tjQn\nndT6+aMNPjahD03tBcPXYWzYwF06r31t689sglFEDSONw7C1y7oGUl/BCFH0djmMOMHo6pp9f++v\nfpWf/8IX2p/vG0n5oB2GSZzD0Iu8uU48Xfj2cRhJbbVm/QIoJ5KyCZmPwwCasVQRkRQQX8eoSpdU\nVDB8Iqm0NQzbebpuXevxJPMwMuDjMMbGgL/929bZqhpb0TtvDSNN0ds2cc831gpdw1i8mN/DdR/i\nLJEUMDuW+pu/Aa691j0Jy6dLyhdba22cYIyPs5Nwdb3oOkbWLikzkkpyGD5L6+uBzmd58pkZngC2\ndu3sx6NFb5vDAJqF76IcRpJghIyk9LL3SvkLhnmu6XMrKZKyvc7TT8ff59sVSdlqGBJJpcTHYRw5\nwgebSwBcDiONYERrGHkm7oUUjLSRVFcXP8cVS2VxGEBziZBNm4BnnpnddBDFt0vKB1trbZxgJA1M\naRxGUiSlW2o1WRwGkX8s9cwzfJxGByNbJNUOh
/EzP8N1EttgGiqS6uqafTe9qSm+OLDdez3JYRw5\nkhxJ2S4Ur7yS18JyYYukXv5y7gg00Rc1eu05EQwPbA4jajmjJ2YUVw0jaySVt+g9Pm4XgrSCsWgR\n/1wPBj6CAbgL39PTvL3mZ1u0CHjb21pn/UY54wyei3HTTcA73xk/v8AlGD6TKKOkdRhJA5Ovw/Bp\nq40eu1kEQ2+/Tyz1ox/xuk1Rol1SroHnwgt5cp3t7oqaPILR38/noo4uTUJFUsDsKMe1bT5F76wO\nY/durum5sEVS55/PQhP3WUQwPPBxGNGsOIqtrTatw8hS9FaqdZ2dkJFU9K57PjUMwF341lfN5tUY\nEUd9SWvwn3EG8N3vAnfeCVx9dfxzfWZ6+5LFYcQNTCFqGGYkZR6XWYregH+n1NatbsHwcRjHHcd/\nHn649V4YmjyRFMB1xq1bWx8PFUkBswdZWxwFhC16m9+NUjy2xNUJbQ4j7rPo/S2C4YFPDSPJYUTb\nakdG+ECKE5koy5c3axi+RW99sNqm+4dwGMDsOoZPDQNwF77zHJBnnMGzzq+6Klm02lnDSBIMm8PQ\nNQDz9rKuSEovYa/XYwrlMEIKRtz3fOGFvI98HYaOS+KW1DdZvtx+sRIqkgLCOAwd96adh3H0KP8/\nrhPR9zwFZicqrtpTSGovGNErD9tMb9dELE1/f3MxOaBZ8E6zMmbUYdhqGD5WXQteCIcBNA9spfJH\nUq6rZh9WreJOqXe8I/m5IWsYy5bxYGiuMhsVhd5efv3paX+HYZ7URK0DgyuS0u83Pt56IaOd3ego\nv5bPdwX4R1Jbt7au5wX4OwyguQSMb9E77fdmu0uk3qYqOgyfeRjm6+gkI6TDkEgqBdErD5fDiHML\nRLPrGGnrF8DsBQh9B3vbyRQykgJmF+e6uvwXUwztMIg4mz7llOTnhmyrJWptrY1eienC8dhY8sBk\ncxhA6wDjsyCdzWEcPNhcJdn3gsUnklKK51Ccc07rz2xLg8Q5jL4+/8X20n5vej2tKFVzGFkjKS0Y\ncQ7DNzoGZo93IhgeRE9wVw0j6erEbK1NW78A2HLrEz6EYISMpI4cSXcQuhxGGQckYL9iTvqccZx5\nJg+WGtsVtN63SQOTrYZh/j6QvIKpbq11zcNIe8HiE0k9+yxvr605IY3DuOQS4LLL3O+TVzDiHEYo\nwTDHiBAOI00kJQ6jzfg4jKSiN5DfYQDN1lrfGoat86eISOrw4XS5aJxgxJ0coQgZSQEcozz0UPP/\ncYKRpYYBzB4YJifZzbluGuRyGNrZZRGMpEjKVb8A/LukAJ4b8h//4X6fEJGUeVdDTVGRVIgaRtpI\nau3acDUMEYyURA+k7m6OhcwCZFLRG5gtGFkcBtCsY9hsqnmvAE2ZDsO3fgEUE0mlIWQkBQAXXdRc\nOA/IJxh6Pak4h5EkrK4aRl8fH7s7dqQTDJ9IKkkwfB1GEkU5jKIiqTK6pMzX0Ss0h3IYEkmlJHog\nEbUWvn0chhlJZXUYpmBErzq6uni7kk6momoYaQSjipFUCIehxdq29EUawdCRlHlMmQNDUnOAy2EQ\nsVhv3x4+knIVvAH/pUF8iN4gqoqRVB6HkXdpkL17gbPOShYM33NVHEZKbPWJ6OS9shyGbq21DfZA\nqz0ts0sqzUHomodRpmCEmrgH8MJtU1M8YQpIdhhJRe99+3iQjd7QSW9z0iDiqmEALNbbtoWPpFyT\n9gC/xQd9id7vo5O6pLJM3DPbqAEeW84+O3wkpZQIhhe2kz+ao/oUvUPUMLTDcB1E0YHQNhmtqHkY\naWsY7YykQq5WC/CV+0UXNesYeSOpXbtaxdccYLJGUgDv+yeeCBtJKcUOw9YhBdjXkqpSJKUXhAx1\n7GV1GGYkpbczKZIyO/CA2Q7DjKdNskRS+s5/rrpZKGovGC6HYQpGUlstkL9LCoivYQCtA2EZkVSW\nGka7I6nQN
QyAY6mHH25OsIu+lm+XVH8/TwSMntBpHIYrkgLY3T35pN/Cg7b3trFrF+9T19yJNBP3\nkihiHsboKL+u7+S/JMzIOqvDWLy4+R3GOYzoa+3dy8vE6xswRdHblfSa5meZmCjv3Ky9YNiuhmw1\njDIdhiuSiuaZcZFUKIeRpYbR7qJ36BoG0HQY+niJznHQ7+njMJ57rlUw0tQwzEjK5jCmpsJGUnEF\nb8B+x70qOYyQcRQwO7J2bZ/+jkwXYDoMIr542Ls3uXPQPO/1xajrHEvjLgARjNSEdBh79vDJevBg\n8uqrNnxqGO1yGFnmYUQtc7trGCEchmswTNNWC4RzGNHjcmCA/w4ZScUVvIHZDiNvFp636K0n7pnH\nXsgOKcCvhqFvWGY6L9NhAHw+7dnjJxhjY80lh/R8GJuLTxMdA82xTgTDk1A1jIUL+fd+8hM+abPY\nX58aRtldUllqGL29dsucZ2mQNOiM32yNzlP0BniG+fAwxz15BEMfR3EOI28NAwgbScUVvIHZgpF0\nP5Ak8ha9dfRkHnshO6QAvxoG0OqWTIcBsGDs3ZscH+nzXrsL3Q0nDqNkXHdHswmGz0KCK1fyDVyy\n1C+AbDUM18S9ds7DAOwHdGkHZVfrlWqeojfAJ+n69XwfgiTBSOqSApIdRlJb7bFj/P1Hj5Nly/j9\n0+znvJGUWfTO+x3njaSA1uVBQkdSUcFImmCpiTqMJUv4e/SNpMy7eLo6EdMkAeZnEcHwwFUMs0VS\nPgecFows9QugmpFUlhoGYLfMZc30Bvz2VVouuihZMJLij74+FrSkGkZSW+3QEB+T0VrKwED64y/O\nYegOKV+HkfdqPm/RG2id7V10JJXHYQD+kZR5F09XJ6JEUgXiEoJ2OQy9AKGrc8Kn6F2FtaT0Z2mX\nwwBac/lQgvG97+WLpIj4mItzGD6RlBaMKMuWpReMuBrG7t0cL8VFXGbRuwoOI1r4LjqSyuow9PmU\nNpIC4h2GRFIFcfiwfTE1swtiaooHVZ8r4xUr8jkMvQDh8HB2hxHyFq0AH0Tj4xyVhXAYZQmGGbOk\nvaeCi/Xr+SIjj2AAdsFI4zB6e7nTyiYYF10EvPGN8e9v23ZXJPXBDwKXXx7/+6EdRhGCETqSMttq\nfRyGUm7B8HUYpmC4HEbaCzs9XpR1bmYsbVWDQ4fcgqEPCFcbpY2VK/m+x1kdBsB1jKEh+yAeLXq7\nJu7pmd4hHIa+697OnemuXKogGPqz5i14a047jfdBXsHo789Xw1iwgAXcNgiedhrf8jYNrkjqzjuB\nL38Z2Lw5/vdNwQjhMEJEUqZghI6kom21Lodhnq9TU83OKXM79fPiMGsYp5/Ojw0MtN7YC8jmMCSS\n8sRHMHxaajUrV/LfeQRj+fJml1GURYtm38gnKZIK4TAAvmIZH69XJGV+1rwFb01XF9/PIU4wfNZR\nCuEw9u9Pd0fHOGyR1L59wFvfCnzuc+7bqWpCOoy6RVK+DsN2AecbSaVxGGlrGBJJpcBHMHxaajX6\ny8waSQHsMFwH0MqVzfWMgHIiKYAPwHnz0p107XYY5iAYon6huegi+35YuBA4cIDfJyn6SnIYPjUM\nl8PIQjSSUgp4y1uAN78ZGBxM/v2QXVKhit5Rh9HuLinbBVwVahhlF707MpIyM0rfgjcQxmHECcaa\nNcCmTc3/uxxGyLZagA/AJUvS3XJ2YGC2uAHtq2GEFIw/+qPZs5rN99u/309Ur7+ehcck7Wq1oQXD\nPC4++UmukWzY4Pf7dXAYodwY0OowXOerj8NYsICdaxxF1jDKdhgdKRjRSMr3xNSCkddhuK4u16yZ\nfavQiYnWbQvdVgs0BSMNy5bNvksd0N4aRijBWL3a/X6+gmG741yW1WpDCYbpxnbvBv78z4F77vFf\niM7sksq7yF9RRW99boYg6jBcYpTkMJYs8VvzKe08jKyRVBqhycqciKR8r05
OPJG/gDyCoWsYNtau\n5eKzxjVxTwtG9Gc9PVx8011DgH8NI+3BVLVIKkTRO440gmEj7UxvINxVs+nG/viPgauvdq9Ma2P+\nfD6m9LIgeR1GHSKptF1SLofh033Z29tst9f1pLrOw6i9w1i7tvXxrA6jp4dvXpNnclpcJLV6NV8B\nzsywjU2KpKKvo5dKHh3lz6SUv8NIKxi2ondZS4MAswfBUEXvpPcbGgKe97zsv59mLSkg3CCoB+lv\nf5snJm7dmu73iZrrJoVoq9X3ZyCqZtHbt0vKp4bh4zD6+njJoRNOaMZXMtO7DRw8GLboDfDNdvIQ\nJxi9vXxVuX8//z/tTG9g9pX31FTzZI8ji2BEHcbUVPzVWGiKiqTi3m9mJozD8GmrBcIJRlcXv/81\n1wA33ZTtM+jCd96BR9+TQZ9/WZcGMWd6V7VL6uyzgb/+6+T36+1tbdfXy4qYaQEgE/cKJS6S0gdE\nmrbaEKxfD/zGb7h/btYxXIIxOWk/QIHWdlOfqCZrDcMUDD0Ipimc56GoLikX2hFkHZja6TD0+59+\nOvBrv5bt93XhO8TgbMZSVY2kQjiMBQuA170u+f1sgtHV1bpmFlD9SKpjBSOrw8jL2rXAO9/p/rkp\nGLaoRTuGo0eTZ4v7CsbKlcCqVX7br4lGUmXWL4D2OAygnjUMAHj1q4GPfjS7oGvBCPE9m4XvKkZS\noRyGL319PEkv2n0ZrWMoVX2HUfsaho9g5ClihybJYQD82PCw/QBdtoxbJs86y18wrr569lLhPixb\nxr+zfz/HbO0QDH0HRN/Pmff9gPo6jFtvzff7ulMqtMNw3W8ijiqtJaW3wxUR+9Dby+dSVDCidYzx\n8eZKzb7ozzI1FXYfueh4h5Gm6F0GPoKxYIH7/rwvfCHw3e/yv30H0q6u9Pc36OoCLrkEuP9+/v9c\ncRhZj5V21jBCENJhmPui6pGUr8OwRVK+6N9Lchi7dqWfAyaRVApCT9wrgzVrmq21cQ7DtbzI4CCw\ncSP/u+gr7xe9CPj+9/nfZQtGnWsYvpFUFQUjxNV8HSIps8biU8PIE0m5BCPqMB5/PF07NCBFb2/G\nxrjDwHZi1slh2A7Cnh73wfniFwP33ddchbdowfje9/jf7XAYRcz0dtHVxe+RdWAy20l9I6kqXciE\n6pIC8he9Fy3i83tqiv9fZFttGQ5DHwvRaDzqMEQwCkQvbW67Cm9n0TsJ3xqG6+BcvpznCjz8cPGC\nccklwEMPhRtI0lDEarU+75l1YCLibRwd5e2NG1zEYcRD1Gyt1e3cWQdrG1m6pMpyGGefne61JZLy\nxBVHAdlXqy2DtWv9ahhxB+dLX8qxVNGCsXQpcOqpvDx2OyOpMibuAfkEA+CB4eDB5PWFqlzDyLs0\nCJDfYQDNWCrN7Ql8qUoNI9qJKA6jQHwFo2oO47jj+CAcGcnmMAAWjO98p5zuIV3H6PRISr9nnmOl\nr49XvE1aKaC7m/9U6bjUXVJ5lwYB8he9gdmCEXo/tcNhEHG3oYkZSSkF/PjH6QVDu7k8gpaGWgvG\nwID9Z2ZGWbWiNxEvEbJrV3bBeMlLuFNqZKQ8wUjK5UNTdpeUfs+8DsNHMIiAxx4rd38mEdJh5I2k\ngOZ9vUPfPAko32EsWcJRdLRT0Yyk9u7lZfXjbqVro6eHX6O3N3nV3BDUWjB8I6kqXckBzTqGK2pJ\niqROOAFYt45bXosWjEsvbY/DKLtLCsgvGNph+OynM8/M/j5FYLbVtnumN9AaSYWkbIexciXf+jmK\n6TCyxFFA87OUdW52tGAoVb1ICmgKRlaHAXAsddddxQvGqafydm7b1vlF79/8TeDcc7P/vq/DqCI9\nPSwW09P5xTmEw9DLZhQVSWVZrTZP5GNLQ0yHkaXgDTTPCxGMBFwLDwJNwdAzJ8taMM8XPRcjTjCS\nBsjBQe6UKnogJeJY6u6721fDKKvo/a5
3ue+X4UNfHx+XdRSM7m7uPAxRYA7pMIqIpKKr1fpGUqHP\ntRAOQ8dcIhgJ+DiMKroLINlhLFiQfDXzkpewgyrjyvtFL+LlmTt5pncItMMocz+Forubz6kQ2x66\n6B1aMObP53bdmZn4pUtCOgwbpsPIUvAGWNx7ekQwEokTDG05q9ZSqzEFI+3EPc2KFXyAlSUYQOfX\nMPLi2yVVRbRghBicQxW9tcMIfdGnB9nJyXQOI7RgaIehVHaHAfD+FsFIoJMdhk8NA+A6RhmCcdFF\nPKCUKRjd3XwiJZ3UVaLONQwzkspL1YveQLNYnMZhhD7XdGfTvn0sHFnvxdMRDoOIbiCinUT0cOPP\n5cbPriei7UT0OBFdZjy+nogeIaIniOgjca/vKxhVdRg7d7Itti0K6BNJAcCf/AnfNKdoentZNMqO\nWnQsVVbROy91rmHo9swQ33FIh1G0YLTTYQA8ht1/P3fNZW2LLVMwil7e/G+UUn9jPkBE5wB4HYBz\nAKwFcDcRnaGUUgA+AeBqpdQmIvo6Eb1CKfUN2wsnRVKTk3zAVdFhrF4N7NnDVza2AmNPj1/h8ZRT\nwm+bi1tvtd8Ot0h0LFVW0Tsv2mGU+b2Eorubb1EbymGMjbFDjGtbjaPISApoxtZxDsPstizCYQBc\nx7jvvuxxFNBZkZRt2Hs1gNuVUlNKqacBbAdwMRGtBNCvlNrUeN7nAFzheuE4wdAZ5YED1XQYPT28\nJpRrEPSNpMrk7LPLF1/dKVWXSKoTahihit7j49yi29XFE9LSUrTD0Ot+Ae7t6+pqzoAv0mHkFYyO\niKQa/D4RbSaiTxGRvqv0GgA7jOfsajy2BsBO4/GdjcesxAkGwAfE0FA1HQbAsZRrEEyauDdXMCOp\nOghG3WsYoYveeb43PdO7yEjq2LFk96PdUpEOY9Om+ghGrkiKiL4JwFxSiwAoAH8G4OMA3qeUUkT0\n/wB8GMBv53k/k/37b8TNN/MXPjg4iMHBwVk/X7CA7xZXRYcBsGDs3m3/2bnnZrsq6zR0JFUXwejr\n46tiaavlK/K8glF0JDU8nLx9WjCKdBijo9km7WniIqmNGzdio76BTgByCYZS6uWeT/0HAF9t/HsX\ngHXGz9Y2HnM9bmX+/Bvx/ve737AODsO2XAAAvOlN5W5LVdEOo4xFFkOgBxRxGGEcRtFF76o4jK4u\n4Iwzsr9GnMOIXkxv2LAh+xuh2C6plcZ/XwPgsca/7wDweiLqIaJTAJwO4EGl1B4Ah4noYiIiAG8C\n8BXX68fFUUDTYVRZMOpw1dxO6ljDMP+uEz093FYbcuJenu/NXBqkKMFI4zCKmLgH8Dh22mn5xKg2\nkVQCHySiCwDMAHgawO8CgFJqKxF9EcBWAJMA3tbokAKAtwP4DIBeAF9XSt3pevEkwejpYYdR5Uiq\nDlfN7aSONQygnoKhFx8MOQ8jhGAUtXhoWodRlMsdGMhXvwDK7ZIqTDCUUs5gRSn1AQAfsDz+EICf\n9Xl9cRidTx1rGEB9axhA2HkYeb637m5+neeeKzaSarfDuPxy4PnPz/caneIwCsVHMPbsqa7DuOQS\n4B3vaPdWVJu6RVJ1dxhAdRwGwHWM3buLa6v1dRhF1tHOO4//5GFgoPXmTEXR0YJR5aL3smXAm9/c\n7q2oNnUrete5hlE1hwGwYOzd2/4uqaNHuWuxqp2Ln/60fcWIIuhowRgerq7DEJKpWyQlDoMJUfQG\nWDBCbVOUNDWMw4erN5HWJMtM+qzUdvFBH8EAquswhGTqFknVuYah929V5mEAxQuGr8M4dKgeDrcM\nRDCEyiJdUuUR0mGEjKQWLCgmbukkh1EmHS8YEknVFzOSqsMVntQwmJAOowh3AaTrkhKH0aTjBUMc\nRn0xi951chh1jKSq6jCKFgwfh3HokDgMTccKhj5QizrghOLp6+OTemamuh0qJp3gMEKcL93dvFLt\n6Gh
+wSjqgk83xfg4jMOHxWFoaisYAwPxP1+wgA/+rDclEdpPXx+frL73B2k3da5hhCx6EzXbUcVh\ndBa1HU59IimpX9SbhQubglEHFi7k467MNsdQhHQYAO+HI0fyfXf9/cUKhjiM9HS0YEj9ot709dWr\n4NjbC2zdWg83FEULRqgr6VAOo6hzWBxGNmo7cW/p0vifi2DUHzOSqgunntruLchGdzc7pFAR7oIF\n+QXj0kuLG6jTOgwRDKa2gpH0BUokVX8WLuSru+XL270lnY8WjFD09nIklWeNo1NPLU6AtWD4Ooy6\nuNyiqW0klYQ4jPrT15f/KlXwo7s7bL0gRCRVJFoAfBxGUXfbqyMdLRjiMOqN7jaq6qDTSZx0EnDN\nNeFeL0TRu0j0dvk4DEAchqa2kVQSp53GVlKoLzoikZO1eJYsAd797nCvpyOpqguGj8Mw/57rdKxg\nvPzl/EeoL+Iw6ou+vUBVvztxGNno2EhKqD/6ZK3qoCO40Q6jqnNSxGFkQwRDqCxdXXxlJ4JRP6pe\n9CPdakUAAAciSURBVBaHkQ0RDKHSLFxY3UFHcFOXorc4jHSIYAiVpq9Pru7qSG8vLxpZVcHQx5Sv\nwxDBYEQwhErT11fdQUdw4zvPoV34OgzdeCEXLYwIhlBpJJKqJ1VvWEhbwxCHwYhgCJVGHEY9qYtg\n+NYwxGEwIhhCpRHBqCd1iaSSHIb+HOIwGBEModLoe0wI9aJTHAYRH39yDDIiGEKlEYdRTzrFYQAs\nfuIwGBEModKIYNSTqjuMNILW2ysOQyOCIVQaEYx6UnXBEIeRDREModJIW209qUsk5bN9P/MzwAkn\nFLs9daFjV6sVOoPf+72wd4ITyqHqDmPePF6rzMdhfO1rxW9PXRDBECrNOee0ewuELFTdYQC8bVXe\nvioikZQgCMGpusMAeNuquvx6VRHBEAQhOHUQDFk6Pz0iGIIgBKcOkdT69cDAQLu3ol5IDUMQhOBo\nh1HlyOfOO9u9BfVDHIYgCMFZsACYP587kYTOQb5OQRCC09tb7ThKyIYIhiAIwenvB1atavdWCKEh\npVS7tyE1RKTquN2CMJdQild7FaoDEUEplflbEYchCEIhiFh0HiIYgiAIghciGIIgCIIXIhiCIAiC\nFyIYgiAIghciGIIgCIIXuQSDiH6diB4jomkiWh/52fVEtJ2IHieiy4zH1xPRI0T0BBF9xHi8h4hu\nb/zOfUR0Up5tEwRBEMKS12E8CuDXAHzHfJCIzgHwOgDnAHglgI8T/W+T3ScAXK2UOhPAmUT0isbj\nVwM4oJQ6A8BHAHww57bNCTZu3NjuTagMsi+ayL5oIvsiHLkEQym1TSm1HUC04/rVAG5XSk0ppZ4G\nsB3AxUS0EkC/UmpT43mfA3CF8Tufbfz7SwB+Mc+2zRXkZGgi+6KJ7Ismsi/CUVQNYw2AHcb/dzUe\nWwNgp/H4zsZjs35HKTUN4BARHVfQ9gmCIAgpSVzenIi+CWCF+RAABeDPlFJfLWrD0OpaBEEQhHai\nlMr9B8C3Aaw3/n8dgD81/n8ngEsArATwuPH46wF8wnxO49/zAOyLeT8lf+SP/JE/8if9nzxjfcgb\nKJmO4A4AtxHRTeCo6XQADyqlFBEdJqKLAWwC8CYAf2f8zlUAHgDwWgDfcr1RnsWzBEEQhGzkEgwi\nugLARwEcD+BrRLRZKfVKpdRWIvoigK0AJgG8zVhe9u0APgOgF8DXlVL6vlefBvB5ItoOYAjsPgRB\nEISKUMvlzQVBEITyqd1MbyK6nIh+3Jj496ft3p6yIKK1RPQtIvoRET1KRO9oPD5ARHcR0TYi+gYR\nLW33tpYFEXUR0cNEdEfj/3NyXxDRUiL618Yk2R8R0SVzeF+8qzGZ+BEiuq0xIXhO7Asi+jQR7SWi\nR4zHnJ/dNbk6jloJBhF1AbgZwCsAnAfgSiI6u71bVRpTAK5VSp0H4
IUA3t747NcBuFspdRa47nN9\nG7exbN4Jjj01c3Vf/C043j0HwPkAfow5uC+IaDWAPwA34DwfHLlfibmzL24Fj40m1s9OROfCPbna\nSa0EA8DFALYrpZ5RSk0CuB084a/jUUrtUUptbvz7GIDHAazF7AmPn0VzImRHQ0RrAfwfAJ8yHp5z\n+4KIlgB4sVLqVgBoTJY9jDm4LxrMA7CIiOYD6APPAZsT+0Ip9V0AByMPuz77q2CZXJ30HnUTjOiE\nQHPi35yBiJ4H4AIA9wNYoZTaC7CoADixfVtWKjcB+GNwq6BmLu6LUwDsJ6JbG/HcLUS0EHNwXyil\nfgrgwwCeBQvFYaXU3ZiD+8LgRMdnd02ujqVugjHnIaLF4KVT3tlwGtGuhY7vYiCiXwawt+G44mx0\nx+8LcOyyHsDHlFLrAQyDY4i5eFwsA19RnwxgNdhp/Cbm4L6IIddnr5tg7AJgrmK7tvHYnKBhs78E\n4PNKqa80Ht5LRCsaP18JYF+7tq9ELgXwKiJ6EsAXALyMiD4PYM8c3Bc7AexQSv2g8f9/AwvIXDwu\nfgnAk0qpA43lhf4dwIswN/eFxvXZdwFYZzzPayytm2BsAnA6EZ1MRD3guRp3tHmbyuQfAWxVSv2t\n8dgdAN7c+PdVAL4S/aVOQyn1bqXUSUqpU8HHwLeUUm8E8FXMvX2xF8AOIjqz8dAvAvgR5uBxAY6i\nXkBEvY0C7i+CmyLm0r4gtE6ifnPj3+ZnvwPA6xtdZKegMbk68cXrNg+DiC4Hd4V0Afi0Uuqv2rxJ\npUBElwK4B7ykvJ7m/27wl/xF8NXCMwBep5Q61K7tLBsieimAP1JKvaqxWOWc2xdEdD64+N8N4EkA\nbwEXf+fivrgBfBExCeCHAH4bQD/mwL4gon8GMAhgOYC9AG4A8B8A/hWWz05E14NvKzEJjrjvSnyP\nugmGIAiC0B7qFkkJgiAIbUIEQxAEQfBCBEMQBEHwQgRDEARB8EIEQxAEQfBCBEMQBEHwQgRDEARB\n8EIEQxAEQfDi/wM7pKZpBnDf7QAAAABJRU5ErkJggg==\n", 359 | "text/plain": [ 360 | "" 361 | ] 362 | }, 363 | "metadata": {}, 364 | "output_type": "display_data" 365 | } 366 | ], 367 | "source": [ 368 | "plot_images(plt, results, (rows, rows), (2, rows))\n", 369 | "plt.figure()\n", 370 | "plt.plot(lda.loglikelihoods)\n", 371 | "plt.figure()\n", 372 | "plt.plot(np.diff(lda.loglikelihoods)[-100:])" 373 | ] 374 | }, 375 | { 376 | "cell_type": "code", 377 | "execution_count": null, 378 | "metadata": { 379 | "collapsed": true 380 | }, 381 | "outputs": [], 382 | "source": [] 383 | } 384 | ], 385 | "metadata": { 386 | "kernelspec": { 387 | "display_name": "Python 3", 388 | "language": "python", 389 | "name": "python3" 390 | }, 391 | "language_info": { 392 | "codemirror_mode": { 393 | "name": "ipython", 394 | "version": 3 395 | }, 396 | "file_extension": ".py", 397 | "mimetype": "text/x-python", 398 | "name": "python", 399 | "nbconvert_exporter": "python", 400 
| "pygments_lexer": "ipython3", 401 | "version": "3.5.1" 402 | }, 403 | "widgets": { 404 | "state": {}, 405 | "version": "1.1.2" 406 | } 407 | }, 408 | "nbformat": 4, 409 | "nbformat_minor": 0 410 | } 411 | --------------------------------------------------------------------------------