├── predikon ├── __init__.py └── models.py ├── requirements.txt ├── tests ├── requirements.txt ├── data │ ├── outcomes.csv │ ├── weights.csv │ └── data.csv └── test_models.py ├── .gitignore ├── .travis.yml ├── .github └── workflows │ └── python-publish.yml ├── LICENSE.txt ├── setup.py ├── README.md ├── docs └── usage.md └── notebooks └── example.ipynb /predikon/__init__.py: -------------------------------------------------------------------------------- 1 | from predikon.models import * 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.0.0 2 | scikit-learn>=0.16.1 3 | -------------------------------------------------------------------------------- /tests/requirements.txt: -------------------------------------------------------------------------------- 1 | pytest>=4.5.0 2 | pytest-cov 3 | coveralls 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | .coverage 4 | *.egg-info 5 | *.DS_STORE 6 | .ipynb_checkpoints 7 | notebooks/prepare-data.ipynb 8 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - '3.5' 4 | - '3.6' 5 | - '3.7' 6 | - '3.8' 7 | install: 8 | - pip install -r requirements.txt 9 | - pip install -r tests/requirements.txt 10 | - pip install -e . 11 | cache: 12 | - pip 13 | script: 14 | - pytest -vx --cov=predikon/ 15 | after_success: 16 | - coveralls 17 | -------------------------------------------------------------------------------- /tests/data/outcomes.csv: -------------------------------------------------------------------------------- 1 | 0.4311 2 | 0.6195 3 | 0.5465 4 | 0.2746 5 | 0.6571 6 | 0.3371 7 | 0.3947 8 | 0.5801 9 | 0.4763 10 | 0.6327 11 | 0.5804 12 | 0.7215 13 | 0.6678 14 | 0.2552 15 | 0.0803 16 | 0.2824 17 | 0.5442 18 | 0.6796 19 | 0.3109 20 | 0.3720 21 | 0.5099 22 | 0.2601 23 | 0.3270 24 | 0.5187 25 | 0.3512 26 | 0.4092 27 | 0.2590 28 | 0.2848 29 | 0.5772 30 | 0.5776 31 | 0.6551 32 | 0.5567 33 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | jobs: 11 | deploy: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up Python 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: '3.x' 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install setuptools wheel twine 25 | - name: Build and publish 26 | env: 27 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 28 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 29 | run: | 30 | python setup.py sdist bdist_wheel 31 | twine upload dist/* 32 | -------------------------------------------------------------------------------- /LICENSE.txt: 
-------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2020 Alexander Immer & Victor Kristof 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from os import path 2 | from setuptools import find_packages, setup 3 | 4 | CURRENT_DIR = path.abspath(path.dirname(__file__)) 5 | 6 | 7 | def read_me(filename): 8 | with open(path.join(CURRENT_DIR, filename), encoding='utf-8') as f: 9 | return f.read() 10 | 11 | 12 | def requirements(filename): 13 | with open(path.join(CURRENT_DIR, filename)) as f: 14 | return f.read().splitlines() 15 | 16 | 17 | AUTHORS = "A. Immer, V. 
Kristof" 18 | NAME = "predikon" 19 | PACKAGES = find_packages() 20 | DESCR = "Predikon: Sub-Matrix Factorization for Real-Time Vote Prediction" 21 | LONG_DESCR = read_me('README.md') 22 | LONG_DESCR_TYPE = 'text/markdown' 23 | REQUIREMENTS = requirements('requirements.txt') 24 | VERSION = "0.4" 25 | URL = "https://github.com/indy-lab/predikon-lib" 26 | # DOWNLOAD_URL = 'https://github.com/indy-lab/predikon/archive/v0.2.tar.gz' 27 | LICENSE = "MIT" 28 | 29 | 30 | setup( 31 | author=AUTHORS, 32 | name=NAME, 33 | version=VERSION, 34 | description=DESCR, 35 | long_description=LONG_DESCR, 36 | long_description_content_type=LONG_DESCR_TYPE, 37 | install_requires=REQUIREMENTS, 38 | url=URL, 39 | # download_url=DOWNLOAD_URL, 40 | license=LICENSE, 41 | packages=PACKAGES, 42 | zip_safe=False, 43 | python_requires=">=3.5", 44 | ) 45 | -------------------------------------------------------------------------------- /tests/data/weights.csv: -------------------------------------------------------------------------------- 1 | 790 2 | 2188 3 | 531 4 | 692 5 | 536 6 | 793 7 | 1045 8 | 790 9 | 4518 10 | 1070 11 | 595 12 | 1021 13 | 262 14 | 268 15 | 188 16 | 1114 17 | 238 18 | 381 19 | 271 20 | 1230 21 | 936 22 | 108 23 | 3173 24 | 57 25 | 1801 26 | 1555 27 | 667 28 | 977 29 | 1946 30 | 1487 31 | 283 32 | 1796 33 | 2314 34 | 2923 35 | 3101 36 | 2003 37 | 271 38 | 90 39 | 198 40 | 368 41 | 1082 42 | 692 43 | 1205 44 | 2569 45 | 624 46 | 788 47 | 95 48 | 690 49 | 1119 50 | 1456 51 | 104 52 | 457 53 | 100 54 | 843 55 | 80 56 | 226 57 | 27 58 | 616 59 | 731 60 | 392 61 | 747 62 | 210 63 | 1865 64 | 1141 65 | 589 66 | 117 67 | 435 68 | 87 69 | 746 70 | 2777 71 | 296 72 | 215 73 | 426 74 | 87 75 | 461 76 | 301 77 | 514 78 | 26 79 | 382 80 | 366 81 | 875 82 | 126 83 | 984 84 | 805 85 | 773 86 | 672 87 | 419 88 | 915 89 | 398 90 | 377 91 | 1252 92 | 2219 93 | 439 94 | 874 95 | 4873 96 | 480 97 | 162 98 | 759 99 | 1391 100 | 370 101 | 3514 102 | 251 103 | 331 104 | 146 105 | 1396 106 | 385 107 | 1187 108 | 367 109 | 651 110 | 273 111 | 881 112 | 336 113 | 644 114 | 313 115 | 1000 116 | 1022 117 | 1006 118 | 155 119 | 463 120 | 329 121 | 239 122 | 267 123 | 97 124 | 215 125 | 652 126 | 189 127 | 331 128 | 1477 129 | 1572 130 | 228 131 | 446 132 | 1061 133 | 413 134 | 289 135 | 488 136 | 1349 137 | 167 138 | 975 139 | 135 140 | 550 141 | 271 142 | 182 143 | 3674 144 | 167 145 | 869 146 | 622 147 | 589 148 | 898 149 | 142 150 | 313 151 | 12875 152 | 448 153 | 388 154 | 574 155 | 330 156 | 399 157 | 233 158 | 339 159 | 449 160 | 116 161 | 126 162 | 850 163 | 125 164 | 75 165 | 527 166 | 553 167 | 267 168 | 453 169 | 2419 170 | 194 171 | 2283 172 | 188 173 | 877 174 | 70 175 | 659 176 | 689 177 | 1056 178 | 404 179 | 631 180 | 228 181 | 440 182 | 317 183 | 210 184 | 1776 185 | 178 186 | 655 187 | 245 188 | 217 189 | 473 190 | 258 191 | 432 192 | 1777 193 | 776 194 | 157 195 | 787 196 | 157 197 | 884 198 | 1097 199 | 217 200 | 350 201 | 505 202 | 103 203 | 1030 204 | 575 205 | 207 206 | 454 207 | 290 208 | 788 209 | 3682 210 | 128 211 | 768 212 | 94 213 | 92 214 | 1008 215 | 437 216 | 1999 217 | 90 218 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Predikon 2 | _Sub-Matrix Factorization for Real-Time Vote Prediction_ 3 | 4 | [![Build Status](https://travis-ci.com/indy-lab/predikon-lib.svg?branch=master)](https://travis-ci.com/indy-lab/predikon-lib) 5 | [![Coverage 
Status](https://coveralls.io/repos/github/indy-lab/predikon/badge.svg?branch=master)](https://coveralls.io/github/indy-lab/predikon?branch=master)
6 | [![PyPI](https://img.shields.io/pypi/v/predikon?color=blue)](https://pypi.org/project/predikon/)
7 | [![Downloads](https://pepy.tech/badge/predikon)](https://pepy.tech/project/predikon)
8 | 
9 | The `predikon` library is a Python implementation of the algorithm proposed in
10 | 
11 | > Alexander Immer\*, Victor Kristof\*, Matthias Grossglauser, Patrick Thiran, [*Sub-Matrix Factorization for Real-Time Vote Prediction*](https://infoscience.epfl.ch/record/278872), KDD 2020
12 | 
13 | The `predikon` algorithm enables you to predict aggregate vote outcomes (e.g., national) from partial outcomes (e.g., regional) that are revealed sequentially.
14 | See the [usage documentation](docs/usage.md) for details on how to use this library, or read the paper linked above for more details on how the algorithm works.
15 | 
16 | It is the algorithm powering [predikon.ch](http://www.predikon.ch), a platform for real-time vote prediction in Switzerland.
17 | 
18 | ## Installation
19 | 
20 | To install the Predikon library from PyPI, run
21 | 
22 | ```bash
23 | pip install predikon
24 | ```
25 | 
26 | ## Getting Started
27 | 
28 | Given a dataset `Y` of historical vote results collected in an array of `R` regions and `V` votes, given a vector `y` of partial results, and given an optional weighting `w` per region (e.g., the number of valid votes in each region), it is easy to predict the unobserved entries of `y` after observing at least one regional result (one entry of `y`) of an ongoing referendum or election:
29 | 
30 | ```python
31 | from predikon import LogisticSubSVD
32 | model = LogisticSubSVD(Y, w)
33 | pred = model.fit_predict(y)
34 | # All unobserved entries in `y` are now filled in.
35 | ```
36 | 
37 | You can then obtain a prediction for the aggregate outcome (assuming the weights are the number of valid votes in this example) as:
38 | 
39 | ```python
40 | N = w.sum() # Total number of votes.
41 | ypred = pred.dot(w) / N
42 | ytrue = y.dot(w) / N
43 | print(abs(ypred - ytrue))
44 | ```
45 | 
46 | Have a look at the [example notebook](notebooks/example.ipynb) for a complete example of how to use the `predikon` library (with Swiss referenda).
47 | 
48 | ## Going Further
49 | 
50 | You can find further information in:
51 | 
52 | - The [example notebook](notebooks/example.ipynb) using Swiss referenda
53 | - The [usage documentation](docs/usage.md) describing the setup in more detail
54 | - The [scientific paper](https://infoscience.epfl.ch/record/278872) introducing the algorithm
55 | 
56 | And don't hesitate to **reach out to us** if you have questions or to **open issues**!
57 | 
58 | ## Requirements
59 | 
60 | - Python 3.5 and above
61 | - [NumPy](https://numpy.org) 1.0.0 and above
62 | - [scikit-learn](https://scikit-learn.org) 0.16.1 and above
63 | 
--------------------------------------------------------------------------------
/docs/usage.md:
--------------------------------------------------------------------------------
1 | # Usage
2 | 
3 | ![problem_structure](https://user-images.githubusercontent.com/7715036/86534364-346d1980-bed8-11ea-8956-439354c87285.png)
4 | 
5 | 
6 | ## Historical Votes
7 | 
8 | Given R regions and V votes, we store a dataset of historical votes in a vote matrix (or tensor) consisting of the (fully) observed vote outcomes.
9 | 
10 | 
11 | In the case of a binary outcome, e.g., a referendum, each entry of the vote matrix corresponds to the fraction of "yes" in the r-th region and v-th vote.
12 | Alternatively, in the case of multiple outcomes, e.g., an election, each entry of the vote tensor is a vector that corresponds to the fraction of votes for each party.
13 | (We assume that the sum of the entries in each vector equals 1.)
14 | For K parties, the vote tensor will be of shape R x V x K.
15 | In the code, this matrix (or tensor) of historical outcomes is denoted by `M_historical` and is used to initialize models.
16 | 
17 | 
18 | 
19 | ## Weighting of Regions
20 | 
21 | Additionally, we support some weighting to account for disparities between regions.
22 | For example, the weighting may be the number of valid votes or the population in each region.
23 | The weighting vector is called `weighting` and is required when initializing models, together with the historical outcomes.
24 | If no such data is available, one can pass `weighting=None` to ignore weighting.
25 | 
26 | ## Real-Time Predictions
27 | 
28 | To predict the outcomes of an ongoing election in real time, the algorithm predicts the unobserved entries of the partially observed vector (or matrix) of current results.
29 | 
30 | For binary outcomes, this is an R-dimensional vector; for multiple outcomes, it is an R x K matrix.
31 | In the code, the partially observed vector `m_current` contains the actual observed values in entries where the regional result is available and `np.nan` in entries of unobserved regions.
32 | These unobserved entries are then filled in by the method of choice by calling
33 | `model.fit_predict(m_current)`.
34 | 
35 | ## Putting It Together
36 | 
37 | Assuming you have the above quantities ready for a particular vote (i.e., `M_historical`, `weighting`, and `m_current`), inferring the unobserved entries after observing at least one regional outcome of an ongoing election is easy:
38 | ```python
39 | from predikon import LogisticSubSVD
40 | model = LogisticSubSVD(M_historical, weighting)
41 | m_predict = model.fit_predict(m_current)
42 | # All unobserved entries in `m_current` are now filled in.
43 | ```
44 | 
45 | ## Choosing the Right Model (Hyperparameters)
46 | 
47 | For binary outcomes, i.e., for referenda, the preferred method is `LogisticSubSVD`.
48 | For multiple possible outcomes, i.e., for elections, the preferred method is `LogisticTensorSubSVD`.
49 | Both have two important hyperparameters, `l2_reg` and `n_dim`:
50 | - `l2_reg` controls the strength of the L2 regularization of the parameters
51 | - `n_dim` controls the number of latent dimensions used for the model
52 | 
53 | In many cases, not much historical data is available, so the number of latent dimensions can be set to a low value, e.g., between 5 and 10.
54 | The `l2_reg` parameter defaults to `1e-5` but should be tuned for the problem at hand, e.g., using cross-validation (see the sketch below).
55 | Gaussian versions of our model (`GaussianSubSVD` and `GaussianTensorSubSVD`) are also available, and they require the same hyperparameters.
56 | 
57 | We additionally provide two baselines for comparison and model selection:
58 | standard (weighted) averaging (`WeightedAveraging`) and, for binary outcomes, probabilistic matrix factorization (`MatrixFactorisation`).
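To make this concrete, here is a minimal model-selection sketch (not part of the library): it back-tests a few candidate models on a past, fully observed vote by hiding most regions and scoring the predictions on the held-out ones. The synthetic `M_full` and `weighting`, the 20% observation rate, and the candidate grid are illustrative placeholders; substitute your own data and hyperparameter ranges.

```python
import numpy as np
from predikon import GaussianSubSVD, LogisticSubSVD, WeightedAveraging

# Placeholder data: replace with your own R x V matrix of outcomes in [0, 1]
# and an R-dimensional weighting (e.g., valid votes per region).
rng = np.random.RandomState(0)
R, V = 50, 12
M_full = rng.rand(R, V)
weighting = rng.randint(100, 1000, size=R).astype(float)

# Hold out the most recent vote and reveal only ~20% of its regions.
M_historical, m_true = M_full[:, :-1], M_full[:, -1]
m_current = np.full(R, np.nan)
observed = rng.permutation(R)[:R // 5]
m_current[observed] = m_true[observed]
held_out = np.setdiff1d(np.arange(R), observed)

candidates = {
    'WeightedAveraging': WeightedAveraging(M_historical, weighting),
    'LogisticSubSVD (n_dim=5)': LogisticSubSVD(M_historical, weighting, n_dim=5, l2_reg=1e-5),
    'LogisticSubSVD (n_dim=10)': LogisticSubSVD(M_historical, weighting, n_dim=10, l2_reg=1e-5),
    'GaussianSubSVD (n_dim=10)': GaussianSubSVD(M_historical, weighting, n_dim=10, l2_reg=1e-4),
}
for name, model in candidates.items():
    pred = model.fit_predict(m_current)
    # Mean absolute error on the regions that were hidden from the model.
    mae = np.mean(np.abs(pred[held_out] - m_true[held_out]))
    print(f'{name}: MAE = {mae:.4f}')
```

Repeating this back-test over several past votes (and several random subsets of observed regions) gives a more reliable basis for choosing `n_dim` and `l2_reg` than a single split.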
59 | -------------------------------------------------------------------------------- /notebooks/example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Example: Swiss Referenda\n", 8 | "\n", 9 | "We propose in this notebook an example of how to use the `predikon` library to make vote predictions.\n", 10 | "The data is a subsample (10%) of Swiss referenda results.\n", 11 | "The full dataset can be found in the [submatrix-factorization](https://github.com/indy-lab/submatrix-factorization/blob/master/data/munvoteinfo.pkl) repo." 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": {}, 17 | "source": [ 18 | "## Imports" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "lines_to_next_cell": 2, 26 | "title": "Import" 27 | }, 28 | "outputs": [], 29 | "source": [ 30 | "import numpy as np\n", 31 | "from predikon import LogisticSubSVD, GaussianSubSVD, WeightedAveraging\n", 32 | "\n", 33 | "DATA_PATH = '../tests/data/'" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "## Load Data\n", 41 | "\n", 42 | "Each entry `data[i,j]` is the percentage of \"yes\" in region `i` for referendum `j`.\n", 43 | "A region in this dataset is a Swiss municipality.\n", 44 | "The `weights` are the number of valid votes in each municipality.\n", 45 | "The `outcomes` are the aggregate national outcomes for each referendum." 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 2, 51 | "metadata": { 52 | "lines_to_next_cell": 2, 53 | "title": "Load data." 54 | }, 55 | "outputs": [], 56 | "source": [ 57 | "data = np.loadtxt(f'{DATA_PATH}/data.csv', dtype=np.float, delimiter=',')\n", 58 | "weights = np.loadtxt(f'{DATA_PATH}/weights.csv', dtype=np.int, delimiter=',')\n", 59 | "outcomes = np.loadtxt(f'{DATA_PATH}/outcomes.csv', dtype=np.float, delimiter=',')" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "## Prepare Data\n", 67 | "\n", 68 | "The matrix `Y` contains historical data up to vote `V`.\n", 69 | "The vector `y` contains the vote results for the vote we would like to make predictions." 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 3, 75 | "metadata": { 76 | "title": "Prepare data." 77 | }, 78 | "outputs": [ 79 | { 80 | "name": "stdout", 81 | "output_type": "stream", 82 | "text": [ 83 | "Number of regions: 217\n", 84 | "Number of votes: 31\n" 85 | ] 86 | } 87 | ], 88 | "source": [ 89 | "Y, y = data[:, :-1], data[:, -1]\n", 90 | "ytrue = outcomes[-1]\n", 91 | "R, V = Y.shape\n", 92 | "print(f'Number of regions: {R:>3}')\n", 93 | "print(f'Number of votes: {V:>3}')" 94 | ] 95 | }, 96 | { 97 | "cell_type": "markdown", 98 | "metadata": {}, 99 | "source": [ 100 | "## Set Observations\n", 101 | "\n", 102 | "Set which regions are observed.\n", 103 | "The unobserved regional results are `nan`." 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 4, 109 | "metadata": { 110 | "title": "Sample observations." 
111 | }, 112 | "outputs": [], 113 | "source": [ 114 | "# Fix the seed for reproducibility.\n", 115 | "np.random.seed(200)\n", 116 | "# Random permutation of the regions.\n", 117 | "inds = np.random.permutation(R)\n", 118 | "# Proportion of observed results.\n", 119 | "p = 0.1\n", 120 | "# Number of observations (10 %).\n", 121 | "n = int(np.ceil(R * p))\n", 122 | "# Set observations.\n", 123 | "obs = inds[:n]\n", 124 | "# Define new vector of (partial) regional results.\n", 125 | "ynew = np.array([np.nan] * R)\n", 126 | "ynew[obs] = y[obs]" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "## Evaluate Models\n", 134 | "\n", 135 | "We evaluate three models:\n", 136 | "1. A weighted average baseline\n", 137 | "2. Our algorithm with a Gaussian likelihood\n", 138 | "3. Our algorithm with a Bernoulli likelihood\n", 139 | "\n", 140 | "We set the latent dimensions `D=10` and the regularizer `reg=1e-5`.\n", 141 | "We report the predicted aggregated outcome, and we compare it against the true aggregate outcome.\n", 142 | "An aggregate outcome is the weighted average of the regional observations and the regional predictions, where the weight is the number of valid votes in each region." 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 5, 148 | "metadata": { 149 | "lines_to_next_cell": 2, 150 | "title": "Evaluate models." 151 | }, 152 | "outputs": [ 153 | { 154 | "name": "stdout", 155 | "output_type": "stream", 156 | "text": [ 157 | "Weighted Averaging\n", 158 | " Predicted outcome: 52.13%\n", 159 | " True outcome: 55.67%\n", 160 | " Absolute diff.: 3.5426\n", 161 | "\n", 162 | "GaussianSubSVD (dim=10,l2=1e-05)\n", 163 | " Predicted outcome: 54.32%\n", 164 | " True outcome: 55.67%\n", 165 | " Absolute diff.: 1.3529\n", 166 | "\n", 167 | "Logistic SubSVD (dim=10,l2=1e-05)\n", 168 | " Predicted outcome: 54.30%\n", 169 | " True outcome: 55.67%\n", 170 | " Absolute diff.: 1.3713\n", 171 | "\n" 172 | ] 173 | } 174 | ], 175 | "source": [ 176 | "# Hyperparameters: number of latent dimensions and regularizers.\n", 177 | "D, reg = 10, 1e-5\n", 178 | "# Define models.\n", 179 | "base = WeightedAveraging(Y, weighting=weights)\n", 180 | "gaus = GaussianSubSVD(Y, weighting=weights, n_dim=D, add_bias=True, l2_reg=reg)\n", 181 | "bern = LogisticSubSVD(Y, weighting=weights, n_dim=D, add_bias=True, l2_reg=reg)\n", 182 | "\n", 183 | "for model in [base, gaus, bern]:\n", 184 | " print(model)\n", 185 | " # Predict missing results.\n", 186 | " pred = model.fit_predict(ynew)\n", 187 | " # Compute aggregate outcome. 
\n", 188 | " ypred = 1/np.sum(weights) * np.sum(weights.dot(pred))\n", 189 | " print(f' Predicted outcome: {ypred*100:.2f}%')\n", 190 | " print(f' True outcome: {ytrue*100:.2f}%')\n", 191 | " print(f' Absolute diff.: {np.abs(ypred - ytrue)*100:.4f}\\n')" 192 | ] 193 | } 194 | ], 195 | "metadata": { 196 | "jupytext": { 197 | "cell_metadata_filter": "-all", 198 | "main_language": "python", 199 | "notebook_metadata_filter": "-all" 200 | }, 201 | "kernelspec": { 202 | "display_name": "Python 3", 203 | "language": "python", 204 | "name": "python3" 205 | }, 206 | "language_info": { 207 | "codemirror_mode": { 208 | "name": "ipython", 209 | "version": 3 210 | }, 211 | "file_extension": ".py", 212 | "mimetype": "text/x-python", 213 | "name": "python", 214 | "nbconvert_exporter": "python", 215 | "pygments_lexer": "ipython3", 216 | "version": "3.8.5" 217 | } 218 | }, 219 | "nbformat": 4, 220 | "nbformat_minor": 4 221 | } 222 | -------------------------------------------------------------------------------- /tests/test_models.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | 4 | from predikon import (Model, WeightedAveraging, MatrixFactorisation, 5 | GaussianSubSVD, BayesianSubSVD, LogisticSubSVD, 6 | GaussianTensorSubSVD, LogisticTensorSubSVD) 7 | 8 | 9 | """Setup methods""" 10 | 11 | 12 | def susq(A): 13 | # sum of squares 14 | return np.sum(np.square(A)) 15 | 16 | 17 | def get_M_w_vec(): 18 | M = np.random.randn(3, 4) 19 | w = np.abs(np.random.randn(3)) 20 | return M, w 21 | 22 | 23 | def get_M_w_mat(): 24 | # Region x Vote x Party 25 | M = np.random.randn(3, 4, 2) 26 | w = np.abs(np.random.randn(3)) 27 | return M, w 28 | 29 | 30 | """Programmatical Tests""" 31 | 32 | 33 | def test_observed_indexes_vec(): 34 | M, w = get_M_w_vec() 35 | m = np.array([0, np.nan, 0]) 36 | obs_ixs = np.array([0, 2]) 37 | unobs_ixs = np.array([1]) 38 | model = Model(M, w) 39 | obs, unobs = model.get_obs_ixs(m) 40 | assert np.all(np.equal(obs, obs_ixs)) 41 | assert np.all(np.equal(unobs, unobs_ixs)) 42 | 43 | 44 | def test_observed_indexes_mat(): 45 | M, w = get_M_w_mat() 46 | m = np.array([0, np.nan, 0]) 47 | m = np.stack([m, m]).T 48 | obs_ixs = np.array([0, 2]) 49 | unobs_ixs = np.array([1]) 50 | model = Model(M, w) 51 | obs, unobs = model.get_obs_ixs(m) 52 | assert np.all(np.equal(obs, obs_ixs)) 53 | assert np.all(np.equal(unobs, unobs_ixs)) 54 | 55 | 56 | def test_prediction_not_nan_vec(): 57 | Models = [MatrixFactorisation, GaussianSubSVD, LogisticSubSVD] 58 | M, w = get_M_w_vec() 59 | m = np.array([0.0, 0.3, np.nan]) 60 | w = np.array([2, 7, 2]) 61 | for MODEL in Models: 62 | model = MODEL(M, w, n_dim=1) 63 | pred = model.fit_predict(m) 64 | assert not (np.isnan(pred[-1])) 65 | # separately test for Bayesian 66 | model = BayesianSubSVD(M, w, n_dim=1) 67 | pred, pred_std = model.fit_predict(m) 68 | assert not (np.isnan(pred[-1])) 69 | 70 | 71 | def test_prediction_not_nan_mat(): 72 | Models = [GaussianTensorSubSVD, LogisticTensorSubSVD] 73 | M, w = get_M_w_mat() 74 | m = np.array([0.0, 0.3, np.nan]) 75 | m = np.stack([m, 1-m]).T 76 | for MODEL in Models: 77 | model = MODEL(M, w) 78 | pred = model.fit_predict(m) 79 | assert not any(np.isnan(pred[-1])) 80 | 81 | 82 | def test_prediction_not_nan_vec_unreg(): 83 | Models = [GaussianSubSVD, LogisticSubSVD] 84 | M, w = get_M_w_vec() 85 | m = np.array([0.0, 0.3, np.nan]) 86 | w = np.array([2, 7, 2]) 87 | for MODEL in Models: 88 | model = MODEL(M, w, n_dim=1, l2_reg=0) 89 | pred = 
model.fit_predict(m) 90 | assert not (np.isnan(pred[-1])) 91 | 92 | 93 | def test_prediction_not_nan_mat_unreg(): 94 | Models = [GaussianTensorSubSVD, LogisticTensorSubSVD] 95 | M, w = get_M_w_mat() 96 | m = np.array([0.0, 0.3, np.nan]) 97 | m = np.stack([m, 1-m]).T 98 | for MODEL in Models: 99 | model = MODEL(M, w) 100 | pred = model.fit_predict(m) 101 | assert not any(np.isnan(pred[-1])) 102 | 103 | 104 | def test_prediction_fill_nan_only(): 105 | models = [GaussianSubSVD, LogisticSubSVD] 106 | M, w = get_M_w_vec() 107 | m = np.array([0.0, 0.3, np.nan]) 108 | w = np.array([2, 7, 2]) 109 | 110 | for model in models: 111 | model = model(M, w, n_dim=1, l2_reg=1e-5) 112 | pred = model.fit_predict(m) 113 | assert not (np.isnan(pred[-1])) 114 | assert np.allclose(pred[:2], m[:2]) 115 | 116 | # Bayesian model with std 117 | model = BayesianSubSVD(M, w, n_dim=1) 118 | pred, pred_std = model.fit_predict(m) 119 | assert not (np.isnan(pred[-1])) 120 | assert np.allclose(pred[:2], m[:2]) 121 | # observed entries have no uncertainty! 122 | assert np.allclose(pred_std[:2], np.zeros_like(m[:2])) 123 | 124 | 125 | """Methodological Tests""" 126 | 127 | 128 | def test_averaging(): 129 | M, w = get_M_w_vec() 130 | m = np.array([0.0, 0.3, np.nan]) 131 | model = WeightedAveraging(M, weighting=None) 132 | pred = model.fit_predict(m) 133 | assert pred[-1] == 0.15 134 | 135 | 136 | def test_averaging_mat(): 137 | M, w = get_M_w_mat() 138 | m = np.array([0.0, 0.3, np.nan]) 139 | m = np.stack([m, m]).T 140 | model = WeightedAveraging(M, weighting=None) 141 | pred = model.fit_predict(m) 142 | assert np.all(pred[-1] == np.array([0.15, 0.15])) 143 | 144 | 145 | def test_weighted_averaging(): 146 | M, w = get_M_w_vec() 147 | m = np.array([0.0, 0.3, np.nan]) 148 | w = np.array([2, 7, 2]) 149 | res = 0.3 * 7 / 9 150 | model = WeightedAveraging(M, w) 151 | pred = model.fit_predict(m) 152 | assert pred[-1] == res 153 | 154 | 155 | def test_weighted_averaging_mat(): 156 | M, w = get_M_w_mat() 157 | m = np.array([0.0, 0.3, np.nan]) 158 | m = np.stack([m, 1-m]).T 159 | w = np.array([2, 7, 2]) 160 | res = np.array([0.3 * 7 / 9, 2 / 9 + 0.7 * 7/9]) 161 | model = WeightedAveraging(M, w) 162 | pred = model.fit_predict(m) 163 | # should predict regions x parties 164 | assert pred.shape == (M.shape[0], M.shape[2]) 165 | assert np.all(pred[-1] == res) 166 | 167 | 168 | def test_mf_converges(): 169 | np.random.seed(235) 170 | M, w = get_M_w_vec() 171 | m = np.array([0.0, 0.3, 0.7]) 172 | w = np.array([2, 7, 2]) 173 | model = MatrixFactorisation(M, w, n_dim=1) 174 | Ms = np.concatenate([M, m.reshape(-1, 1)], 1) 175 | err_init = susq(Ms - model.U @ model.V.T) + susq(model.U) + susq(model.V) 176 | _ = model.fit_predict(m) 177 | err_train = susq(Ms - model.U @ model.V.T) + susq(model.U) + susq(model.V) 178 | assert err_train < err_init 179 | 180 | 181 | """Representation Tests""" 182 | 183 | 184 | def test_repr_vec(): 185 | # Get data. 186 | M, w = get_M_w_vec() 187 | # Set hyperparameters. 188 | d, l2 = 10, 0.1 189 | # Define expected representations. 190 | wa_repr = 'Weighted Averaging' 191 | mf_repr = 'Matrix Factorization (dim=10, lam_V=0.1, lam_U=0.1)' 192 | gaus_repr = 'GaussianSubSVD (dim=10, l2=0.1)' 193 | bern_repr = 'LogisticSubSVD (dim=10, l2=0.1)' 194 | bays_repr = 'BayesianSubSVD (dim=10)' 195 | # Test representations. 
196 |     MF = MatrixFactorisation
197 |     repr2model = {
198 |         wa_repr: WeightedAveraging(M),
199 |         mf_repr: MF(M, w, n_dim=d, lam_V=l2, lam_U=l2),
200 |         gaus_repr: GaussianSubSVD(M, w, n_dim=d, l2_reg=l2),
201 |         bern_repr: LogisticSubSVD(M, w, n_dim=d, l2_reg=l2),
202 |         bays_repr: BayesianSubSVD(M, w, n_dim=d)
203 |     }
204 |     for repr_, model in repr2model.items():
205 |         assert repr_ == model.__repr__()
206 | 
207 | 
208 | def test_repr_mat():
209 |     # Get data.
210 |     M, w = get_M_w_mat()
211 |     # Set hyperparameters.
212 |     d, l2 = 10, 0.1
213 |     # Define expected representations.
214 |     gaus_repr = 'GaussianTensorSubSVD (dim=10, l2=0.1)'
215 |     bern_repr = 'LogisticTensorSubSVD (dim=10, l2=0.1)'
216 |     # Test representations.
217 |     repr2model = {
218 |         gaus_repr: GaussianTensorSubSVD(M, w, n_dim=d, l2_reg=l2),
219 |         bern_repr: LogisticTensorSubSVD(M, w, n_dim=d, l2_reg=l2)
220 |     }
221 |     for repr_, model in repr2model.items():
222 |         assert repr_ == model.__repr__()
223 | 
224 | 
225 | """Exception Tests"""
226 | 
227 | 
228 | def test_unallowed_weighting_length():
229 |     with pytest.raises(ValueError, match=r".*dimension.*"):
230 |         M, w = get_M_w_vec()
231 |         w = w[:-1]
232 |         _ = Model(M, w)
233 | 
234 | 
235 | def test_unallowed_weighting_shape():
236 |     with pytest.raises(ValueError, match=r".*dimension.*"):
237 |         M, w = get_M_w_vec()
238 |         w = np.ones((len(w), len(w)))
239 |         _ = Model(M, w)
240 | 
--------------------------------------------------------------------------------
/predikon/models.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.linear_model import BayesianRidge, LogisticRegression, Ridge
3 | 
4 | LARGE_FLOAT = 1e9
5 | 
6 | 
7 | class Model:
8 |     """
9 |     Base class for real-time predictive models for regional results.
10 | 
11 |     Generally, subclasses of this model can be used in the following way:
12 |     ```
13 |     import numpy as np
14 |     R, V = 100, 10
15 |     # M is Region x Votes or Region x Votes x Parties.
16 |     M_historical = np.random.rand(R, V)
17 |     m_current = np.random.rand(R)
18 |     m_current[R // 2:] = np.nan  # Mark unobserved regions as missing.
19 |     # Weighting is an R-dimensional array associated with each region.
20 |     weighting = np.abs(np.random.randn(R))
21 |     m = Model(M_historical, weighting)
22 |     pred = m.fit_predict(m_current)
23 |     # `pred` has the nans filled in with predictions.
24 |     ```
25 | 
26 |     Attributes:
27 |         M_historical: RxV or RxVxP dimensional np.array with outcomes in [0,1],
28 |             where R is the number of regions, V the number of votes, and P the
29 |             number of parties (non-binary outcome).
30 |         weighting: R dimensional np.array with weights associated to each
31 |             region; if `weighting=None` the algorithm is unweighted (equivalent
32 |             to np.ones(R)).
33 |     """
34 | 
35 |     def __init__(self, M_historical, weighting=None):
36 |         R = M_historical.shape[0]
37 |         if weighting is None:
38 |             weighting = np.ones(R)
39 |         if weighting.ndim != 1:
40 |             raise ValueError(
41 |                 'Weighting must be an np.array of dimension {}'.format(R)
42 |             )
43 |         if len(weighting) != M_historical.shape[0]:
44 |             raise ValueError(
45 |                 'Weighting must be an np.array of dimension {}'.format(R)
46 |             )
47 |         self.M_historical = M_historical
48 |         # Make weights sum to R.
49 |         self.weighting = weighting / np.sum(weighting) * R
50 | 
51 |     def fit_predict(self, m_current):
52 |         """Fit the model and predict the unobserved values.
53 | 
54 |         Fit the model to the current observations and predict the missing
55 |         entries of `m_current` as indicated by the np.nans.
56 | 57 | Arguments: 58 | m_current : R or RxP dimensional np.array with outcomes in [0,1] 59 | and unobserved entries which are given as np.nan. 60 | """ 61 | raise NotImplementedError 62 | 63 | def get_obs_ixs(self, m_current): 64 | """Returns the indices of observed and unobserved regions. 65 | 66 | Turn the array `m_current` into two index-lists of observed and 67 | unobserved regions by detecting unobserved nan-values. 68 | 69 | Arguments: 70 | m_current : R or RxP dimensional np.array with outcomes in [0,1] 71 | and unobserved entries which are given as np.nan 72 | """ 73 | if m_current.ndim == 1: 74 | assert len(m_current) == self.M_historical.shape[0] 75 | # R - vector. 76 | unobserved = np.isnan(m_current) 77 | elif m_current.ndim == 2: 78 | R, P = m_current.shape 79 | assert ( 80 | R == self.M_historical.shape[0] 81 | and P == self.M_historical.shape[2] 82 | ) 83 | # R x P dim. 84 | unobserved = np.any(np.isnan(m_current), axis=-1) 85 | 86 | obs, unobs = np.where(~unobserved), np.where(unobserved) 87 | return obs, unobs 88 | 89 | def __repr__(self): 90 | return 'Predikon BaseModel' 91 | 92 | 93 | class WeightedAveraging(Model): 94 | """Describes a weighted average baseline. 95 | 96 | Same as `Averaging` but additionally weighs the observed entries in 97 | `m_current` by the given weights. This model is preferred over Averaging 98 | if population or counts of votes are available. 99 | """ 100 | 101 | def fit_predict(self, m_current): 102 | """See base class.""" 103 | obs, unobs = self.get_obs_ixs(m_current) 104 | wts = self.weighting[obs] 105 | N = wts.sum() 106 | if m_current.ndim == 2: 107 | pred = (m_current[obs] * wts.reshape(-1, 1)).sum(axis=0) / N 108 | else: 109 | pred = (m_current[obs] * wts).sum(axis=-1) / N 110 | pred_col = np.ones_like(m_current) * pred 111 | pred_col[obs] = m_current[obs] 112 | return pred_col 113 | 114 | def __repr__(self): 115 | return 'Weighted Averaging' 116 | 117 | 118 | class MatrixFactorisation(Model): 119 | r"""Describes a matrix factorization model. 120 | 121 | Probabilistic matrix factorization model that implements 122 | alternating least-squares to minimize the loss 123 | ``L(U, V) = 1/2 \|M - UV^T\|_2^2 + lam_U/2 \|U\|_2^2 + lam_V/2 \|V\|_2^2,`` 124 | where U, V contain the latent factors and M is the concatenation of 125 | `M_historical` and `m_current`. The matrices `U` and `V` are updated in an 126 | alternating fashion. For the updates, see `update_U` and `update_V`. 127 | Variables are initialized uniformly between 0 and 1/(n_dim+1) `U` is 128 | a `Region x (n_dim+1)` and `V` a `Votes x (n_dim+1)` matrix. The 129 | dimensionality is increased because we include a bias per vote as in _Etter 130 | et al (2016)_. 131 | 132 | Matrix factorization for regional vote prediction is proposed 133 | in _Online Collaborative Prediction of Regional Vote Results_ 134 | by Etter et al., DSAA 2016_. 135 | 136 | Attributes: 137 | n_iter: Number of alternating iterations which determines how often 138 | `U`, and `V` are updated each. 139 | n_dim: Number of latent dimensions; does not include the bias. 140 | lam_U: Regularizer for latent factors `U`. 141 | lam_V: Regularizer for latent factors `V`. 
142 | """ 143 | 144 | def __init__( 145 | self, 146 | M_historical, 147 | weighting, 148 | n_iter=20, 149 | n_dim=25, 150 | lam_U=1e-1, 151 | lam_V=1e-1, 152 | ): 153 | super().__init__(M_historical, weighting) 154 | self.n_iter_cache = n_iter 155 | if len(M_historical.shape) > 2: 156 | raise ValueError('Tensor not factorisable.') 157 | # Initiatialize like netflix prize papers. 158 | U = np.random.rand(len(M_historical), n_dim + 1) / (n_dim + 1) 159 | U[:, -1] = 1.0 160 | V = np.random.rand(len(M_historical[0]) + 1, n_dim + 1) / (n_dim + 1) 161 | self.U, self.V = U, V 162 | self.lam_U = lam_U 163 | self.lam_V = lam_V 164 | self.n_dim = n_dim 165 | 166 | def fit_predict(self, m_current): 167 | """See base class.""" 168 | obs, unobs = self.get_obs_ixs(m_current) 169 | for i in range(self.n_iter_cache): 170 | observed = np.zeros_like(m_current, dtype=bool) 171 | observed[obs] = True 172 | self.update_U(m_current, observed) 173 | self.update_V(m_current, observed) 174 | pred = self.U @ self.V[-1, :] 175 | pred[obs] = m_current[obs] 176 | return pred 177 | 178 | def update_U(self, m, obs): 179 | # (1) Update representations of fully observed regions. 180 | M = np.concatenate([self.M_historical[obs], m[obs].reshape(-1, 1)], 1) 181 | U = self.U[obs, :-1] 182 | V, b = self.V[:, :-1], self.V[:, -1] 183 | ones = np.ones(len(U)) 184 | B = ((M - np.outer(ones, b)) @ V).T 185 | A = V.T @ V + self.lam_U * np.eye(self.n_dim) 186 | self.U[obs, :-1] = np.linalg.solve(A, B).T 187 | 188 | # (2) Update representations of other regions. 189 | M = self.M_historical[~obs] 190 | U = self.U[~obs, :-1] 191 | # Only take prior vote representations to update. 192 | V, b = self.V[:-1, :-1], self.V[:-1, -1] 193 | ones = np.ones(len(U)) 194 | B = ((M - np.outer(ones, b)) @ V).T 195 | A = V.T @ V + self.lam_U * np.eye(self.n_dim) 196 | self.U[~obs, :-1] = np.linalg.solve(A, B).T 197 | 198 | def update_V(self, m, obs): 199 | # (1) Update all prior vote representations (fully observed ones). 200 | B = (self.M_historical.T @ self.U).T 201 | eye = np.eye(self.n_dim + 1) 202 | # Don't regularize bias. 203 | eye[-1, -1] = 0 204 | A = self.U.T @ self.U + self.lam_V * eye 205 | self.V[:-1] = np.linalg.solve(A, B).T 206 | 207 | # (2) Update the new vote representation. 208 | U = self.U[obs] 209 | B = (m[obs].reshape(-1, 1).T @ U).T 210 | A = U.T @ U + self.lam_V * eye 211 | self.V[-1:] = np.linalg.solve(A, B).T 212 | 213 | def __repr__(self): 214 | repr_ = 'Matrix Factorization' 215 | repr_ += ' (dim={}'.format(str(self.n_dim)) 216 | repr_ += ', lam_V={}'.format(str(self.lam_V)) 217 | repr_ += ', lam_U={})'.format(str(self.lam_U)) 218 | return repr_ 219 | 220 | 221 | class SubSVD(Model): 222 | r"""Describes the base model for the main algorithm of this library. 223 | 224 | This is the basic model as proposed in _Sub-Matrix Factorization for 225 | Real-Time Vote Prediction KDD'20 by A. Immer and V. Kristof et al. The 226 | `M_historical` fully observed matrix is decomposed using the SVD which is 227 | the optimal solution to non-regularized Matrix Factorization. In this case, 228 | `M_historical` is `Regions x Votes`. A generalized linear model (GLM) is 229 | applied to the low-rank regional representations in `self.U`. 230 | 231 | Here, we implement a Gaussian, Bernoulli, and Categorical likelihoods due 232 | to their relevance in political forecasting. Let U denote the regional 233 | features obtained due to the SubSVD. 
We minimize the following loss now 234 | with w_i being `self.weighting[i]` and further assuming 235 | `self.weighting.sum() == len(self.weighting)`: 236 | ``l(w) = - sum_{i \\in Observed} w_i \log p(y|U_i^T w + b) 237 | + l2_reg/ 2 \|w\|_2^2,`` 238 | where U are the regional features and w the parameter of the GLM. 239 | b is either 0 if `add_bias=False` or otherwise an optimized parameter. 240 | 241 | Attributes: 242 | n_dim: Number of dimensions in the low-rank representation of 243 | `M_historical`; determines the dimensionality of latent regional 244 | features. 245 | add_bias: Determines whether a bias should be added (default: True). 246 | l2_reg: L2-regularizer, in line with `lam_U/V` in the 247 | MatrixFactorization class. 248 | keep_svals: Determines whether to keep the singular values factored 249 | into the regional representations in `self.U`, i.e., the features 250 | for the GLM. If `l2_reg==0/None` this has no effect. Otherwise it 251 | determines the feature importances (default: True. 252 | """ 253 | 254 | def __init__( 255 | self, 256 | M_historical, 257 | weighting=None, 258 | n_dim=10, 259 | add_bias=True, 260 | l2_reg=1e-5, 261 | keep_svals=True, 262 | ): 263 | 264 | if M_historical.ndim > 2: 265 | raise ValueError('Tensor not factorizable. Use TensorSubSVD.') 266 | super().__init__(M_historical, weighting) 267 | U, s, _ = np.linalg.svd(M_historical) 268 | self.n_dim = n_dim 269 | self.U = U[:, :n_dim] * s[None, :n_dim] if keep_svals else U[:, :n_dim] 270 | self.l2_reg = l2_reg 271 | self.add_bias = add_bias 272 | 273 | 274 | class GaussianSubSVD(SubSVD): 275 | """WeightedSubSVD model with Gaussian likelihood in the GLM. 276 | 277 | The Gaussian likelihood has unit variance. 278 | 279 | Attributes: 280 | See base class. 281 | """ 282 | 283 | def fit_predict(self, m_current): 284 | obs, _ = self.get_obs_ixs(m_current) 285 | Uo, mo, wo = self.U[obs], np.copy(m_current)[obs], self.weighting[obs] 286 | if self.l2_reg is None or self.l2_reg == 0: 287 | self.l2_reg = 1 / LARGE_FLOAT 288 | ridge = Ridge(alpha=self.l2_reg, fit_intercept=self.add_bias) 289 | ridge.fit(Uo, mo, sample_weight=wo) 290 | pred = ridge.predict(self.U) 291 | pred[obs] = m_current[obs] 292 | return pred 293 | 294 | def __repr__(self): 295 | repr_ = 'GaussianSubSVD' 296 | repr_ += ' (dim={}'.format(str(self.n_dim)) 297 | repr_ += ', l2={})'.format(str(self.l2_reg)) 298 | return repr_ 299 | 300 | 301 | class BayesianSubSVD(SubSVD): 302 | """Bayesian version of the GaussianSubSVD model. 303 | 304 | The observation noise and prior precision are automatically learned using 305 | empirical Bayes. 306 | 307 | Attributes: 308 | See base class. 
309 | """ 310 | 311 | def __init__( 312 | self, 313 | M_historical, 314 | weighting=None, 315 | n_dim=10, 316 | add_bias=True, 317 | keep_svals=True, 318 | alpha_init=None, 319 | lambda_init=None, 320 | alpha_1=1e-6, 321 | alpha_2=1e-6, 322 | lambda_1=1e-6, 323 | lambda_2=1e-6, 324 | ): 325 | super().__init__( 326 | M_historical, 327 | weighting=weighting, 328 | n_dim=n_dim, 329 | add_bias=add_bias, 330 | keep_svals=keep_svals, 331 | ) 332 | self.alpha_init = alpha_init 333 | self.lambda_init = lambda_init 334 | self.alpha_1 = alpha_1 335 | self.alpha_2 = alpha_2 336 | self.lambda_1 = lambda_1 337 | self.lambda_2 = lambda_2 338 | 339 | def fit_predict(self, m_current): 340 | obs, _ = self.get_obs_ixs(m_current) 341 | Uo, mo, wo = self.U[obs], np.copy(m_current)[obs], self.weighting[obs] 342 | ridge = BayesianRidge( 343 | fit_intercept=self.add_bias, 344 | alpha_init=self.alpha_init, 345 | lambda_init=self.lambda_init, 346 | alpha_1=self.alpha_1, 347 | alpha_2=self.alpha_2, 348 | lambda_1=self.lambda_1, 349 | lambda_2=self.lambda_2, 350 | ) 351 | ridge.fit(Uo, mo, sample_weight=wo) 352 | pred, pred_std = ridge.predict(self.U, return_std=True) 353 | pred[obs] = m_current[obs] 354 | # standard deviation of observed regions is 0 355 | pred_std[obs] = 0.0 356 | self.model = ridge 357 | return pred, pred_std 358 | 359 | def __repr__(self): 360 | repr_ = 'BayesianSubSVD' 361 | repr_ += ' (dim={})'.format(str(self.n_dim)) 362 | return repr_ 363 | 364 | 365 | class LogisticSubSVD(SubSVD): 366 | """SubSVD model with Bernoulli likelihood in the GLM. 367 | 368 | Logistic refers to both Bernoulli and categorical likelihoods. 369 | Categorical is used in the Tensor case. Here, we have binary outcomes. 370 | 371 | Attributes: 372 | See base class. 373 | """ 374 | 375 | @staticmethod 376 | def transform_problem(Uo, mo, wo): 377 | """Transform observations p in [0,1] to a binary classificaiton problem 378 | by turning p into a weight and having both observation 0 and 1. 379 | 380 | To do so, we repeat the dataset for label y==1 and y==0 and use the 381 | probabilities as weights. This is equal to cross-entropy regression. 382 | """ 383 | 384 | n = Uo.shape[0] 385 | wo = np.tile(wo, 2) 386 | y = np.zeros(2 * n) 387 | y[:n] = 1 388 | wts = np.tile(mo, 2) 389 | wts[n:] = 1 - wts[n:] 390 | wts = wts * wo 391 | X = np.tile(Uo, (2, 1)) 392 | return X, y, wts 393 | 394 | def fit_predict(self, m_current): 395 | """See base class.""" 396 | obs, unobs = self.get_obs_ixs(m_current) 397 | Uo, mo, wo = self.U[obs], m_current[obs], self.weighting[obs] 398 | C = LARGE_FLOAT if self.l2_reg == 0 else 1 / self.l2_reg 399 | logreg = LogisticRegression( 400 | C=C, 401 | fit_intercept=self.add_bias, 402 | solver='liblinear', 403 | tol=1e-6, 404 | max_iter=500, 405 | ) 406 | X, y, wts = self.transform_problem(Uo, mo, wo) 407 | logreg.fit(X, y, sample_weight=wts) 408 | pred = logreg.predict_proba(self.U)[:, 1] 409 | pred[obs] = m_current[obs] 410 | return pred 411 | 412 | def __repr__(self): 413 | repr_ = 'LogisticSubSVD' 414 | repr_ += ' (dim={}'.format(str(self.n_dim)) 415 | repr_ += ', l2={})'.format(str(self.l2_reg)) 416 | return repr_ 417 | 418 | 419 | class TensorSubSVD(Model): 420 | """Describes the base model for the multiple outcome cases. 421 | 422 | Corresponding base model to SubSVD but applicable to non-binary elections. 423 | For non-binary outcomes, we assume a tensor of `Regions x Votes x Parties`. 424 | _Parties_ refers to the amount of options a voter has. 
425 | 426 | In comparison to SubSVD, the tensor needs to be collapsed first to apply 427 | the SVD. This is simply done by forming a `Regions x (Votes * Parties)` 428 | matrix (reshaping). The GLM that is applied works equivalently as before: 429 | Using the regional feature vector, a linear model predicts the 430 | _Parties_-dimensional outcome vector. 431 | 432 | In the subclasses, we have either a multivariate Gaussian likelihood 433 | or a categorical likelihood. 434 | 435 | Attributes: 436 | n_dim: 437 | Number of retained dimensions in the low-rank representation of 438 | `M_historical`; determines the dimensionality of latent regional 439 | features. 440 | add_bias: 441 | Determines whether a bias should be added (default: True). 442 | l2_reg: 443 | L2-regularizer, in line with `lam_U/V` in the MatrixFactorization 444 | class. 445 | keep_svals: Determines whether to keep the singular values factored 446 | into the regional representations in `self.U` which are the 447 | features for the GLM. If `l2_reg==0/None` this has no effect. 448 | Otherwise it determines the feature importances. 449 | """ 450 | 451 | def __init__( 452 | self, 453 | M_historical, 454 | weighting, 455 | n_dim=10, 456 | add_bias=True, 457 | l2_reg=1e-5, 458 | keep_svals=True, 459 | ): 460 | if len(M_historical.shape) < 3: 461 | raise ValueError('Requires Tensor') 462 | super().__init__(M_historical, weighting) 463 | M_historical = M_historical.reshape(M_historical.shape[0], -1) 464 | U, s, _ = np.linalg.svd(M_historical) 465 | self.U = U[:, :n_dim] * s[None, :n_dim] if keep_svals else U[:, :n_dim] 466 | self.l2_reg = l2_reg 467 | self.add_bias = add_bias 468 | self.n_dim = n_dim 469 | 470 | 471 | class GaussianTensorSubSVD(TensorSubSVD): 472 | """TensorSubSVD with multivariate Gaussian likelihood for the GLM.""" 473 | 474 | def fit_predict(self, m_current): 475 | obs, _ = self.get_obs_ixs(m_current) 476 | Uo, mo, wo = self.U[obs], m_current[obs], self.weighting[obs] 477 | if self.l2_reg == 0: 478 | self.l2_reg = 1 / LARGE_FLOAT 479 | ridge = Ridge(alpha=self.l2_reg, fit_intercept=self.add_bias) 480 | ridge.fit(Uo, mo, sample_weight=wo) 481 | pred = ridge.predict(self.U) 482 | pred[obs] = m_current[obs] 483 | return pred 484 | 485 | def __repr__(self): 486 | repr_ = 'GaussianTensorSubSVD' 487 | repr_ += ' (dim={}'.format(str(self.n_dim)) 488 | repr_ += ', l2={})'.format(str(self.l2_reg)) 489 | return repr_ 490 | 491 | 492 | class LogisticTensorSubSVD(TensorSubSVD): 493 | """TensorSubSVD with Categorical likelihood with _Parties_ categories. 494 | All parameters are as in TensorSubSVD but we additionally initialize 495 | the logistic model (categorical GLM) for warmstarts. 496 | """ 497 | 498 | def __init__( 499 | self, 500 | M_historical, 501 | weighting, 502 | n_dim=10, 503 | add_bias=True, 504 | l2_reg=1e-5, 505 | keep_svals=True, 506 | ): 507 | super().__init__( 508 | M_historical, weighting, n_dim, add_bias, l2_reg, keep_svals 509 | ) 510 | C = LARGE_FLOAT if self.l2_reg == 0 else 1 / self.l2_reg 511 | self.model = LogisticRegression( 512 | C=C, 513 | fit_intercept=add_bias, 514 | tol=1e-6, 515 | solver='newton-cg', 516 | max_iter=5000, 517 | multi_class='multinomial', 518 | n_jobs=4, 519 | warm_start=True, 520 | ) 521 | 522 | @staticmethod 523 | def transform_problem(Uo, mo, wo): 524 | """Transform non-categorical floating outcomes to categorical outcomes 525 | and corresponding weights (see LogisticSubSVD.transform_problem). 
526 | 527 | The _Parties_-dimensional rows of `mo` need to sum to 1 but each entry 528 | can be a float in `[0,1]`. 529 | Then, we make _Parties_ observations out of this that are categorical 530 | and each is weighted by the probability indicated as in `mo`. 531 | """ 532 | n, k = mo.shape 533 | wo = np.tile(wo, k) 534 | # Classes 0*n, 1*n, ..., k*n. 535 | y = np.arange(k).repeat(n) 536 | # Weights are probabilities of respective class. 537 | wts = mo.reshape(-1, order='F') 538 | # Repeat data. 539 | X = np.tile(Uo, (k, 1)) 540 | wts = wts * wo 541 | return X, y, wts 542 | 543 | def fit_predict(self, m_current): 544 | obs, unobs = self.get_obs_ixs(m_current) 545 | Uo, mo, wo = self.U[obs], m_current[obs], self.weighting[obs] 546 | X, y, wts = self.transform_problem(Uo, mo, wo) 547 | self.model.fit(X, y, sample_weight=wts) 548 | pred = self.model.predict_proba(self.U) 549 | pred[obs] = m_current[obs] 550 | return pred 551 | 552 | def __repr__(self): 553 | repr_ = 'LogisticTensorSubSVD' 554 | repr_ += ' (dim={}'.format(str(self.n_dim)) 555 | repr_ += ', l2={})'.format(str(self.l2_reg)) 556 | return repr_ 557 | -------------------------------------------------------------------------------- /tests/data/data.csv: -------------------------------------------------------------------------------- 1 | 0.7136,0.5607,0.4511,0.6182,0.6123,0.5466,0.6359,0.6285,0.4170,0.6597,0.4103,0.6571,0.6604,0.2518,0.0583,0.2385,0.3739,0.8278,0.2881,0.3456,0.4049,0.1657,0.3557,0.5547,0.5234,0.2569,0.4601,0.3099,0.4837,0.6242,0.6428,0.7002 2 | 0.3463,0.5947,0.4565,0.4442,0.5976,0.3761,0.3559,0.4945,0.4835,0.6336,0.5494,0.6513,0.5903,0.2186,0.0784,0.3205,0.5092,0.7634,0.3772,0.3826,0.4047,0.3163,0.2693,0.6088,0.3652,0.4178,0.4928,0.5713,0.5445,0.5581,0.6228,0.6000 3 | 0.3221,0.6029,0.5028,0.6145,0.7097,0.4765,0.5061,0.5760,0.3673,0.4385,0.4782,0.6715,0.6853,0.1422,0.0460,0.1853,0.3747,0.7083,0.2826,0.3182,0.5444,0.1034,0.1796,0.5259,0.3259,0.4382,0.2181,0.2812,0.3835,0.3966,0.6706,0.5880 4 | 0.5556,0.6199,0.6639,0.8294,0.6926,0.5862,0.5303,0.7565,0.4740,0.6707,0.6003,0.9098,0.6748,0.2656,0.0667,0.1598,0.5582,0.5950,0.5761,0.4781,0.5024,0.2098,0.3189,0.5096,0.4487,0.5302,0.2201,0.3706,0.3950,0.6415,0.6667,0.5845 5 | 0.5233,0.7638,0.6720,0.8745,0.6221,0.3639,0.5342,0.6863,0.4698,0.6095,0.6727,0.9039,0.6983,0.1662,0.0250,0.1381,0.5387,0.6288,0.4578,0.4373,0.4683,0.2174,0.2704,0.3736,0.3263,0.6157,0.2438,0.3339,0.4816,0.5992,0.7469,0.6384 6 | 0.7723,0.5660,0.7235,0.7133,0.5137,0.6066,0.6082,0.7236,0.1674,0.7222,0.5530,0.7157,0.7389,0.1877,0.0640,0.1493,0.5656,0.7644,0.2127,0.2757,0.5072,0.1725,0.1883,0.4974,0.5284,0.2687,0.4506,0.1922,0.5455,0.7271,0.5626,0.7676 7 | 0.3826,0.6098,0.6449,0.6594,0.6682,0.3491,0.3460,0.5653,0.5341,0.6821,0.6316,0.7663,0.7429,0.2736,0.0901,0.2872,0.5794,0.7185,0.1877,0.3619,0.6069,0.3381,0.3281,0.4771,0.2322,0.3033,0.4505,0.4911,0.6365,0.5870,0.7116,0.5396 8 | 0.3333,0.6517,0.4954,0.5785,0.6514,0.2992,0.3147,0.4349,0.4650,0.4897,0.5427,0.6606,0.6400,0.2133,0.0606,0.2907,0.4797,0.6365,0.3650,0.4749,0.4651,0.2440,0.2879,0.4808,0.3095,0.3717,0.3842,0.5959,0.4770,0.4283,0.6724,0.5043 9 | 0.3878,0.5986,0.5413,0.5007,0.6950,0.3463,0.3465,0.5703,0.4868,0.6237,0.5519,0.7222,0.6727,0.2921,0.0929,0.3358,0.5040,0.6883,0.2402,0.2936,0.6126,0.2701,0.3264,0.4706,0.2793,0.3122,0.4140,0.5080,0.6805,0.5382,0.7079,0.5760 10 | 
0.3383,0.6008,0.4766,0.4249,0.6476,0.3197,0.2707,0.4036,0.3666,0.5282,0.4644,0.5909,0.6271,0.2755,0.0476,0.2966,0.4225,0.7427,0.2724,0.3685,0.4478,0.2471,0.2947,0.4609,0.2000,0.3277,0.3926,0.6518,0.5338,0.6117,0.6810,0.6049 11 | 0.2039,0.6502,0.4235,0.4613,0.6985,0.1702,0.2590,0.4074,0.5273,0.5537,0.4554,0.6550,0.5956,0.2801,0.0621,0.3656,0.4092,0.6261,0.3476,0.3792,0.4358,0.2069,0.3457,0.4685,0.2056,0.4328,0.4584,0.5856,0.5271,0.5846,0.6504,0.5488 12 | 0.3935,0.6219,0.4028,0.6380,0.6345,0.4200,0.5041,0.6940,0.4624,0.5245,0.4889,0.6250,0.6015,0.1383,0.0374,0.2101,0.3068,0.6454,0.3542,0.3333,0.4035,0.1301,0.2657,0.5974,0.3397,0.4491,0.2080,0.3645,0.3984,0.4173,0.6923,0.5534 13 | 0.1327,0.5587,0.2436,0.4043,0.6897,0.2640,0.3061,0.2756,0.3704,0.4257,0.5016,0.4093,0.4636,0.2500,0.0673,0.4047,0.4256,0.7162,0.3048,0.4332,0.2949,0.2282,0.3485,0.6250,0.2442,0.3494,0.4392,0.6261,0.3169,0.4785,0.5762,0.5810 14 | 0.4426,0.7345,0.5983,0.8256,0.8116,0.3706,0.5537,0.6937,0.6471,0.5714,0.5902,0.8681,0.7160,0.1357,0.0088,0.1068,0.4118,0.5987,0.3980,0.4740,0.4024,0.1868,0.2612,0.4717,0.5354,0.6157,0.1832,0.4179,0.4783,0.5102,0.8000,0.5733 15 | 0.6000,0.5889,0.5607,0.4051,0.6164,0.2446,0.3657,0.4733,0.4118,0.3857,0.4109,0.2158,0.5690,0.0851,0.0435,0.1854,0.4706,0.6996,0.1726,0.1881,0.6687,0.1898,0.1254,0.4858,0.3208,0.4101,0.1020,0.6575,0.5786,0.4259,0.6753,0.2877 16 | 0.2479,0.6567,0.3854,0.5399,0.7385,0.2767,0.3211,0.4255,0.4572,0.5306,0.5607,0.7023,0.5367,0.2031,0.0595,0.2723,0.4847,0.6569,0.2688,0.4610,0.4848,0.2330,0.4003,0.5863,0.2984,0.4174,0.4024,0.6030,0.4672,0.4501,0.6888,0.5070 17 | 0.7706,0.5892,0.4694,0.6538,0.6771,0.5422,0.6797,0.4629,0.2160,0.5489,0.4536,0.6185,0.6635,0.1370,0.0219,0.2381,0.3692,0.7227,0.1705,0.3710,0.3854,0.1516,0.3008,0.4912,0.6983,0.3306,0.3512,0.2857,0.4619,0.6593,0.6336,0.7331 18 | 0.2122,0.6091,0.3587,0.4644,0.7033,0.2571,0.2087,0.4128,0.4434,0.4188,0.3725,0.7028,0.4825,0.1478,0.0317,0.1803,0.3802,0.7029,0.2812,0.4752,0.4282,0.1960,0.3158,0.5597,0.1837,0.3484,0.3364,0.7293,0.5279,0.4553,0.5935,0.4688 19 | 0.3529,0.5714,0.3540,0.3668,0.5897,0.3445,0.5129,0.3026,0.2577,0.4876,0.3272,0.5682,0.4560,0.1117,0.0561,0.3346,0.3051,0.7231,0.3491,0.5090,0.2576,0.2888,0.4953,0.6413,0.3476,0.4491,0.3851,0.6218,0.3198,0.5371,0.5394,0.5864 20 | 0.3357,0.6283,0.6217,0.5870,0.6701,0.3180,0.3300,0.6156,0.4795,0.6541,0.6126,0.7640,0.6926,0.2172,0.0709,0.2500,0.5438,0.6975,0.2693,0.4313,0.6268,0.2689,0.2925,0.5437,0.2487,0.3556,0.3999,0.5411,0.5939,0.5994,0.6292,0.5000 21 | 0.3534,0.6071,0.5666,0.5186,0.6816,0.3342,0.3027,0.5442,0.5036,0.5447,0.5609,0.6808,0.6849,0.2572,0.0665,0.2740,0.5764,0.7160,0.3294,0.4241,0.5309,0.2690,0.2963,0.5492,0.2714,0.3514,0.4596,0.4908,0.5372,0.4785,0.6261,0.4619 22 | 0.6279,0.7886,0.5325,0.7549,0.6522,0.3697,0.5979,0.6386,0.6182,0.6087,0.6029,0.8396,0.7303,0.1429,0.0763,0.0968,0.5269,0.7909,0.2738,0.4561,0.5926,0.1860,0.3704,0.3429,0.3768,0.4153,0.3534,0.3077,0.5625,0.6143,0.7075,0.6542 23 | 0.4407,0.6326,0.6343,0.6258,0.6979,0.3468,0.3528,0.6626,0.5179,0.7252,0.6334,0.7780,0.7301,0.2966,0.0913,0.2681,0.5930,0.6871,0.2175,0.3668,0.6446,0.2569,0.3620,0.4521,0.2916,0.3467,0.4392,0.4976,0.6786,0.5745,0.6712,0.4940 24 | 0.3818,0.4516,0.4225,0.5645,0.6939,0.3867,0.3617,0.5246,0.2857,0.5769,0.5570,0.8545,0.5417,0.2667,0.0185,0.2698,0.5949,0.7164,0.4898,0.5070,0.4000,0.3091,0.4375,0.6140,0.3438,0.3387,0.4103,0.6389,0.5254,0.8167,0.5636,0.5686 25 | 
0.3129,0.6366,0.4517,0.5730,0.7590,0.3140,0.3261,0.5348,0.5475,0.5611,0.5818,0.7032,0.6781,0.2619,0.0849,0.2920,0.5300,0.6638,0.3045,0.4085,0.5370,0.1979,0.4665,0.5326,0.2968,0.3812,0.4427,0.5144,0.5546,0.4788,0.6519,0.4895 26 | 0.3515,0.6977,0.6077,0.6099,0.5926,0.2612,0.2703,0.5948,0.5854,0.5325,0.6688,0.6718,0.7587,0.2139,0.0818,0.2731,0.6110,0.6126,0.2467,0.3430,0.6385,0.2134,0.1774,0.4472,0.1777,0.4837,0.4295,0.5428,0.6692,0.4846,0.7512,0.5152 27 | 0.3483,0.6455,0.3660,0.6352,0.6211,0.4719,0.4461,0.5497,0.4240,0.4664,0.3613,0.5633,0.6297,0.1143,0.0235,0.1909,0.2890,0.6244,0.3119,0.4135,0.4448,0.1534,0.1870,0.5122,0.2317,0.4929,0.1534,0.4126,0.2940,0.2442,0.6508,0.5948 28 | 0.4108,0.6145,0.6792,0.5768,0.6311,0.3704,0.2891,0.6510,0.4961,0.6000,0.6441,0.7852,0.7151,0.2965,0.0689,0.3156,0.6147,0.6312,0.2333,0.1866,0.5928,0.2867,0.1590,0.4961,0.2956,0.4177,0.2548,0.5359,0.6098,0.5407,0.6922,0.5159 29 | 0.4470,0.7168,0.4629,0.8047,0.6954,0.4481,0.5222,0.5527,0.4670,0.5599,0.5344,0.7423,0.6912,0.1595,0.0479,0.1761,0.3469,0.6546,0.3691,0.3474,0.4820,0.1251,0.2921,0.4431,0.3679,0.4993,0.2840,0.3784,0.5319,0.4861,0.7439,0.6618 30 | 0.5785,0.7667,0.5809,0.7919,0.6277,0.3731,0.5308,0.7247,0.5107,0.6117,0.6092,0.8680,0.7657,0.1611,0.0638,0.1603,0.4486,0.6790,0.2939,0.4158,0.5536,0.1350,0.2970,0.4248,0.3991,0.4761,0.3452,0.3031,0.5734,0.4990,0.7053,0.6094 31 | 0.5503,0.6288,0.6910,0.6102,0.6036,0.3166,0.3581,0.6311,0.3934,0.6845,0.6576,0.8721,0.5945,0.2917,0.0670,0.2511,0.5726,0.7184,0.2328,0.3944,0.6884,0.4278,0.3402,0.4909,0.3575,0.3170,0.4797,0.5315,0.6488,0.6273,0.7521,0.6256 32 | 0.3118,0.7259,0.6490,0.7347,0.7743,0.2514,0.1909,0.6889,0.6555,0.5925,0.6718,0.8341,0.7329,0.2581,0.0586,0.1843,0.7187,0.5123,0.3715,0.4822,0.6433,0.2528,0.1983,0.4731,0.1982,0.6043,0.2992,0.6033,0.7187,0.4365,0.7609,0.3573 33 | 0.3495,0.5430,0.5439,0.5221,0.5604,0.3578,0.3442,0.5339,0.4328,0.5375,0.5461,0.7265,0.6437,0.2763,0.0616,0.2890,0.4857,0.7390,0.2541,0.3234,0.5290,0.2852,0.3260,0.4951,0.2523,0.3003,0.4688,0.5325,0.6086,0.6060,0.6429,0.6277 34 | 0.5060,0.5886,0.6756,0.6728,0.6592,0.2410,0.3320,0.6881,0.5421,0.8201,0.7243,0.8814,0.6807,0.2632,0.0783,0.2407,0.7288,0.6319,0.4046,0.3689,0.5822,0.2938,0.2724,0.5254,0.3610,0.4422,0.4236,0.4228,0.6090,0.6760,0.6706,0.4737 35 | 0.4674,0.6260,0.6778,0.5647,0.6591,0.4028,0.4030,0.6724,0.5079,0.7093,0.6359,0.7631,0.7241,0.2756,0.0847,0.2846,0.5885,0.7001,0.2355,0.3245,0.6537,0.2961,0.3451,0.4740,0.2914,0.2924,0.4701,0.4720,0.6681,0.6200,0.6835,0.5300 36 | 0.3333,0.6179,0.6145,0.6354,0.6402,0.3475,0.3216,0.5448,0.5089,0.5624,0.6429,0.7660,0.6811,0.3404,0.0814,0.2924,0.6303,0.7077,0.2970,0.3309,0.5320,0.3302,0.2609,0.5132,0.3164,0.3603,0.4780,0.5128,0.6823,0.5592,0.6961,0.5486 37 | 0.3816,0.5939,0.4357,0.5830,0.5957,0.2584,0.5101,0.4917,0.3235,0.4565,0.4053,0.6383,0.6128,0.1402,0.0368,0.1410,0.2690,0.6206,0.2632,0.3741,0.5263,0.1048,0.1862,0.5804,0.4286,0.4613,0.2156,0.4012,0.2727,0.1860,0.7541,0.5817 38 | 0.3708,0.4940,0.4091,0.4545,0.5897,0.3646,0.3750,0.3608,0.3059,0.5455,0.5203,0.6377,0.5275,0.1687,0.1184,0.3902,0.4757,0.8000,0.4468,0.3942,0.3933,0.2727,0.2647,0.6583,0.2673,0.2530,0.4100,0.5000,0.6098,0.5545,0.5904,0.7128 39 | 0.4026,0.7128,0.4545,0.8721,0.5660,0.3855,0.5135,0.5775,0.5932,0.6078,0.5376,0.9648,0.6406,0.1442,0.0112,0.0680,0.3333,0.6647,0.4737,0.4126,0.4433,0.1646,0.2143,0.4321,0.2958,0.5459,0.2718,0.3240,0.5146,0.4624,0.7640,0.6029 40 | 
0.6241,0.6152,0.7112,0.8097,0.6448,0.5213,0.4399,0.8341,0.3553,0.6757,0.6306,0.9161,0.6574,0.1748,0.0746,0.0836,0.6159,0.6238,0.5083,0.5088,0.4708,0.1519,0.2062,0.3883,0.4307,0.5569,0.2544,0.3077,0.4321,0.6364,0.6721,0.5490 41 | 0.2566,0.5156,0.4451,0.3772,0.6504,0.2735,0.2693,0.3851,0.5521,0.5120,0.4542,0.5810,0.5736,0.2872,0.0714,0.3899,0.4504,0.7344,0.2747,0.3172,0.3899,0.2717,0.3122,0.6063,0.1731,0.3738,0.4444,0.5452,0.4864,0.5816,0.5362,0.6271 42 | 0.1156,0.5884,0.4312,0.4333,0.6311,0.2626,0.2103,0.3807,0.3851,0.4957,0.3920,0.4543,0.5511,0.2411,0.0445,0.2749,0.4127,0.7540,0.3146,0.4325,0.3118,0.2310,0.2814,0.4724,0.1405,0.4061,0.3835,0.6787,0.5806,0.6907,0.6993,0.6780 43 | 0.3358,0.6557,0.4728,0.4741,0.6853,0.2547,0.2723,0.4418,0.4967,0.6042,0.4894,0.5437,0.6140,0.2199,0.0605,0.2503,0.4947,0.6182,0.2571,0.3330,0.4261,0.2436,0.2863,0.4951,0.1969,0.4665,0.3208,0.6995,0.5711,0.5100,0.7029,0.5776 44 | 0.3510,0.6426,0.4272,0.6203,0.6857,0.4291,0.4129,0.5999,0.4236,0.4310,0.4535,0.5615,0.6509,0.1076,0.0293,0.1628,0.4093,0.6147,0.3395,0.3402,0.4826,0.1343,0.1854,0.5628,0.2015,0.5333,0.1464,0.3584,0.3026,0.3170,0.6740,0.5405 45 | 0.3050,0.5599,0.5610,0.4859,0.6538,0.2843,0.2356,0.4737,0.5326,0.5374,0.5162,0.6410,0.6597,0.3183,0.0816,0.3170,0.4251,0.7390,0.3066,0.3880,0.4685,0.1996,0.3446,0.4796,0.1958,0.3298,0.4111,0.5793,0.6481,0.5173,0.7357,0.6375 46 | 0.3907,0.6622,0.4524,0.5048,0.4743,0.2390,0.2133,0.3917,0.5558,0.4620,0.4897,0.4256,0.6336,0.2485,0.0684,0.2951,0.5020,0.6845,0.2851,0.4049,0.4754,0.2092,0.1953,0.4620,0.0867,0.4544,0.4182,0.6157,0.5937,0.4605,0.6844,0.5935 47 | 0.2553,0.7216,0.5116,0.7701,0.8194,0.3367,0.2857,0.3690,0.3148,0.7083,0.7113,0.8171,0.5618,0.2143,0.0247,0.1111,0.6136,0.7391,0.2571,0.3846,0.5974,0.3725,0.3881,0.4648,0.2000,0.3196,0.4681,0.4554,0.4643,0.6623,0.6923,0.4459 48 | 0.5735,0.5693,0.5956,0.4102,0.6061,0.4066,0.3264,0.4962,0.4751,0.6717,0.4890,0.4962,0.6537,0.3143,0.0634,0.3756,0.4923,0.6893,0.2487,0.2323,0.3912,0.2926,0.2208,0.5615,0.3162,0.3055,0.3384,0.4713,0.5901,0.5826,0.6721,0.6742 49 | 0.3012,0.6893,0.3795,0.5179,0.6646,0.2292,0.2091,0.3935,0.4486,0.5292,0.4173,0.6844,0.5117,0.1734,0.0366,0.2139,0.4757,0.6172,0.2656,0.4662,0.4696,0.1785,0.1697,0.4859,0.1394,0.5291,0.2366,0.6845,0.5125,0.5251,0.6979,0.5084 50 | 0.2785,0.7111,0.5755,0.6522,0.7920,0.2494,0.2297,0.5679,0.6305,0.5963,0.6724,0.7512,0.6722,0.1960,0.0650,0.2111,0.5977,0.5673,0.3153,0.4787,0.6184,0.2394,0.2459,0.4431,0.2328,0.4820,0.3475,0.5322,0.5305,0.3749,0.6868,0.4064 51 | 0.5570,0.5521,0.5645,0.5638,0.6667,0.3667,0.2973,0.4526,0.3400,0.5507,0.6634,0.7188,0.5354,0.1224,0.0326,0.1733,0.6418,0.8125,0.3099,0.3733,0.6167,0.3889,0.2500,0.5146,0.2500,0.2340,0.4000,0.5615,0.6162,0.5575,0.6629,0.6988 52 | 0.4016,0.6175,0.5698,0.5630,0.5914,0.3458,0.2944,0.4910,0.4674,0.5330,0.6296,0.6939,0.6426,0.2303,0.0569,0.3123,0.5648,0.7171,0.2210,0.3455,0.5617,0.3247,0.3125,0.5679,0.3349,0.3109,0.4702,0.5323,0.5043,0.7549,0.6487,0.4708 53 | 0.8444,0.6111,0.5352,0.6275,0.5968,0.4455,0.5000,0.5192,0.2619,0.6667,0.4348,0.5802,0.6146,0.2090,0.0455,0.2414,0.4878,0.8148,0.4267,0.3218,0.5758,0.1648,0.1325,0.5051,0.3607,0.3619,0.4153,0.4122,0.5714,0.5432,0.6364,0.7769 54 | 0.3210,0.5727,0.5414,0.4333,0.7330,0.3600,0.3232,0.5654,0.4819,0.5925,0.4395,0.5789,0.6813,0.2218,0.0700,0.3293,0.4138,0.7305,0.2531,0.3714,0.5829,0.2526,0.4018,0.5559,0.3208,0.3264,0.4128,0.6097,0.6371,0.5975,0.7149,0.6138 55 | 
0.5227,0.8025,0.4655,0.8953,0.6585,0.3418,0.4783,0.3333,0.4324,0.5581,0.5833,0.9104,0.6744,0.0484,0.0000,0.0556,0.3273,0.4943,0.5000,0.4789,0.5625,0.2889,0.1897,0.2857,0.2037,0.7037,0.2299,0.3958,0.5574,0.4200,0.8684,0.6338 56 | 0.5034,0.5837,0.4624,0.3333,0.6000,0.3590,0.3694,0.4358,0.2547,0.4324,0.4510,0.3436,0.5231,0.0781,0.0297,0.2125,0.4141,0.6461,0.2600,0.2108,0.4456,0.2944,0.1780,0.6305,0.1295,0.3450,0.2099,0.4007,0.2331,0.4062,0.5174,0.4762 57 | 0.4545,0.7667,0.5854,0.6522,0.6897,0.7097,0.6829,0.5000,0.3000,0.7812,0.3636,0.5278,0.5806,0.2258,0.0870,0.5946,0.5172,0.7857,0.3182,0.4262,0.5667,0.0400,0.2500,0.5652,0.6667,0.5385,0.3846,0.3023,0.6207,0.5600,0.7368,0.8529 58 | 0.5526,0.6216,0.3175,0.7140,0.7959,0.5319,0.5444,0.6176,0.2845,0.6768,0.4443,0.6797,0.7171,0.1569,0.0218,0.2180,0.2700,0.7053,0.3043,0.3248,0.4753,0.1040,0.3753,0.5738,0.4253,0.3720,0.2935,0.3303,0.4115,0.6094,0.6816,0.5830 59 | 0.1745,0.6086,0.2656,0.3285,0.6238,0.1981,0.1632,0.2595,0.2870,0.3185,0.2895,0.4523,0.4959,0.0966,0.0621,0.3271,0.4313,0.5786,0.2925,0.4446,0.2340,0.2222,0.2455,0.5729,0.1489,0.4739,0.3689,0.6941,0.3254,0.3556,0.6298,0.5855 60 | 0.6067,0.5875,0.5698,0.5099,0.7248,0.4132,0.2838,0.3986,0.5385,0.6176,0.5269,0.6429,0.7013,0.4513,0.0752,0.3130,0.5605,0.7191,0.2191,0.2253,0.2663,0.3055,0.2026,0.4314,0.1233,0.3565,0.2609,0.5233,0.7163,0.5449,0.6047,0.7887 61 | 0.6537,0.5756,0.6860,0.8272,0.6107,0.5512,0.5463,0.8059,0.3272,0.7061,0.6722,0.8939,0.6043,0.2603,0.0916,0.1162,0.5499,0.6456,0.4603,0.4576,0.5140,0.2337,0.2545,0.4919,0.4923,0.4893,0.2598,0.3219,0.4457,0.7244,0.6611,0.6552 62 | 0.3143,0.5385,0.5413,0.4645,0.6199,0.3320,0.1388,0.3725,0.4759,0.4931,0.4411,0.5721,0.6009,0.2393,0.0670,0.2612,0.5167,0.7330,0.2533,0.3316,0.5059,0.2704,0.2046,0.5288,0.1134,0.3798,0.3602,0.6469,0.6548,0.5658,0.7200,0.6950 63 | 0.8087,0.6383,0.6419,0.6982,0.5982,0.4809,0.6323,0.6332,0.2229,0.6615,0.5095,0.7364,0.7384,0.1760,0.0837,0.1856,0.5209,0.7334,0.2949,0.3390,0.5287,0.1608,0.2763,0.4850,0.5752,0.3867,0.3684,0.2904,0.5286,0.6475,0.6431,0.7060 64 | 0.2688,0.5743,0.4524,0.4443,0.6083,0.2928,0.2506,0.4287,0.4610,0.4925,0.4412,0.5798,0.5766,0.2768,0.0702,0.2751,0.4064,0.6977,0.3303,0.3283,0.4432,0.2560,0.3506,0.5060,0.1879,0.3481,0.4268,0.6492,0.5532,0.6091,0.6510,0.6062 65 | 0.3455,0.6508,0.4493,0.5468,0.6367,0.3431,0.3929,0.5202,0.4556,0.5095,0.5418,0.6468,0.5326,0.2629,0.0425,0.3475,0.4081,0.7211,0.3505,0.4722,0.3895,0.2298,0.3731,0.6098,0.3546,0.4099,0.4008,0.6105,0.4884,0.5401,0.7043,0.5426 66 | 0.2981,0.6038,0.5169,0.4651,0.4462,0.4261,0.4175,0.5054,0.3400,0.5077,0.4806,0.7143,0.6339,0.2101,0.0510,0.2752,0.4800,0.7685,0.2667,0.2797,0.5495,0.3505,0.3235,0.5327,0.2658,0.3238,0.4359,0.5500,0.6102,0.5674,0.6667,0.5217 67 | 0.6063,0.8031,0.5561,0.8834,0.6946,0.4494,0.6281,0.6798,0.4825,0.6727,0.6090,0.8584,0.7574,0.1101,0.0389,0.1809,0.3913,0.6981,0.3827,0.3732,0.4234,0.1330,0.3048,0.3792,0.5654,0.5101,0.3265,0.3180,0.5740,0.5752,0.7143,0.6267 68 | 0.3725,0.7097,0.5143,0.8280,0.4630,0.3301,0.3226,0.4035,0.7297,0.5000,0.4167,0.8111,0.5510,0.1186,0.0208,0.1455,0.7879,0.6869,0.5065,0.4583,0.3973,0.0462,0.1778,0.3365,0.1552,0.5444,0.2062,0.4750,0.3667,0.5814,0.6931,0.5233 69 | 0.2378,0.6198,0.4483,0.4329,0.5959,0.2520,0.2735,0.4594,0.5433,0.4384,0.4928,0.6108,0.5789,0.2660,0.0445,0.3705,0.4148,0.7453,0.3522,0.4712,0.3357,0.2639,0.1802,0.5848,0.2459,0.4922,0.4261,0.5876,0.5317,0.5026,0.6294,0.5740 70 | 
0.3938,0.6851,0.5562,0.5743,0.6913,0.2411,0.2950,0.5967,0.5785,0.6700,0.6013,0.6080,0.6781,0.2687,0.0581,0.3444,0.5604,0.6102,0.2821,0.4073,0.5218,0.2291,0.3012,0.5597,0.2785,0.4592,0.4225,0.5194,0.5866,0.5773,0.7181,0.4986 71 | 0.3318,0.5759,0.4825,0.5224,0.6578,0.3323,0.3527,0.4672,0.3444,0.5282,0.4848,0.5571,0.6039,0.1992,0.0725,0.2967,0.4796,0.6937,0.3237,0.3270,0.5236,0.2390,0.2920,0.5397,0.2863,0.2544,0.3882,0.6358,0.5282,0.5815,0.6842,0.6653 72 | 0.3987,0.5327,0.4450,0.5648,0.6462,0.3099,0.3409,0.5086,0.5360,0.6031,0.6130,0.7786,0.5826,0.1969,0.1081,0.2415,0.6040,0.7478,0.4375,0.4569,0.4639,0.2500,0.2337,0.5622,0.3026,0.3250,0.4024,0.5314,0.5938,0.6503,0.6552,0.5273 73 | 0.4586,0.6056,0.3735,0.7023,0.5866,0.5047,0.6246,0.5224,0.3772,0.6271,0.5078,0.7674,0.6387,0.1298,0.0400,0.1579,0.4717,0.6781,0.3322,0.4263,0.4664,0.2000,0.4520,0.5435,0.5784,0.3531,0.3098,0.3729,0.4393,0.6606,0.6515,0.6727 74 | 0.3238,0.4333,0.4907,0.6095,0.5467,0.2843,0.3125,0.4639,0.2881,0.5625,0.5948,0.6742,0.6667,0.1880,0.0864,0.2716,0.5158,0.7387,0.4149,0.3421,0.5478,0.1795,0.4464,0.5410,0.1979,0.2247,0.3761,0.5035,0.5690,0.6383,0.7191,0.8131 75 | 0.2607,0.5754,0.4755,0.4318,0.6600,0.3554,0.3854,0.4076,0.4488,0.4730,0.4336,0.6330,0.5490,0.2348,0.0608,0.2883,0.4135,0.7609,0.2388,0.4090,0.4689,0.2746,0.3440,0.5854,0.2202,0.3316,0.4434,0.6278,0.5268,0.6153,0.6464,0.5473 76 | 0.3163,0.6534,0.5302,0.6164,0.6261,0.2739,0.2622,0.4154,0.4259,0.5259,0.5479,0.6962,0.5865,0.2648,0.0635,0.3663,0.5271,0.7112,0.3667,0.4681,0.4577,0.2195,0.3566,0.5846,0.1749,0.4526,0.3867,0.6446,0.5699,0.5708,0.7671,0.5168 77 | 0.6939,0.6157,0.5594,0.6802,0.6079,0.5211,0.6561,0.6439,0.1890,0.6683,0.4333,0.5734,0.7134,0.1565,0.0356,0.1922,0.4067,0.7113,0.2288,0.3820,0.4017,0.1464,0.2977,0.4854,0.7069,0.3574,0.3327,0.2630,0.4305,0.6894,0.6479,0.6788 78 | 0.8000,0.5000,0.6190,0.3333,0.5652,0.4737,0.5200,0.7600,0.5000,0.7600,0.4074,0.4545,0.5417,0.3158,0.0556,0.3600,0.3750,0.8462,0.3529,0.4783,0.4138,0.2727,0.3182,0.5263,0.3913,0.2778,0.4000,0.7000,0.6500,0.4000,0.8182,0.7143 79 | 0.3149,0.5910,0.3714,0.5149,0.5251,0.3243,0.2924,0.4275,0.4880,0.5571,0.5904,0.5934,0.5960,0.2091,0.1017,0.2967,0.5234,0.7827,0.3553,0.3819,0.3512,0.3131,0.2097,0.6881,0.2992,0.4347,0.4680,0.5622,0.3927,0.5517,0.6908,0.5665 80 | 0.3279,0.5872,0.3993,0.5105,0.6323,0.2572,0.1953,0.4708,0.6916,0.4058,0.4615,0.5500,0.6268,0.1667,0.0899,0.2734,0.4888,0.6422,0.2500,0.3599,0.5147,0.2090,0.2682,0.4699,0.1702,0.3870,0.4104,0.6049,0.5986,0.3885,0.6517,0.6517 81 | 0.4942,0.7449,0.6556,0.8500,0.6547,0.4643,0.5650,0.6163,0.4167,0.6327,0.6352,0.8736,0.7105,0.1546,0.0429,0.1588,0.3992,0.7045,0.3826,0.3966,0.5122,0.1943,0.2070,0.4116,0.3663,0.4429,0.2983,0.3502,0.5379,0.5917,0.7512,0.6966 82 | 0.4394,0.7786,0.6735,0.8462,0.7000,0.3000,0.3261,0.6279,0.7736,0.6111,0.6415,0.9394,0.6917,0.0714,0.0211,0.1290,0.4486,0.4694,0.5505,0.5302,0.5259,0.2078,0.1376,0.4014,0.2368,0.6111,0.2436,0.4051,0.4479,0.4886,0.7615,0.5161 83 | 0.6356,0.6917,0.5696,0.8456,0.6978,0.5604,0.6578,0.6333,0.3613,0.7606,0.5673,0.8374,0.7338,0.1992,0.0536,0.1758,0.3766,0.7667,0.2404,0.2705,0.4264,0.1977,0.4525,0.3541,0.6059,0.4139,0.4258,0.2745,0.5213,0.6620,0.6977,0.6912 84 | 0.3986,0.6752,0.5244,0.5830,0.7202,0.2696,0.2754,0.5934,0.6443,0.6226,0.5441,0.6218,0.6148,0.2679,0.0558,0.2719,0.5458,0.6697,0.2785,0.3918,0.5293,0.2157,0.3558,0.5574,0.2567,0.4949,0.3971,0.5145,0.6355,0.5489,0.6915,0.4667 85 | 
0.2612,0.5850,0.4950,0.4379,0.5758,0.3375,0.2659,0.4254,0.4509,0.4337,0.4621,0.5827,0.5837,0.2908,0.0717,0.2988,0.3854,0.7538,0.2725,0.3460,0.4607,0.2046,0.3944,0.5505,0.2097,0.4961,0.3944,0.6028,0.5662,0.5832,0.6658,0.6772 86 | 0.2355,0.6113,0.3959,0.5181,0.5443,0.2717,0.2756,0.3503,0.4394,0.4364,0.5622,0.6093,0.6161,0.1655,0.0420,0.3096,0.4370,0.7174,0.3171,0.4031,0.4296,0.2448,0.3046,0.5719,0.2010,0.3159,0.4658,0.5399,0.4372,0.5226,0.6553,0.4629 87 | 0.4744,0.5651,0.6997,0.5714,0.6891,0.3951,0.3784,0.5732,0.4757,0.6653,0.6592,0.8092,0.7275,0.2322,0.0590,0.3129,0.6220,0.6455,0.3156,0.2865,0.4971,0.3537,0.2161,0.4684,0.3826,0.3522,0.3228,0.5075,0.5947,0.6217,0.6446,0.6075 88 | 0.3799,0.6670,0.5066,0.5583,0.6616,0.3366,0.3356,0.5622,0.5251,0.6606,0.5664,0.6184,0.6319,0.2556,0.0794,0.3122,0.5380,0.7023,0.3500,0.3549,0.4380,0.3170,0.3390,0.5583,0.3179,0.4146,0.4733,0.5354,0.5636,0.6359,0.7089,0.5313 89 | 0.3429,0.5538,0.4592,0.4401,0.6733,0.2274,0.3106,0.4848,0.4699,0.6108,0.4916,0.6246,0.5591,0.2993,0.0805,0.3178,0.4385,0.6353,0.3288,0.3009,0.3343,0.2931,0.3704,0.6617,0.4068,0.3218,0.4683,0.5140,0.4678,0.6489,0.5678,0.5681 90 | 0.3480,0.5912,0.4386,0.5895,0.6053,0.3190,0.2405,0.4172,0.4021,0.4978,0.5243,0.7757,0.6429,0.1854,0.0333,0.2409,0.5255,0.6912,0.2398,0.4606,0.5685,0.2365,0.3528,0.5238,0.2596,0.3333,0.4075,0.5577,0.4892,0.6902,0.6655,0.5157 91 | 0.3315,0.6512,0.5517,0.4965,0.6283,0.3688,0.3715,0.4689,0.4709,0.5112,0.6067,0.7029,0.6252,0.2984,0.0938,0.2945,0.5781,0.7183,0.3183,0.4411,0.4555,0.2697,0.3303,0.5761,0.3166,0.3687,0.4724,0.6213,0.5521,0.5267,0.6463,0.5572 92 | 0.3242,0.6323,0.4616,0.5541,0.7618,0.3585,0.3580,0.5425,0.5489,0.5938,0.5539,0.6527,0.7064,0.2592,0.1117,0.3181,0.5159,0.6719,0.2620,0.3840,0.5462,0.2395,0.4228,0.5375,0.2880,0.3631,0.5115,0.4653,0.5085,0.5005,0.6497,0.5140 93 | 0.6139,0.6791,0.5593,0.7268,0.5167,0.3967,0.5726,0.6237,0.4864,0.6148,0.5986,0.8787,0.7482,0.2238,0.0776,0.1543,0.4620,0.7091,0.3279,0.3942,0.5169,0.1525,0.2643,0.4161,0.4057,0.4359,0.3731,0.3844,0.5147,0.6404,0.6985,0.6573 94 | 0.6811,0.5632,0.4785,0.5607,0.6301,0.5550,0.6774,0.5795,0.3578,0.7454,0.4116,0.5230,0.5312,0.2718,0.0750,0.4397,0.4435,0.6766,0.4404,0.2726,0.3041,0.1652,0.4141,0.6159,0.7645,0.4841,0.3069,0.4253,0.3952,0.5848,0.6725,0.6880 95 | 0.3315,0.6815,0.6203,0.6982,0.7173,0.2992,0.2568,0.6369,0.5941,0.6021,0.6558,0.8041,0.7235,0.3107,0.0790,0.2717,0.6498,0.5902,0.3069,0.4094,0.5548,0.2740,0.2640,0.5169,0.2495,0.5318,0.4047,0.5618,0.6540,0.5283,0.7195,0.4411 96 | 0.5193,0.5977,0.6730,0.7462,0.6592,0.3826,0.3991,0.6502,0.4686,0.7703,0.6766,0.7770,0.6760,0.3367,0.0732,0.3260,0.6688,0.6698,0.3333,0.3383,0.5216,0.3461,0.2735,0.5410,0.4460,0.4320,0.4721,0.3831,0.5660,0.6573,0.6694,0.5245 97 | 0.2710,0.5753,0.2727,0.4737,0.6415,0.2357,0.2256,0.2685,0.4444,0.2025,0.4195,0.5686,0.4762,0.1692,0.0250,0.3378,0.3782,0.6647,0.3540,0.3861,0.2937,0.1462,0.4254,0.5776,0.1939,0.4155,0.4577,0.7403,0.4683,0.3193,0.6148,0.6333 98 | 0.5190,0.7471,0.7130,0.8881,0.6570,0.5216,0.4171,0.8333,0.5168,0.7015,0.6452,0.9303,0.6935,0.2121,0.0674,0.0846,0.5504,0.4708,0.6344,0.6168,0.5747,0.1894,0.1979,0.4046,0.3033,0.7246,0.1543,0.4335,0.4175,0.6339,0.7342,0.4941 99 | 0.5627,0.6865,0.5466,0.6357,0.7127,0.4391,0.5076,0.6640,0.4173,0.6556,0.4909,0.7550,0.5866,0.2579,0.0557,0.3186,0.5611,0.5844,0.4198,0.5309,0.4192,0.1752,0.3171,0.5445,0.5697,0.6419,0.2141,0.5339,0.4811,0.5479,0.6943,0.5586 100 | 
0.5652,0.5288,0.4752,0.6250,0.7081,0.4959,0.4757,0.5521,0.3136,0.7216,0.3965,0.5432,0.6313,0.1672,0.0327,0.2112,0.3488,0.7136,0.2881,0.3596,0.4532,0.0929,0.2711,0.5530,0.2789,0.3273,0.3101,0.3099,0.4745,0.6586,0.6628,0.7577 101 | 0.3718,0.6006,0.5047,0.6119,0.7186,0.3238,0.3217,0.5524,0.5357,0.6036,0.6081,0.7558,0.7202,0.2424,0.1057,0.2785,0.5860,0.6788,0.2890,0.3641,0.5962,0.2700,0.3718,0.5746,0.2899,0.3750,0.4559,0.4890,0.5760,0.4890,0.6299,0.4646 102 | 0.6538,0.6653,0.5093,0.8142,0.6989,0.4384,0.5968,0.6179,0.5323,0.4946,0.5263,0.8686,0.6198,0.0638,0.0625,0.1667,0.2941,0.7169,0.3698,0.4628,0.4519,0.1562,0.2612,0.4714,0.5140,0.5514,0.2661,0.4340,0.5109,0.5556,0.8269,0.7895 103 | 0.3200,0.5662,0.3385,0.5503,0.6355,0.2343,0.2933,0.4400,0.2825,0.5446,0.5457,0.7170,0.6313,0.1614,0.0476,0.2578,0.5086,0.6329,0.3722,0.4116,0.4652,0.2661,0.3659,0.5000,0.2753,0.4426,0.4670,0.4776,0.4113,0.4894,0.6126,0.4167 104 | 0.4400,0.7533,0.5882,0.8889,0.6615,0.3609,0.6146,0.6744,0.6000,0.5246,0.6558,0.8741,0.7143,0.2235,0.0581,0.0947,0.5463,0.6452,0.4848,0.4219,0.4630,0.1398,0.1875,0.4099,0.3676,0.5274,0.2588,0.3015,0.5529,0.4588,0.7698,0.6050 105 | 0.4431,0.6171,0.4959,0.5988,0.5959,0.5384,0.5607,0.5340,0.3062,0.5022,0.4498,0.5743,0.6118,0.1061,0.0352,0.1779,0.4005,0.6668,0.3039,0.3007,0.4467,0.1778,0.2267,0.5728,0.5040,0.4156,0.2693,0.3026,0.3169,0.3706,0.6386,0.5883 106 | 0.3533,0.5738,0.4542,0.5010,0.7050,0.3439,0.4029,0.5388,0.3813,0.5969,0.4945,0.4941,0.5644,0.1504,0.0367,0.3460,0.5385,0.6905,0.2305,0.3467,0.5357,0.1811,0.4319,0.5427,0.3188,0.3324,0.3759,0.5361,0.4719,0.6453,0.6630,0.5452 107 | 0.2875,0.6102,0.3430,0.5093,0.7288,0.2746,0.3703,0.4712,0.4319,0.5160,0.4993,0.6094,0.5485,0.1633,0.0455,0.3226,0.4133,0.6980,0.3517,0.4107,0.3993,0.2278,0.4354,0.5996,0.3038,0.4297,0.4745,0.5630,0.4783,0.5337,0.6481,0.5230 108 | 0.3864,0.5249,0.4589,0.5226,0.7150,0.3402,0.3017,0.4823,0.4205,0.6524,0.5911,0.8189,0.7493,0.2350,0.1058,0.2313,0.6297,0.7326,0.2447,0.3602,0.6439,0.3156,0.4582,0.4695,0.2628,0.2513,0.5257,0.4324,0.6603,0.6057,0.5655,0.5714 109 | 0.6131,0.7406,0.6581,0.8877,0.6049,0.4516,0.4206,0.8287,0.3855,0.6541,0.6218,0.8975,0.6936,0.2854,0.0530,0.1191,0.6068,0.4672,0.6519,0.6161,0.5782,0.1927,0.2191,0.3589,0.3230,0.6761,0.1613,0.4505,0.4239,0.6098,0.7215,0.5184 110 | 0.4623,0.6174,0.7357,0.5426,0.6000,0.4563,0.4565,0.6033,0.4426,0.7204,0.5725,0.7299,0.7455,0.3688,0.0936,0.3947,0.6313,0.7235,0.2558,0.2135,0.5409,0.3688,0.2087,0.5695,0.3673,0.3346,0.3232,0.5321,0.6387,0.5379,0.6846,0.6066 111 | 0.5556,0.6267,0.6429,0.7236,0.6250,0.4755,0.5643,0.7541,0.2932,0.7759,0.6249,0.8113,0.7828,0.2400,0.0437,0.1441,0.5187,0.6427,0.2738,0.3217,0.6195,0.1371,0.2640,0.5476,0.5204,0.3498,0.3208,0.3549,0.5884,0.6114,0.7256,0.5840 112 | 0.3891,0.7139,0.5497,0.6208,0.8069,0.2849,0.2334,0.6151,0.4848,0.5193,0.5923,0.8423,0.5391,0.2145,0.0346,0.2432,0.5141,0.6622,0.3541,0.5559,0.6953,0.2230,0.2321,0.4771,0.2319,0.4924,0.3081,0.6077,0.6122,0.5104,0.7584,0.3918 113 | 0.6690,0.6796,0.4476,0.6362,0.6671,0.4705,0.5679,0.5963,0.4393,0.7045,0.4704,0.6000,0.5756,0.2879,0.0691,0.3595,0.4705,0.6463,0.5381,0.5219,0.3429,0.1797,0.3777,0.5523,0.5854,0.5949,0.3089,0.4576,0.3830,0.5893,0.6592,0.6284 114 | 0.2440,0.7205,0.3675,0.3989,0.7756,0.2559,0.1758,0.4082,0.4958,0.6049,0.2932,0.5845,0.4341,0.2063,0.0442,0.2010,0.3571,0.6709,0.4710,0.4815,0.2581,0.1923,0.3068,0.5843,0.0694,0.4956,0.3087,0.7725,0.5242,0.6656,0.7857,0.6927 115 | 
0.4203,0.5710,0.5137,0.4893,0.6667,0.3930,0.3772,0.5520,0.5475,0.6030,0.5493,0.6412,0.6300,0.3018,0.0657,0.3560,0.5062,0.6742,0.3206,0.2968,0.3424,0.3134,0.2792,0.6025,0.3701,0.3370,0.4681,0.5069,0.6105,0.6402,0.6214,0.5089 116 | 0.3923,0.6191,0.4840,0.5449,0.6966,0.3363,0.3190,0.4956,0.4065,0.5890,0.5797,0.6372,0.6317,0.2113,0.0729,0.3018,0.5709,0.6810,0.2857,0.4022,0.5514,0.3046,0.3547,0.5322,0.3298,0.3562,0.4546,0.5185,0.5317,0.5440,0.6498,0.4644 117 | 0.4985,0.6464,0.6039,0.6433,0.7014,0.2957,0.3436,0.6484,0.4614,0.7053,0.6899,0.8096,0.6881,0.2255,0.0681,0.2442,0.7011,0.6089,0.3653,0.4365,0.5718,0.2857,0.2727,0.5142,0.3997,0.4451,0.4071,0.4528,0.6015,0.6107,0.6826,0.4106 118 | 0.4262,0.7584,0.5464,0.8403,0.5417,0.3784,0.4267,0.6316,0.6923,0.5918,0.6048,0.8860,0.7536,0.2609,0.0385,0.1111,0.4085,0.5862,0.4953,0.4476,0.4205,0.1429,0.1744,0.4122,0.1667,0.4931,0.2763,0.4057,0.5824,0.5303,0.7360,0.6038 119 | 0.2190,0.6013,0.4381,0.4644,0.5961,0.2876,0.2483,0.4829,0.3926,0.4621,0.4796,0.5523,0.6905,0.2348,0.0943,0.3142,0.4325,0.7269,0.3019,0.3072,0.3976,0.2199,0.3035,0.5125,0.2268,0.3978,0.4459,0.6322,0.5723,0.5068,0.6129,0.6941 120 | 0.4386,0.5508,0.6441,0.4507,0.6800,0.4164,0.3151,0.4863,0.3188,0.6645,0.5669,0.5628,0.6656,0.4106,0.0959,0.3935,0.5545,0.6694,0.2898,0.1993,0.3175,0.2543,0.2603,0.5714,0.3000,0.3173,0.3549,0.4645,0.7129,0.6131,0.4874,0.7303 121 | 0.4250,0.5826,0.4975,0.4771,0.5000,0.2163,0.3301,0.5060,0.4125,0.7328,0.5356,0.5326,0.6145,0.3171,0.0514,0.3838,0.4355,0.7237,0.3775,0.4162,0.3842,0.2254,0.3089,0.5708,0.2584,0.2650,0.4621,0.5407,0.4472,0.7448,0.5955,0.6614 122 | 0.3562,0.5740,0.6613,0.6018,0.6308,0.3185,0.3232,0.6667,0.4099,0.6953,0.6241,0.7684,0.7093,0.3242,0.0933,0.3263,0.6863,0.6856,0.5231,0.4309,0.5714,0.3191,0.2400,0.5182,0.3852,0.4260,0.4349,0.4108,0.5632,0.6389,0.6383,0.4854 123 | 0.6133,0.6238,0.8370,0.4536,0.5738,0.3093,0.3061,0.4574,0.3846,0.7119,0.5591,0.4045,0.5714,0.2115,0.0112,0.2025,0.6066,0.8000,0.2133,0.1562,0.6552,0.2283,0.0979,0.5862,0.1493,0.4141,0.2432,0.4228,0.5882,0.6667,0.7245,0.4111 124 | 0.4921,0.6917,0.5536,0.8571,0.6875,0.5607,0.7064,0.4949,0.4091,0.7021,0.5775,0.8682,0.7129,0.1719,0.0435,0.1606,0.3931,0.6717,0.3867,0.4295,0.5876,0.1667,0.2756,0.3626,0.6053,0.5153,0.2995,0.3586,0.4180,0.5413,0.7806,0.6581 125 | 0.5062,0.7105,0.4821,0.8328,0.6958,0.4083,0.5556,0.5597,0.5291,0.5149,0.5661,0.8628,0.6875,0.1649,0.0446,0.1787,0.3016,0.6248,0.4308,0.4646,0.4743,0.1750,0.2147,0.4463,0.2978,0.5680,0.2674,0.3985,0.4565,0.4425,0.7464,0.6686 126 | 0.5625,0.5792,0.6406,0.8294,0.6736,0.5197,0.4451,0.7686,0.3879,0.6853,0.5843,0.9548,0.6980,0.2162,0.1040,0.1353,0.5359,0.6364,0.5988,0.4425,0.4561,0.1667,0.2014,0.4000,0.3154,0.5385,0.2043,0.3521,0.4667,0.5794,0.6196,0.6906 127 | 0.4472,0.6158,0.4181,0.6865,0.6129,0.3456,0.4360,0.5213,0.3250,0.6039,0.5735,0.8523,0.5891,0.1287,0.0379,0.2596,0.4236,0.7485,0.2691,0.4440,0.5250,0.2389,0.3417,0.5531,0.3227,0.4052,0.4066,0.5117,0.4483,0.5396,0.6962,0.4052 128 | 0.3761,0.5806,0.5169,0.5320,0.6798,0.3496,0.3256,0.6068,0.5369,0.5686,0.5740,0.7546,0.6814,0.2792,0.0866,0.2862,0.5557,0.6678,0.2552,0.3448,0.6135,0.3200,0.3315,0.5370,0.2303,0.3416,0.3652,0.5606,0.6624,0.5485,0.6800,0.5377 129 | 0.6385,0.5333,0.3779,0.6083,0.6841,0.6420,0.7278,0.6533,0.2205,0.8059,0.4286,0.7286,0.4927,0.2960,0.0567,0.3406,0.5007,0.7652,0.4784,0.3448,0.2811,0.1862,0.4233,0.6687,0.7845,0.4559,0.3585,0.4712,0.4389,0.6251,0.6818,0.7255 130 | 
0.3318,0.5755,0.3360,0.4905,0.6693,0.2446,0.3072,0.3442,0.3709,0.5391,0.5372,0.6645,0.5433,0.1554,0.0564,0.3616,0.4508,0.6919,0.2708,0.4804,0.3690,0.2766,0.3641,0.5525,0.1795,0.3062,0.5200,0.5512,0.4715,0.4431,0.5939,0.5055 131 | 0.2118,0.6258,0.3103,0.3553,0.6682,0.2424,0.1727,0.2528,0.4231,0.4081,0.3219,0.3852,0.5292,0.2344,0.0336,0.3069,0.3927,0.6821,0.3156,0.5292,0.3133,0.2140,0.3565,0.4950,0.2423,0.4032,0.3743,0.6779,0.4277,0.5893,0.7063,0.6322 132 | 0.5314,0.7322,0.6209,0.8357,0.5864,0.4417,0.6374,0.6494,0.4934,0.5719,0.6168,0.8380,0.7530,0.1830,0.0386,0.1043,0.4179,0.6837,0.3730,0.3642,0.4949,0.1146,0.2505,0.4167,0.3938,0.5075,0.3149,0.3136,0.5527,0.5708,0.7419,0.6748 133 | 0.4525,0.5455,0.4557,0.5449,0.6593,0.3086,0.3876,0.4806,0.3564,0.6818,0.4592,0.6414,0.5353,0.1380,0.0471,0.2244,0.5000,0.7273,0.2472,0.3557,0.5312,0.2534,0.3737,0.5896,0.3231,0.3013,0.4101,0.4938,0.4743,0.6024,0.6186,0.4638 134 | 0.6577,0.6293,0.4980,0.6127,0.7111,0.4317,0.6097,0.5789,0.3721,0.7070,0.4925,0.6758,0.5502,0.2892,0.1365,0.3568,0.5238,0.7326,0.3768,0.4688,0.4511,0.1967,0.5217,0.5035,0.6714,0.4781,0.3224,0.3832,0.4715,0.6646,0.6615,0.6591 135 | 0.4197,0.5740,0.5985,0.4272,0.6794,0.3851,0.3369,0.4864,0.4731,0.5799,0.4321,0.5146,0.6441,0.3478,0.0665,0.3885,0.5000,0.6937,0.2500,0.1951,0.3397,0.2178,0.2676,0.5484,0.1732,0.3939,0.3305,0.5568,0.7159,0.5399,0.6424,0.7158 136 | 0.4643,0.5631,0.4946,0.5611,0.6599,0.4045,0.4413,0.5337,0.4425,0.6266,0.6448,0.7411,0.6379,0.2384,0.0602,0.3550,0.5597,0.7389,0.2565,0.3224,0.6105,0.3126,0.4289,0.5745,0.4640,0.2562,0.5125,0.4865,0.5388,0.5623,0.6402,0.5435 137 | 0.3981,0.6096,0.6726,0.5203,0.6491,0.3006,0.3178,0.4493,0.4592,0.5877,0.5282,0.7323,0.6588,0.2791,0.0405,0.2754,0.5235,0.6543,0.3692,0.3931,0.5455,0.3070,0.2454,0.6012,0.1020,0.2345,0.3889,0.5342,0.6154,0.6776,0.6014,0.6172 138 | 0.3235,0.6065,0.4632,0.4737,0.6567,0.3016,0.2285,0.4577,0.3780,0.4982,0.4547,0.6317,0.5946,0.2861,0.0551,0.3086,0.5143,0.7317,0.2896,0.4187,0.4840,0.2476,0.2628,0.5656,0.1885,0.4089,0.3585,0.4505,0.6039,0.5159,0.6563,0.5753 139 | 0.5522,0.7500,0.7475,0.8662,0.6667,0.4067,0.5741,0.6667,0.5319,0.7966,0.6273,0.8261,0.7143,0.1391,0.0385,0.1122,0.5588,0.7351,0.4836,0.4409,0.4750,0.2581,0.2213,0.3082,0.3467,0.5038,0.2662,0.2885,0.4956,0.6111,0.7970,0.5873 140 | 0.6541,0.6008,0.4925,0.7418,0.7784,0.6049,0.6000,0.6555,0.3789,0.6646,0.5075,0.7202,0.7179,0.1667,0.0422,0.2047,0.3472,0.7365,0.3754,0.3509,0.5000,0.0863,0.3515,0.5813,0.4563,0.4043,0.3550,0.3125,0.5224,0.5610,0.6722,0.7015 141 | 0.3425,0.6655,0.5043,0.5129,0.7222,0.3679,0.4093,0.4654,0.4967,0.5733,0.6295,0.7273,0.6709,0.3117,0.0976,0.2807,0.5442,0.6375,0.2569,0.3405,0.5101,0.3045,0.2800,0.6246,0.3771,0.4014,0.4615,0.5620,0.6256,0.4598,0.7070,0.5702 142 | 0.3595,0.5513,0.4750,0.4636,0.7190,0.3204,0.2353,0.4420,0.5673,0.4917,0.3649,0.3576,0.6750,0.3333,0.0185,0.4303,0.4167,0.6379,0.2326,0.3592,0.4908,0.2195,0.3812,0.6054,0.2353,0.4052,0.3438,0.5983,0.5460,0.6036,0.6594,0.7124 143 | 0.2951,0.6401,0.5206,0.5709,0.6591,0.3395,0.3229,0.5495,0.5686,0.5595,0.6066,0.6925,0.7064,0.2419,0.0962,0.3153,0.5449,0.6663,0.2862,0.3659,0.5291,0.2773,0.3391,0.5106,0.2674,0.4076,0.4491,0.4554,0.5710,0.5513,0.6362,0.5094 144 | 0.4218,0.6719,0.5664,0.5043,0.6028,0.3230,0.3721,0.4173,0.4228,0.6338,0.6529,0.8596,0.5594,0.1944,0.0145,0.2656,0.5000,0.7517,0.3232,0.3162,0.5328,0.2768,0.3333,0.5039,0.2766,0.3516,0.3412,0.5027,0.5972,0.7069,0.6991,0.5312 145 | 
0.6857,0.5429,0.5326,0.6178,0.6423,0.5955,0.7251,0.6806,0.3062,0.8043,0.4459,0.6171,0.5597,0.3210,0.0719,0.3917,0.5461,0.7266,0.4286,0.2962,0.3162,0.1704,0.3741,0.6139,0.7577,0.5015,0.3714,0.3687,0.4449,0.6747,0.6229,0.6718 146 | 0.4356,0.6084,0.5299,0.5307,0.6614,0.3338,0.3799,0.4576,0.3897,0.5596,0.5884,0.7349,0.6730,0.2294,0.0480,0.3033,0.5694,0.6959,0.2564,0.3721,0.6041,0.2737,0.3681,0.5155,0.4256,0.3170,0.4382,0.5495,0.5309,0.5956,0.6352,0.5075 147 | 0.5253,0.6171,0.4603,0.5483,0.6532,0.2891,0.2577,0.4798,0.3516,0.6128,0.5995,0.5191,0.6522,0.1685,0.0598,0.2751,0.5453,0.6541,0.2549,0.3827,0.5618,0.1757,0.2581,0.5688,0.3010,0.4057,0.4065,0.5630,0.6210,0.5892,0.6554,0.5192 148 | 0.3533,0.6410,0.5307,0.6368,0.6698,0.3315,0.3651,0.5463,0.4562,0.5830,0.6366,0.8012,0.5623,0.2093,0.0643,0.2672,0.5912,0.7161,0.3547,0.4771,0.4811,0.2787,0.3709,0.5863,0.4267,0.3441,0.4842,0.5841,0.5762,0.5854,0.6887,0.4537 149 | 0.4750,0.7532,0.6174,0.8540,0.5833,0.3643,0.5299,0.5804,0.6061,0.5806,0.5946,0.8992,0.7273,0.1286,0.1957,0.1161,0.4800,0.5328,0.3846,0.4231,0.4634,0.0472,0.1096,0.3182,0.2526,0.5223,0.2258,0.4497,0.4130,0.5156,0.8493,0.6028 150 | 0.3592,0.5492,0.4822,0.4027,0.6261,0.3299,0.2927,0.4303,0.4183,0.4025,0.4308,0.5764,0.5321,0.2973,0.0586,0.3268,0.3806,0.6875,0.1649,0.3553,0.3702,0.2275,0.3832,0.5238,0.1098,0.3062,0.4296,0.6185,0.5690,0.6036,0.5720,0.6912 151 | 0.4295,0.5551,0.6026,0.5461,0.7345,0.3754,0.3485,0.5924,0.5118,0.6313,0.5814,0.7125,0.6713,0.2543,0.0919,0.3231,0.5398,0.6938,0.2205,0.3501,0.6068,0.2570,0.4005,0.5079,0.2816,0.2943,0.4349,0.5078,0.6425,0.5679,0.6759,0.5641 152 | 0.3546,0.4484,0.5326,0.4698,0.6048,0.4018,0.3363,0.5011,0.4065,0.5989,0.5262,0.5402,0.6261,0.3372,0.1296,0.3372,0.5743,0.7622,0.4563,0.2980,0.4652,0.2784,0.2414,0.4509,0.2839,0.2712,0.5176,0.4552,0.5822,0.5958,0.5718,0.6325 153 | 0.4604,0.7827,0.6667,0.7674,0.6485,0.3862,0.5025,0.6745,0.5528,0.6115,0.6114,0.8772,0.7632,0.1642,0.0459,0.1355,0.4688,0.5237,0.4965,0.5078,0.4886,0.1510,0.1938,0.4087,0.2917,0.6756,0.1826,0.3380,0.3939,0.5758,0.7978,0.5977 154 | 0.2004,0.7121,0.4264,0.3626,0.6016,0.2735,0.2467,0.4846,0.5446,0.3750,0.3043,0.4140,0.3835,0.2219,0.0431,0.3002,0.3208,0.5392,0.4636,0.4732,0.4162,0.2097,0.2686,0.6421,0.1777,0.5493,0.3254,0.6965,0.4987,0.5678,0.7763,0.6522 155 | 0.3514,0.7954,0.5671,0.8827,0.7297,0.3673,0.4458,0.6176,0.5870,0.6549,0.6334,0.8810,0.7647,0.1793,0.0401,0.1655,0.4698,0.6136,0.5458,0.4954,0.5213,0.1132,0.1953,0.3510,0.2316,0.6320,0.2166,0.3624,0.4685,0.4677,0.7619,0.5347 156 | 0.5758,0.6313,0.3786,0.6656,0.7119,0.5137,0.5662,0.5688,0.2626,0.6471,0.4094,0.7009,0.6466,0.1262,0.0349,0.2101,0.2862,0.7112,0.2939,0.3137,0.4942,0.0958,0.2753,0.5467,0.3427,0.3676,0.3492,0.4467,0.4571,0.5533,0.6494,0.6429 157 | 0.2222,0.6264,0.4710,0.5587,0.7850,0.2609,0.2917,0.3122,0.5591,0.3733,0.5512,0.6667,0.5498,0.1050,0.0386,0.3305,0.4751,0.7814,0.4366,0.5343,0.3904,0.2186,0.3320,0.7089,0.1704,0.4313,0.4694,0.6314,0.6282,0.2805,0.6940,0.5175 158 | 0.2296,0.6818,0.4329,0.5387,0.7055,0.2181,0.1847,0.4556,0.5257,0.5528,0.5535,0.6056,0.5655,0.1197,0.0535,0.2085,0.4699,0.6211,0.4444,0.5265,0.5214,0.1908,0.2996,0.4938,0.2167,0.5172,0.3436,0.6000,0.4669,0.3980,0.6623,0.4549 159 | 0.4711,0.7126,0.4516,0.8591,0.7281,0.4181,0.4214,0.5620,0.5936,0.5767,0.4895,0.8448,0.6295,0.1572,0.0350,0.1215,0.3149,0.6636,0.4338,0.4226,0.5224,0.1471,0.2453,0.3975,0.2948,0.5264,0.2953,0.4450,0.5503,0.4812,0.7351,0.6528 160 | 
0.2841,0.5897,0.4925,0.4079,0.6452,0.3182,0.2174,0.3086,0.4390,0.4032,0.4587,0.6111,0.6024,0.1250,0.0571,0.2759,0.4085,0.6932,0.3469,0.3974,0.3947,0.2258,0.3544,0.4603,0.1552,0.4054,0.3535,0.7000,0.4844,0.4744,0.7969,0.5714 161 | 0.3953,0.6122,0.7193,0.6033,0.6548,0.3125,0.3475,0.6000,0.4627,0.6000,0.6645,0.8911,0.7500,0.2537,0.0924,0.2443,0.4741,0.7313,0.2385,0.4609,0.6356,0.3131,0.2031,0.4386,0.4524,0.2828,0.4834,0.4852,0.6822,0.5763,0.5957,0.6015 162 | 0.5299,0.7065,0.6624,0.8708,0.7057,0.3549,0.5292,0.7195,0.5455,0.6376,0.5728,0.8529,0.7219,0.1885,0.0460,0.1505,0.4505,0.6237,0.4357,0.4261,0.5542,0.1712,0.3000,0.4174,0.4000,0.5672,0.2619,0.3957,0.6069,0.5079,0.7654,0.5408 163 | 0.5400,0.7635,0.6667,0.7967,0.6226,0.3929,0.3485,0.4828,0.7632,0.5472,0.5882,0.7333,0.5929,0.1868,0.0214,0.0606,0.3770,0.6017,0.3535,0.2871,0.4891,0.1875,0.1548,0.3065,0.1964,0.3929,0.2797,0.3977,0.5930,0.4638,0.8320,0.7030 164 | 0.5000,0.6125,0.7692,0.6786,0.6471,0.2857,0.0690,0.5116,0.7368,0.7273,0.7174,0.7059,0.6667,0.2000,0.0333,0.2941,0.7027,0.5571,0.3243,0.3784,0.4444,0.2000,0.1714,0.5000,0.0000,0.3827,0.1719,0.5263,0.8462,0.4483,0.7500,0.7429 165 | 0.2163,0.5986,0.4532,0.4602,0.7506,0.2069,0.2459,0.4199,0.5749,0.5486,0.4502,0.5983,0.5474,0.2571,0.0841,0.3071,0.4372,0.6548,0.3651,0.4378,0.4063,0.2314,0.3899,0.5700,0.2350,0.4378,0.4513,0.5797,0.4789,0.5283,0.6080,0.5191 166 | 0.3738,0.5497,0.5043,0.6362,0.7137,0.3697,0.3864,0.5368,0.5163,0.6851,0.6275,0.7392,0.7286,0.2892,0.1423,0.2713,0.5466,0.6522,0.2923,0.3780,0.5085,0.2773,0.4617,0.5726,0.3698,0.3508,0.5050,0.4176,0.5817,0.4949,0.5329,0.5225 167 | 0.2350,0.5900,0.4598,0.5252,0.7569,0.2545,0.2326,0.3295,0.5354,0.3822,0.5361,0.6390,0.4727,0.2102,0.0570,0.3319,0.3504,0.8030,0.4196,0.4609,0.3906,0.1689,0.2996,0.6195,0.1688,0.3615,0.4039,0.7417,0.5052,0.3852,0.6962,0.4370 168 | 0.6101,0.7344,0.5856,0.7466,0.5466,0.4162,0.5831,0.6566,0.5063,0.5700,0.5805,0.7888,0.6852,0.1199,0.0222,0.1957,0.4562,0.7278,0.3732,0.4335,0.5380,0.1404,0.3097,0.4433,0.4461,0.4645,0.3346,0.3540,0.5666,0.5298,0.6754,0.6563 169 | 0.3131,0.6193,0.4700,0.4954,0.6799,0.3047,0.3353,0.5831,0.5346,0.5822,0.5346,0.5902,0.6468,0.2341,0.0638,0.3132,0.5102,0.7468,0.2887,0.3371,0.4846,0.2758,0.3589,0.5526,0.2807,0.3937,0.4384,0.5698,0.5957,0.5251,0.6780,0.5633 170 | 0.5425,0.6552,0.3320,0.4586,0.6332,0.2374,0.2718,0.3607,0.3536,0.4498,0.4252,0.4532,0.6448,0.2214,0.0194,0.3081,0.3850,0.6900,0.2374,0.3969,0.5000,0.1296,0.4211,0.5941,0.3671,0.3468,0.3721,0.6936,0.4618,0.5528,0.6184,0.5802 171 | 0.5711,0.5046,0.5600,0.5838,0.7019,0.4132,0.4141,0.6624,0.4149,0.8098,0.6572,0.7929,0.6924,0.2759,0.0960,0.3238,0.6325,0.7542,0.2912,0.2095,0.5134,0.2993,0.4365,0.5935,0.5038,0.2722,0.5546,0.3562,0.6120,0.7335,0.5931,0.5156 172 | 0.3412,0.5787,0.3448,0.5625,0.7683,0.3176,0.2558,0.3793,0.4051,0.5349,0.5266,0.6992,0.6114,0.2636,0.0471,0.2400,0.5333,0.6885,0.3399,0.4161,0.3969,0.2500,0.3577,0.5155,0.2692,0.4365,0.4019,0.5638,0.3645,0.5420,0.6420,0.4690 173 | 0.4572,0.6800,0.6169,0.4570,0.7319,0.3277,0.3172,0.4035,0.3601,0.5741,0.5390,0.4944,0.7324,0.2430,0.0443,0.3597,0.5665,0.6254,0.3086,0.2857,0.4683,0.2575,0.1429,0.5196,0.2424,0.4149,0.2307,0.4716,0.5806,0.4918,0.6263,0.5778 174 | 0.3235,0.7619,0.4737,0.8814,0.7667,0.4328,0.6364,0.4318,0.5926,0.5417,0.6119,0.9167,0.6471,0.1923,0.0145,0.1471,0.2619,0.7945,0.4694,0.3857,0.4694,0.1176,0.2381,0.4571,0.4468,0.5246,0.3421,0.3827,0.6875,0.5000,0.7910,0.6212 175 | 
0.3356,0.6623,0.5237,0.5511,0.6722,0.3202,0.2946,0.4415,0.4944,0.5519,0.6042,0.5737,0.6473,0.2017,0.0705,0.3357,0.5036,0.7299,0.3399,0.4090,0.4988,0.2105,0.3400,0.5484,0.3129,0.4181,0.4341,0.5437,0.5475,0.4805,0.6386,0.5688 176 | 0.5924,0.6046,0.6349,0.4967,0.6677,0.3521,0.2468,0.4619,0.4941,0.5599,0.5215,0.4624,0.6245,0.2401,0.0606,0.2794,0.5644,0.6236,0.3799,0.3504,0.4662,0.2636,0.1439,0.4954,0.2614,0.4381,0.2564,0.5278,0.6750,0.4324,0.6488,0.5376 177 | 0.3463,0.6170,0.5672,0.5052,0.5957,0.3030,0.2843,0.4773,0.5209,0.5694,0.5542,0.6573,0.6184,0.2908,0.0620,0.3294,0.5592,0.7096,0.3291,0.3771,0.4774,0.2723,0.3330,0.5746,0.3030,0.4166,0.4417,0.6006,0.6205,0.6344,0.6571,0.5546 178 | 0.3705,0.6164,0.5143,0.5743,0.6986,0.3128,0.2996,0.4434,0.4328,0.5208,0.5596,0.7388,0.5781,0.2617,0.0304,0.3333,0.5144,0.7078,0.2979,0.4241,0.3994,0.2121,0.2478,0.5849,0.2053,0.3699,0.4563,0.5746,0.5847,0.4613,0.6906,0.6460 179 | 0.4514,0.6115,0.2920,0.5364,0.7474,0.4372,0.5328,0.4949,0.2927,0.7419,0.3252,0.6196,0.4560,0.2215,0.0420,0.3501,0.3807,0.6488,0.3955,0.5828,0.3659,0.1726,0.5098,0.4976,0.5664,0.5816,0.2838,0.7152,0.4335,0.5519,0.7532,0.5704 180 | 0.6096,0.6078,0.6980,0.4538,0.4407,0.3093,0.3645,0.5463,0.3283,0.5254,0.5977,0.3529,0.6585,0.1937,0.0516,0.2147,0.4855,0.6847,0.1726,0.2378,0.5584,0.2500,0.1637,0.5500,0.3613,0.4141,0.2231,0.4112,0.5067,0.6791,0.6550,0.4415 181 | 0.6508,0.5403,0.6035,0.7908,0.5500,0.5935,0.5458,0.7778,0.3354,0.7263,0.6006,0.9065,0.5758,0.2618,0.0782,0.1214,0.6513,0.7427,0.4510,0.4937,0.4163,0.2900,0.3469,0.4963,0.5092,0.3756,0.3168,0.2972,0.3820,0.7063,0.5882,0.6988 182 | 0.2883,0.6000,0.3609,0.4310,0.6029,0.2294,0.2749,0.4088,0.4348,0.4044,0.5120,0.5677,0.5699,0.1990,0.0352,0.2865,0.3981,0.7424,0.3220,0.3668,0.3367,0.1888,0.3632,0.5168,0.2500,0.3214,0.4545,0.5347,0.4427,0.4716,0.6325,0.4769 183 | 0.3879,0.6909,0.7165,0.6213,0.7522,0.3006,0.3889,0.6276,0.4267,0.6639,0.6229,0.8000,0.6684,0.3566,0.0411,0.3650,0.6708,0.6692,0.2720,0.2360,0.6279,0.3358,0.2349,0.5739,0.3214,0.4152,0.2407,0.5205,0.4884,0.5781,0.6622,0.6797 184 | 0.4523,0.6113,0.8205,0.7119,0.7170,0.3561,0.3423,0.6875,0.5797,0.7310,0.7332,0.8641,0.7883,0.2903,0.1180,0.2480,0.6446,0.6453,0.2020,0.3337,0.6923,0.2726,0.2974,0.4317,0.2413,0.3265,0.4519,0.3991,0.6886,0.5730,0.6846,0.4874 185 | 0.4815,0.6126,0.4414,0.6730,0.6341,0.4895,0.5875,0.5526,0.3099,0.6538,0.5538,0.6900,0.6628,0.1062,0.0437,0.1940,0.3706,0.7081,0.3140,0.3496,0.5517,0.1078,0.3630,0.5812,0.4318,0.3164,0.2486,0.4638,0.4035,0.5984,0.6575,0.6759 186 | 0.2303,0.6424,0.4081,0.5220,0.6172,0.2585,0.2803,0.3652,0.4083,0.4955,0.5347,0.5794,0.5054,0.1731,0.0375,0.3627,0.4478,0.6764,0.4226,0.5465,0.4049,0.2500,0.3629,0.5670,0.2776,0.4163,0.3764,0.6434,0.4171,0.4371,0.6476,0.5179 187 | 0.4271,0.8069,0.6059,0.8251,0.5566,0.4545,0.5000,0.5850,0.5970,0.5000,0.5273,0.7328,0.7176,0.1744,0.0417,0.1644,0.2883,0.5165,0.4267,0.4424,0.5480,0.2207,0.1883,0.3930,0.2800,0.6468,0.2131,0.3500,0.5661,0.5524,0.7984,0.7339 188 | 0.3987,0.5463,0.4979,0.5967,0.5890,0.3909,0.4255,0.4856,0.4356,0.5491,0.5959,0.8613,0.5415,0.2339,0.0419,0.2167,0.5412,0.6869,0.3483,0.4118,0.4223,0.3684,0.3333,0.5934,0.3333,0.3256,0.5275,0.5730,0.5748,0.6230,0.6648,0.5291 189 | 0.4643,0.5664,0.3961,0.3733,0.6321,0.2930,0.2333,0.4094,0.4088,0.5000,0.3752,0.3564,0.6180,0.2258,0.0423,0.3477,0.4279,0.6185,0.2681,0.2151,0.4083,0.2549,0.3048,0.5416,0.2939,0.3904,0.2880,0.7072,0.6941,0.5064,0.5892,0.6693 190 | 
0.2828,0.6404,0.4083,0.2892,0.7386,0.2731,0.1651,0.3333,0.4620,0.4540,0.2701,0.4767,0.3120,0.2297,0.0230,0.3579,0.3275,0.5943,0.3883,0.4237,0.4000,0.1420,0.2684,0.5294,0.1330,0.5202,0.1977,0.7119,0.5523,0.4615,0.6742,0.6550 191 | 0.3950,0.5677,0.6674,0.6316,0.5858,0.3241,0.2974,0.6561,0.4524,0.5688,0.6679,0.8862,0.7133,0.1861,0.0707,0.2108,0.6289,0.6935,0.2711,0.4488,0.7118,0.3342,0.2714,0.5181,0.2634,0.3362,0.4053,0.4850,0.6606,0.5582,0.7054,0.4941 192 | 0.6239,0.6223,0.4672,0.5982,0.6805,0.4930,0.6230,0.5773,0.3329,0.7239,0.4711,0.6590,0.5966,0.2721,0.0525,0.3771,0.5112,0.7374,0.4067,0.4947,0.3929,0.2136,0.3660,0.5581,0.6707,0.4556,0.3678,0.4622,0.4867,0.5401,0.6742,0.6063 193 | 0.3802,0.5619,0.4302,0.4471,0.6706,0.3758,0.3339,0.4399,0.4472,0.6088,0.5395,0.7455,0.6195,0.2116,0.0604,0.2689,0.4912,0.7184,0.2953,0.3267,0.5668,0.3324,0.3650,0.5335,0.3148,0.2831,0.4114,0.6201,0.5697,0.5743,0.6923,0.5667 194 | 0.6111,0.6364,0.5484,0.4854,0.6145,0.4400,0.2805,0.3761,0.2951,0.4886,0.5678,0.5882,0.7387,0.2212,0.0577,0.3580,0.5119,0.6915,0.4030,0.2195,0.4286,0.8261,0.2478,0.5912,0.3273,0.3846,0.1875,0.5118,0.4364,0.4886,0.6042,0.6296 195 | 0.5188,0.7352,0.6200,0.8971,0.6726,0.3588,0.6024,0.6554,0.5630,0.6027,0.6247,0.8692,0.7378,0.1395,0.1052,0.1722,0.3746,0.6347,0.4698,0.4213,0.5051,0.1196,0.1734,0.4056,0.3779,0.5652,0.2422,0.4136,0.5479,0.5722,0.7908,0.5768 196 | 0.4510,0.8129,0.6105,0.8485,0.6610,0.3852,0.4070,0.6750,0.4737,0.5085,0.4425,0.8750,0.6730,0.1609,0.0448,0.0617,0.3218,0.7379,0.3864,0.5392,0.4085,0.1791,0.1860,0.4080,0.1719,0.4645,0.4217,0.2500,0.5862,0.3088,0.6486,0.7083 197 | 0.4543,0.6487,0.4329,0.4930,0.6458,0.2534,0.3304,0.5276,0.4360,0.5679,0.4939,0.5590,0.5580,0.1772,0.0300,0.3158,0.5244,0.6490,0.3450,0.4344,0.5734,0.2018,0.3684,0.5651,0.3475,0.3724,0.4049,0.6006,0.5357,0.5332,0.6899,0.4809 198 | 0.3055,0.6211,0.4740,0.5626,0.6014,0.2465,0.2624,0.3921,0.4603,0.3800,0.5679,0.5890,0.6160,0.1862,0.0661,0.2660,0.4980,0.6159,0.2751,0.3955,0.5032,0.2265,0.2418,0.4697,0.2571,0.3804,0.4135,0.5588,0.4552,0.4687,0.6779,0.5221 199 | 0.4017,0.4813,0.5507,0.3824,0.5652,0.3750,0.1908,0.4557,0.3902,0.4271,0.4959,0.4967,0.6636,0.2857,0.0561,0.3813,0.5901,0.7477,0.1032,0.3554,0.5491,0.2097,0.2303,0.5000,0.2464,0.3175,0.3180,0.4063,0.6115,0.5357,0.6283,0.5755 200 | 0.3018,0.6176,0.5000,0.5233,0.6383,0.3343,0.3632,0.4261,0.3656,0.5440,0.5699,0.7500,0.4843,0.1905,0.0503,0.2270,0.4842,0.7061,0.2107,0.3227,0.5331,0.2684,0.4336,0.5916,0.2947,0.3448,0.4154,0.5812,0.4387,0.7414,0.6276,0.5039 201 | 0.3874,0.6204,0.3471,0.4944,0.7189,0.3128,0.2646,0.3276,0.5099,0.4439,0.4814,0.6988,0.5193,0.1911,0.0594,0.3565,0.3480,0.7115,0.3797,0.5340,0.3143,0.1686,0.4121,0.6565,0.3753,0.4228,0.4634,0.5957,0.6105,0.5529,0.6602,0.4332 202 | 0.2812,0.7333,0.2444,0.7821,0.8333,0.3279,0.4146,0.3404,0.5484,0.3529,0.3656,0.7612,0.5000,0.1250,0.0519,0.0714,0.1000,0.6000,0.4737,0.4211,0.1667,0.0682,0.4717,0.5152,0.1667,0.6092,0.1667,0.5111,0.3400,0.5227,0.8810,0.7222 203 | 0.3538,0.6740,0.4089,0.4646,0.5181,0.1966,0.2161,0.4094,0.5183,0.3636,0.4826,0.4601,0.6420,0.1770,0.0569,0.2572,0.5289,0.6407,0.2594,0.4036,0.4416,0.2132,0.2050,0.4812,0.1306,0.4920,0.3779,0.6326,0.5226,0.4779,0.7126,0.5963 204 | 0.4733,0.5947,0.5232,0.5726,0.6630,0.2698,0.3367,0.4630,0.4418,0.6033,0.5914,0.7052,0.6076,0.2004,0.0671,0.2755,0.5410,0.6724,0.3358,0.4424,0.5835,0.2567,0.3018,0.5905,0.3930,0.3623,0.4311,0.4810,0.5575,0.5474,0.6072,0.4523 205 | 
0.5109,0.6075,0.4000,0.2525,0.6807,0.4010,0.3571,0.4417,0.3729,0.5789,0.2040,0.3109,0.4828,0.2069,0.0341,0.3247,0.4091,0.6583,0.2792,0.3522,0.3663,0.1419,0.2453,0.6233,0.2640,0.5829,0.2500,0.6592,0.6223,0.4752,0.6235,0.5737 206 | 0.2159,0.6015,0.3483,0.3850,0.6136,0.3141,0.2623,0.3538,0.4215,0.3942,0.4194,0.5286,0.5529,0.1657,0.0475,0.3483,0.3956,0.6690,0.4524,0.4073,0.3691,0.2522,0.3923,0.5330,0.2454,0.3469,0.4510,0.6341,0.4315,0.5151,0.5574,0.5847 207 | 0.1798,0.6000,0.2587,0.4653,0.7435,0.4015,0.4119,0.2644,0.3481,0.2246,0.2962,0.3070,0.5382,0.0823,0.0241,0.2977,0.1926,0.6802,0.2597,0.2625,0.2802,0.1070,0.2243,0.5391,0.2308,0.4244,0.1909,0.3597,0.2201,0.2234,0.5797,0.5748 208 | 0.2703,0.6223,0.4205,0.4103,0.7243,0.3102,0.2792,0.4498,0.3955,0.5690,0.4941,0.6218,0.5201,0.2248,0.0304,0.2750,0.3849,0.6697,0.2931,0.5130,0.5094,0.1904,0.2794,0.5544,0.2529,0.3384,0.3376,0.6653,0.6202,0.4436,0.7086,0.5647 209 | 0.4844,0.6288,0.7125,0.6764,0.6803,0.3845,0.3839,0.6779,0.5672,0.7380,0.7279,0.8639,0.7518,0.2837,0.0987,0.2457,0.6424,0.7017,0.2169,0.3307,0.6734,0.2885,0.2710,0.4271,0.2978,0.3149,0.4905,0.4312,0.7336,0.6078,0.6778,0.5469 210 | 0.5238,0.8333,0.3457,0.8871,0.6786,0.4091,0.5455,0.6707,0.5833,0.7037,0.5778,0.7283,0.6591,0.1696,0.0164,0.1463,0.5075,0.5745,0.4600,0.4846,0.5294,0.1688,0.1235,0.3985,0.4727,0.6986,0.1742,0.4783,0.4955,0.5942,0.8129,0.5980 211 | 0.3799,0.7015,0.5049,0.6790,0.7406,0.2927,0.2666,0.6036,0.6040,0.6115,0.6230,0.8316,0.6413,0.2354,0.0506,0.2474,0.5834,0.6119,0.3802,0.5413,0.5100,0.2931,0.3163,0.5737,0.3709,0.4976,0.3063,0.5542,0.5723,0.5257,0.7085,0.3837 212 | 0.5972,0.6737,0.6703,0.4384,0.5270,0.4762,0.3103,0.4717,0.4228,0.5000,0.3492,0.1553,0.6064,0.2410,0.0460,0.1711,0.4684,0.6385,0.2923,0.0758,0.5119,0.2929,0.1164,0.6216,0.3600,0.3579,0.2315,0.5372,0.5732,0.5738,0.5824,0.3800 213 | 0.3103,0.6081,0.1628,0.3065,0.6970,0.3382,0.2969,0.2083,0.4828,0.4375,0.3191,0.5000,0.5600,0.1356,0.0274,0.3103,0.2683,0.6974,0.2692,0.3553,0.2143,0.0645,0.2826,0.6232,0.2273,0.3200,0.4177,0.5676,0.2712,0.5122,0.5556,0.5769 214 | 0.4129,0.6077,0.3981,0.5481,0.7075,0.2944,0.3720,0.5012,0.3216,0.5497,0.4880,0.6016,0.6238,0.1587,0.0405,0.2704,0.4837,0.6709,0.2279,0.4198,0.4826,0.2830,0.3453,0.5207,0.2494,0.3883,0.3869,0.5626,0.5063,0.5753,0.6484,0.5227 215 | 0.7209,0.5844,0.6572,0.8429,0.6957,0.6228,0.6091,0.7893,0.3256,0.6911,0.6134,0.8921,0.5986,0.2022,0.0632,0.0938,0.6163,0.6887,0.5173,0.4458,0.4890,0.2485,0.3053,0.5034,0.4493,0.4559,0.2488,0.2840,0.5192,0.6961,0.6511,0.6667 216 | 0.3127,0.5883,0.3468,0.4880,0.6473,0.3235,0.3785,0.3758,0.3787,0.5861,0.5092,0.6044,0.5428,0.1782,0.0674,0.3202,0.4897,0.7046,0.2935,0.4165,0.3757,0.2898,0.4177,0.6101,0.3572,0.3622,0.4165,0.6093,0.4702,0.5673,0.5751,0.5260 217 | 0.3043,0.7792,0.7857,0.8197,0.9231,0.2273,0.3214,0.5385,0.6190,0.5385,0.6444,0.8000,0.8182,0.0345,0.1061,0.1379,0.6429,0.4638,0.6769,0.5952,0.4286,0.0435,0.0968,0.5200,0.1200,0.6000,0.1868,0.3846,0.5862,0.1923,0.7671,0.4651 218 | --------------------------------------------------------------------------------