├── Adult UCI Dataset Analysis.ipynb ├── Amazon Fine Food Review.ipynb ├── Basic Time Series Analysis.ipynb ├── Boston Data Analysis.ipynb ├── Boston Model Deployment.ipynb ├── Boston Neural Network Model.ipynb ├── Breast Cancer Dignostics.ipynb ├── Classification of organic Chemical Compound.ipynb ├── Complete Heart Disease Analysis with R.ipynb ├── Decision Tree.ipynb ├── Deep Learning ├── Heart Disease Analysis with R.ipynb ├── Heart Disease.ipynb ├── IRIS complete end to end model deployment.ipynb ├── Iris Data Set and ML .ipynb ├── Movie Recommender System.ipynb ├── Penguin Dataset.ipynb ├── README.md ├── Report (Musk , non-musk) ├── Seaborn.ipynb ├── Sonar Data set Neural Network model.ipynb ├── Sonar dataset.ipynb ├── Training a poem LSTM.ipynb ├── sonar.all-data └── wine .ipynb /Boston Neural Network Model.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 5, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# Boston Housing Data set\n", 10 | "# Regression Model with help of Keras Library\n", 11 | "import numpy\n", 12 | "import pandas\n", 13 | "from keras.models import Sequential\n", 14 | "from keras.layers import Dense\n", 15 | "from keras.wrappers.scikit_learn import KerasRegressor\n", 16 | "from sklearn.model_selection import cross_val_score\n", 17 | "from sklearn.model_selection import KFold\n", 18 | "from sklearn.preprocessing import StandardScaler\n", 19 | "from sklearn.pipeline import Pipeline\n" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 6, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "from sklearn.datasets import load_boston" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 7, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "Boston = load_boston()" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 9, 43 | "metadata": {}, 44 | "outputs": [ 45 | { 46 | "data": { 47 | "text/plain": [ 48 | "{'data': array([[6.3200e-03, 1.8000e+01, 2.3100e+00, ..., 1.5300e+01, 3.9690e+02,\n", 49 | " 4.9800e+00],\n", 50 | " [2.7310e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9690e+02,\n", 51 | " 9.1400e+00],\n", 52 | " [2.7290e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9283e+02,\n", 53 | " 4.0300e+00],\n", 54 | " ...,\n", 55 | " [6.0760e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,\n", 56 | " 5.6400e+00],\n", 57 | " [1.0959e-01, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9345e+02,\n", 58 | " 6.4800e+00],\n", 59 | " [4.7410e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,\n", 60 | " 7.8800e+00]]),\n", 61 | " 'target': array([24. , 21.6, 34.7, 33.4, 36.2, 28.7, 22.9, 27.1, 16.5, 18.9, 15. ,\n", 62 | " 18.9, 21.7, 20.4, 18.2, 19.9, 23.1, 17.5, 20.2, 18.2, 13.6, 19.6,\n", 63 | " 15.2, 14.5, 15.6, 13.9, 16.6, 14.8, 18.4, 21. , 12.7, 14.5, 13.2,\n", 64 | " 13.1, 13.5, 18.9, 20. , 21. , 24.7, 30.8, 34.9, 26.6, 25.3, 24.7,\n", 65 | " 21.2, 19.3, 20. , 16.6, 14.4, 19.4, 19.7, 20.5, 25. , 23.4, 18.9,\n", 66 | " 35.4, 24.7, 31.6, 23.3, 19.6, 18.7, 16. , 22.2, 25. , 33. , 23.5,\n", 67 | " 19.4, 22. , 17.4, 20.9, 24.2, 21.7, 22.8, 23.4, 24.1, 21.4, 20. ,\n", 68 | " 20.8, 21.2, 20.3, 28. , 23.9, 24.8, 22.9, 23.9, 26.6, 22.5, 22.2,\n", 69 | " 23.6, 28.7, 22.6, 22. , 22.9, 25. , 20.6, 28.4, 21.4, 38.7, 43.8,\n", 70 | " 33.2, 27.5, 26.5, 18.6, 19.3, 20.1, 19.5, 19.5, 20.4, 19.8, 19.4,\n", 71 | " 21.7, 22.8, 18.8, 18.7, 18.5, 18.3, 21.2, 19.2, 20.4, 19.3, 22. 
,\n", 72 | " 20.3, 20.5, 17.3, 18.8, 21.4, 15.7, 16.2, 18. , 14.3, 19.2, 19.6,\n", 73 | " 23. , 18.4, 15.6, 18.1, 17.4, 17.1, 13.3, 17.8, 14. , 14.4, 13.4,\n", 74 | " 15.6, 11.8, 13.8, 15.6, 14.6, 17.8, 15.4, 21.5, 19.6, 15.3, 19.4,\n", 75 | " 17. , 15.6, 13.1, 41.3, 24.3, 23.3, 27. , 50. , 50. , 50. , 22.7,\n", 76 | " 25. , 50. , 23.8, 23.8, 22.3, 17.4, 19.1, 23.1, 23.6, 22.6, 29.4,\n", 77 | " 23.2, 24.6, 29.9, 37.2, 39.8, 36.2, 37.9, 32.5, 26.4, 29.6, 50. ,\n", 78 | " 32. , 29.8, 34.9, 37. , 30.5, 36.4, 31.1, 29.1, 50. , 33.3, 30.3,\n", 79 | " 34.6, 34.9, 32.9, 24.1, 42.3, 48.5, 50. , 22.6, 24.4, 22.5, 24.4,\n", 80 | " 20. , 21.7, 19.3, 22.4, 28.1, 23.7, 25. , 23.3, 28.7, 21.5, 23. ,\n", 81 | " 26.7, 21.7, 27.5, 30.1, 44.8, 50. , 37.6, 31.6, 46.7, 31.5, 24.3,\n", 82 | " 31.7, 41.7, 48.3, 29. , 24. , 25.1, 31.5, 23.7, 23.3, 22. , 20.1,\n", 83 | " 22.2, 23.7, 17.6, 18.5, 24.3, 20.5, 24.5, 26.2, 24.4, 24.8, 29.6,\n", 84 | " 42.8, 21.9, 20.9, 44. , 50. , 36. , 30.1, 33.8, 43.1, 48.8, 31. ,\n", 85 | " 36.5, 22.8, 30.7, 50. , 43.5, 20.7, 21.1, 25.2, 24.4, 35.2, 32.4,\n", 86 | " 32. , 33.2, 33.1, 29.1, 35.1, 45.4, 35.4, 46. , 50. , 32.2, 22. ,\n", 87 | " 20.1, 23.2, 22.3, 24.8, 28.5, 37.3, 27.9, 23.9, 21.7, 28.6, 27.1,\n", 88 | " 20.3, 22.5, 29. , 24.8, 22. , 26.4, 33.1, 36.1, 28.4, 33.4, 28.2,\n", 89 | " 22.8, 20.3, 16.1, 22.1, 19.4, 21.6, 23.8, 16.2, 17.8, 19.8, 23.1,\n", 90 | " 21. , 23.8, 23.1, 20.4, 18.5, 25. , 24.6, 23. , 22.2, 19.3, 22.6,\n", 91 | " 19.8, 17.1, 19.4, 22.2, 20.7, 21.1, 19.5, 18.5, 20.6, 19. , 18.7,\n", 92 | " 32.7, 16.5, 23.9, 31.2, 17.5, 17.2, 23.1, 24.5, 26.6, 22.9, 24.1,\n", 93 | " 18.6, 30.1, 18.2, 20.6, 17.8, 21.7, 22.7, 22.6, 25. , 19.9, 20.8,\n", 94 | " 16.8, 21.9, 27.5, 21.9, 23.1, 50. , 50. , 50. , 50. , 50. , 13.8,\n", 95 | " 13.8, 15. , 13.9, 13.3, 13.1, 10.2, 10.4, 10.9, 11.3, 12.3, 8.8,\n", 96 | " 7.2, 10.5, 7.4, 10.2, 11.5, 15.1, 23.2, 9.7, 13.8, 12.7, 13.1,\n", 97 | " 12.5, 8.5, 5. , 6.3, 5.6, 7.2, 12.1, 8.3, 8.5, 5. , 11.9,\n", 98 | " 27.9, 17.2, 27.5, 15. , 17.2, 17.9, 16.3, 7. , 7.2, 7.5, 10.4,\n", 99 | " 8.8, 8.4, 16.7, 14.2, 20.8, 13.4, 11.7, 8.3, 10.2, 10.9, 11. ,\n", 100 | " 9.5, 14.5, 14.1, 16.1, 14.3, 11.7, 13.4, 9.6, 8.7, 8.4, 12.8,\n", 101 | " 10.5, 17.1, 18.4, 15.4, 10.8, 11.8, 14.9, 12.6, 14.1, 13. , 13.4,\n", 102 | " 15.2, 16.1, 17.8, 14.9, 14.1, 12.7, 13.5, 14.9, 20. , 16.4, 17.7,\n", 103 | " 19.5, 20.2, 21.4, 19.9, 19. , 19.1, 19.1, 20.1, 19.9, 19.6, 23.2,\n", 104 | " 29.8, 13.8, 13.3, 16.7, 12. , 14.6, 21.4, 23. , 23.7, 25. , 21.8,\n", 105 | " 20.6, 21.2, 19.1, 20.6, 15.2, 7. , 8.1, 13.6, 20.1, 21.8, 24.5,\n", 106 | " 23.1, 19.7, 18.3, 21.2, 17.5, 16.8, 22.4, 20.6, 23.9, 22. 
, 11.9]),\n", 107 | " 'feature_names': array(['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD',\n", 108 | " 'TAX', 'PTRATIO', 'B', 'LSTAT'], dtype='\n", 150 | "\n", 163 | "\n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | " \n", 264 | " \n", 265 | " \n", 266 | " \n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | "
      CRIM    ZN  INDUS  CHAS    NOX     RM   AGE     DIS  RAD    TAX  PTRATIO       B  LSTAT  Target
0  0.00632  18.0   2.31   0.0  0.538  6.575  65.2  4.0900  1.0  296.0     15.3  396.90   4.98    24.0
1  0.02731   0.0   7.07   0.0  0.469  6.421  78.9  4.9671  2.0  242.0     17.8  396.90   9.14    21.6
2  0.02729   0.0   7.07   0.0  0.469  7.185  61.1  4.9671  2.0  242.0     17.8  392.83   4.03    34.7
3  0.03237   0.0   2.18   0.0  0.458  6.998  45.8  6.0622  3.0  222.0     18.7  394.63   2.94    33.4
4  0.06905   0.0   2.18   0.0  0.458  7.147  54.2  6.0622  3.0  222.0     18.7  396.90   5.33    36.2
\n", 271 | "" 272 | ], 273 | "text/plain": [ 274 | " CRIM ZN INDUS CHAS NOX RM AGE DIS RAD TAX \\\n", 275 | "0 0.00632 18.0 2.31 0.0 0.538 6.575 65.2 4.0900 1.0 296.0 \n", 276 | "1 0.02731 0.0 7.07 0.0 0.469 6.421 78.9 4.9671 2.0 242.0 \n", 277 | "2 0.02729 0.0 7.07 0.0 0.469 7.185 61.1 4.9671 2.0 242.0 \n", 278 | "3 0.03237 0.0 2.18 0.0 0.458 6.998 45.8 6.0622 3.0 222.0 \n", 279 | "4 0.06905 0.0 2.18 0.0 0.458 7.147 54.2 6.0622 3.0 222.0 \n", 280 | "\n", 281 | " PTRATIO B LSTAT Target \n", 282 | "0 15.3 396.90 4.98 24.0 \n", 283 | "1 17.8 396.90 9.14 21.6 \n", 284 | "2 17.8 392.83 4.03 34.7 \n", 285 | "3 18.7 394.63 2.94 33.4 \n", 286 | "4 18.7 396.90 5.33 36.2 " 287 | ] 288 | }, 289 | "execution_count": 17, 290 | "metadata": {}, 291 | "output_type": "execute_result" 292 | } 293 | ], 294 | "source": [ 295 | "df.head()" 296 | ] 297 | }, 298 | { 299 | "cell_type": "code", 300 | "execution_count": 18, 301 | "metadata": {}, 302 | "outputs": [ 303 | { 304 | "data": { 305 | "text/plain": [ 306 | "(506, 14)" 307 | ] 308 | }, 309 | "execution_count": 18, 310 | "metadata": {}, 311 | "output_type": "execute_result" 312 | } 313 | ], 314 | "source": [ 315 | "df.shape" 316 | ] 317 | }, 318 | { 319 | "cell_type": "code", 320 | "execution_count": 21, 321 | "metadata": {}, 322 | "outputs": [], 323 | "source": [ 324 | "# split into input and output variables\n", 325 | "X= df.iloc[:,0:13]\n", 326 | "Y= df.iloc[:, 13]" 327 | ] 328 | }, 329 | { 330 | "cell_type": "code", 331 | "execution_count": 23, 332 | "metadata": {}, 333 | "outputs": [ 334 | { 335 | "name": "stdout", 336 | "output_type": "stream", 337 | "text": [ 338 | "WARNING:tensorflow:From C:\\Users\\HP\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py:422: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", 339 | "\n", 340 | "Model : -22.15 (26.20) MSE\n" 341 | ] 342 | } 343 | ], 344 | "source": [ 345 | "def model():\n", 346 | " # create model\n", 347 | " model=Sequential()\n", 348 | " model.add(Dense(20,input_dim=13, kernel_initializer='normal', activation='relu'))\n", 349 | " model.add(Dense(1, kernel_initializer='normal'))\n", 350 | " # compile model\n", 351 | " model.compile(loss='mean_squared_error', optimizer='adam')\n", 352 | " return model\n", 353 | "\n", 354 | "# fix random seed for reproducibility\n", 355 | "seed = 7\n", 356 | "numpy.random.seed(seed)\n", 357 | "# evaluate model with standardized dataset\n", 358 | "estimators=[]\n", 359 | "estimators.append(('standardize', StandardScaler()))\n", 360 | "estimators.append(('mlp', KerasRegressor(build_fn=model , epochs = 150 , batch_size=5, verbose=0)))\n", 361 | "pipeline = Pipeline(estimators)\n", 362 | "kfold = KFold(n_splits=10, random_state= seed)\n", 363 | "results = cross_val_score(pipeline, X, Y, cv=kfold)\n", 364 | "print('Model : %.2f (%.2f) MSE' %(results.mean(), results.std()))\n", 365 | " " 366 | ] 367 | }, 368 | { 369 | "cell_type": "code", 370 | "execution_count": null, 371 | "metadata": {}, 372 | "outputs": [], 373 | "source": [ 374 | " ##Reasonable performance for models evaluated using Mean Squared Error (MSE)\n", 375 | " # are around 22 in squared thousands of dollars (or $4,700 if you take the square root)." 
376 | ] 377 | } 378 | ], 379 | "metadata": { 380 | "kernelspec": { 381 | "display_name": "Python 3", 382 | "language": "python", 383 | "name": "python3" 384 | }, 385 | "language_info": { 386 | "codemirror_mode": { 387 | "name": "ipython", 388 | "version": 3 389 | }, 390 | "file_extension": ".py", 391 | "mimetype": "text/x-python", 392 | "name": "python", 393 | "nbconvert_exporter": "python", 394 | "pygments_lexer": "ipython3", 395 | "version": "3.7.3" 396 | } 397 | }, 398 | "nbformat": 4, 399 | "nbformat_minor": 2 400 | } 401 | -------------------------------------------------------------------------------- /Decision Tree.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Implementing Decision tree classifier on Iris dataset." 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 2, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import numpy as np\n", 17 | "import pandas as pd\n", 18 | "import matplotlib.pyplot as plt\n", 19 | "from sklearn.datasets import load_iris\n", 20 | "from sklearn.tree import DecisionTreeClassifier\n", 21 | "from sklearn.model_selection import train_test_split\n", 22 | "from sklearn import metrics\n" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 3, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "iris = load_iris()" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 12, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "X= iris.data\n", 41 | "Y= iris.target\n", 42 | "X_train, X_test, Y_train, Y_test = train_test_split(X, Y, random_state=0)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 13, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "DT= DecisionTreeClassifier(random_state=5)\n", 52 | "fit= DT.fit(X_train, Y_train)\n", 53 | "predict= DT.predict(X_test)\n" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 14, 59 | "metadata": {}, 60 | "outputs": [ 61 | { 62 | "data": { 63 | "text/plain": [ 64 | "DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n", 65 | " max_features=None, max_leaf_nodes=None,\n", 66 | " min_impurity_decrease=0.0, min_impurity_split=None,\n", 67 | " min_samples_leaf=1, min_samples_split=2,\n", 68 | " min_weight_fraction_leaf=0.0, presort=False,\n", 69 | " random_state=5, splitter='best')" 70 | ] 71 | }, 72 | "execution_count": 14, 73 | "metadata": {}, 74 | "output_type": "execute_result" 75 | } 76 | ], 77 | "source": [ 78 | "DT" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": 15, 84 | "metadata": { 85 | "collapsed": true 86 | }, 87 | "outputs": [ 88 | { 89 | "data": { 90 | "text/plain": [ 91 | "array([[5.9, 3. , 4.2, 1.5],\n", 92 | " [5.8, 2.6, 4. , 1.2],\n", 93 | " [6.8, 3. , 5.5, 2.1],\n", 94 | " [4.7, 3.2, 1.3, 0.2],\n", 95 | " [6.9, 3.1, 5.1, 2.3],\n", 96 | " [5. , 3.5, 1.6, 0.6],\n", 97 | " [5.4, 3.7, 1.5, 0.2],\n", 98 | " [5. , 2. , 3.5, 1. ],\n", 99 | " [6.5, 3. , 5.5, 1.8],\n", 100 | " [6.7, 3.3, 5.7, 2.5],\n", 101 | " [6. , 2.2, 5. , 1.5],\n", 102 | " [6.7, 2.5, 5.8, 1.8],\n", 103 | " [5.6, 2.5, 3.9, 1.1],\n", 104 | " [7.7, 3. , 6.1, 2.3],\n", 105 | " [6.3, 3.3, 4.7, 1.6],\n", 106 | " [5.5, 2.4, 3.8, 1.1],\n", 107 | " [6.3, 2.7, 4.9, 1.8],\n", 108 | " [6.3, 2.8, 5.1, 1.5],\n", 109 | " [4.9, 2.5, 4.5, 1.7],\n", 110 | " [6.3, 2.5, 5. , 1.9],\n", 111 | " [7. 
, 3.2, 4.7, 1.4],\n", 112 | " [6.5, 3. , 5.2, 2. ],\n", 113 | " [6. , 3.4, 4.5, 1.6],\n", 114 | " [4.8, 3.1, 1.6, 0.2],\n", 115 | " [5.8, 2.7, 5.1, 1.9],\n", 116 | " [5.6, 2.7, 4.2, 1.3],\n", 117 | " [5.6, 2.9, 3.6, 1.3],\n", 118 | " [5.5, 2.5, 4. , 1.3],\n", 119 | " [6.1, 3. , 4.6, 1.4],\n", 120 | " [7.2, 3.2, 6. , 1.8],\n", 121 | " [5.3, 3.7, 1.5, 0.2],\n", 122 | " [4.3, 3. , 1.1, 0.1],\n", 123 | " [6.4, 2.7, 5.3, 1.9],\n", 124 | " [5.7, 3. , 4.2, 1.2],\n", 125 | " [5.4, 3.4, 1.7, 0.2],\n", 126 | " [5.7, 4.4, 1.5, 0.4],\n", 127 | " [6.9, 3.1, 4.9, 1.5],\n", 128 | " [4.6, 3.1, 1.5, 0.2],\n", 129 | " [5.9, 3. , 5.1, 1.8],\n", 130 | " [5.1, 2.5, 3. , 1.1],\n", 131 | " [4.6, 3.4, 1.4, 0.3],\n", 132 | " [6.2, 2.2, 4.5, 1.5],\n", 133 | " [7.2, 3.6, 6.1, 2.5],\n", 134 | " [5.7, 2.9, 4.2, 1.3],\n", 135 | " [4.8, 3. , 1.4, 0.1],\n", 136 | " [7.1, 3. , 5.9, 2.1],\n", 137 | " [6.9, 3.2, 5.7, 2.3],\n", 138 | " [6.5, 3. , 5.8, 2.2],\n", 139 | " [6.4, 2.8, 5.6, 2.1],\n", 140 | " [5.1, 3.8, 1.6, 0.2],\n", 141 | " [4.8, 3.4, 1.6, 0.2],\n", 142 | " [6.5, 3.2, 5.1, 2. ],\n", 143 | " [6.7, 3.3, 5.7, 2.1],\n", 144 | " [4.5, 2.3, 1.3, 0.3],\n", 145 | " [6.2, 3.4, 5.4, 2.3],\n", 146 | " [4.9, 3. , 1.4, 0.2],\n", 147 | " [5.7, 2.5, 5. , 2. ],\n", 148 | " [6.9, 3.1, 5.4, 2.1],\n", 149 | " [4.4, 3.2, 1.3, 0.2],\n", 150 | " [5. , 3.6, 1.4, 0.2],\n", 151 | " [7.2, 3. , 5.8, 1.6],\n", 152 | " [5.1, 3.5, 1.4, 0.3],\n", 153 | " [4.4, 3. , 1.3, 0.2],\n", 154 | " [5.4, 3.9, 1.7, 0.4],\n", 155 | " [5.5, 2.3, 4. , 1.3],\n", 156 | " [6.8, 3.2, 5.9, 2.3],\n", 157 | " [7.6, 3. , 6.6, 2.1],\n", 158 | " [5.1, 3.5, 1.4, 0.2],\n", 159 | " [4.9, 3.1, 1.5, 0.2],\n", 160 | " [5.2, 3.4, 1.4, 0.2],\n", 161 | " [5.7, 2.8, 4.5, 1.3],\n", 162 | " [6.6, 3. , 4.4, 1.4],\n", 163 | " [5. , 3.2, 1.2, 0.2],\n", 164 | " [5.1, 3.3, 1.7, 0.5],\n", 165 | " [6.4, 2.9, 4.3, 1.3],\n", 166 | " [5.4, 3.4, 1.5, 0.4],\n", 167 | " [7.7, 2.6, 6.9, 2.3],\n", 168 | " [4.9, 2.4, 3.3, 1. ],\n", 169 | " [7.9, 3.8, 6.4, 2. ],\n", 170 | " [6.7, 3.1, 4.4, 1.4],\n", 171 | " [5.2, 4.1, 1.5, 0.1],\n", 172 | " [6. , 3. , 4.8, 1.8],\n", 173 | " [5.8, 4. , 1.2, 0.2],\n", 174 | " [7.7, 2.8, 6.7, 2. ],\n", 175 | " [5.1, 3.8, 1.5, 0.3],\n", 176 | " [4.7, 3.2, 1.6, 0.2],\n", 177 | " [7.4, 2.8, 6.1, 1.9],\n", 178 | " [5. , 3.3, 1.4, 0.2],\n", 179 | " [6.3, 3.4, 5.6, 2.4],\n", 180 | " [5.7, 2.8, 4.1, 1.3],\n", 181 | " [5.8, 2.7, 3.9, 1.2],\n", 182 | " [5.7, 2.6, 3.5, 1. ],\n", 183 | " [6.4, 3.2, 5.3, 2.3],\n", 184 | " [6.7, 3. , 5.2, 2.3],\n", 185 | " [6.3, 2.5, 4.9, 1.5],\n", 186 | " [6.7, 3. , 5. , 1.7],\n", 187 | " [5. , 3. , 1.6, 0.2],\n", 188 | " [5.5, 2.4, 3.7, 1. ],\n", 189 | " [6.7, 3.1, 5.6, 2.4],\n", 190 | " [5.8, 2.7, 5.1, 1.9],\n", 191 | " [5.1, 3.4, 1.5, 0.2],\n", 192 | " [6.6, 2.9, 4.6, 1.3],\n", 193 | " [5.6, 3. , 4.1, 1.3],\n", 194 | " [5.9, 3.2, 4.8, 1.8],\n", 195 | " [6.3, 2.3, 4.4, 1.3],\n", 196 | " [5.5, 3.5, 1.3, 0.2],\n", 197 | " [5.1, 3.7, 1.5, 0.4],\n", 198 | " [4.9, 3.1, 1.5, 0.1],\n", 199 | " [6.3, 2.9, 5.6, 1.8],\n", 200 | " [5.8, 2.7, 4.1, 1. 
],\n", 201 | " [7.7, 3.8, 6.7, 2.2],\n", 202 | " [4.6, 3.2, 1.4, 0.2]])" 203 | ] 204 | }, 205 | "execution_count": 15, 206 | "metadata": {}, 207 | "output_type": "execute_result" 208 | } 209 | ], 210 | "source": [ 211 | "X_train" 212 | ] 213 | }, 214 | { 215 | "cell_type": "code", 216 | "execution_count": 19, 217 | "metadata": {}, 218 | "outputs": [], 219 | "source": [ 220 | "DT=DecisionTreeClassifier(criterion='entropy', random_state=5)\n", 221 | "fit=DT.fit(X_train, Y_train)\n", 222 | "predict=DT.predict(X_test)\n", 223 | "Accuracy= metrics.accuracy_score(Y_test, predict)" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": 20, 229 | "metadata": {}, 230 | "outputs": [ 231 | { 232 | "data": { 233 | "text/plain": [ 234 | "0.9736842105263158" 235 | ] 236 | }, 237 | "execution_count": 20, 238 | "metadata": {}, 239 | "output_type": "execute_result" 240 | } 241 | ], 242 | "source": [ 243 | "Accuracy" 244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 21, 249 | "metadata": {}, 250 | "outputs": [], 251 | "source": [ 252 | "DT= DecisionTreeClassifier(criterion='gini') \n", 253 | "fit=DT.fit(X_train, Y_train)\n", 254 | "predict= DT.predict(X_test)\n", 255 | "Accuracy= metrics.accuracy_score(Y_test, predict)" 256 | ] 257 | }, 258 | { 259 | "cell_type": "code", 260 | "execution_count": 22, 261 | "metadata": {}, 262 | "outputs": [ 263 | { 264 | "data": { 265 | "text/plain": [ 266 | "0.9736842105263158" 267 | ] 268 | }, 269 | "execution_count": 22, 270 | "metadata": {}, 271 | "output_type": "execute_result" 272 | } 273 | ], 274 | "source": [ 275 | "Accuracy" 276 | ] 277 | }, 278 | { 279 | "cell_type": "code", 280 | "execution_count": 23, 281 | "metadata": {}, 282 | "outputs": [], 283 | "source": [ 284 | "# We observe that gini and entropy gives the same accuracy." 285 | ] 286 | }, 287 | { 288 | "cell_type": "raw", 289 | "metadata": {}, 290 | "source": [ 291 | "Entropy: is the degree of uncertainty or disorder, measure of impurity \n", 292 | " Entropy controls how a Decision Tree decides to split the data. \n", 293 | " It actually effects how a Decision Tree draws its boundaries\n", 294 | "Decision Tree Alogrithm constructs Decision tree based on features that have maximum \n", 295 | "information gain and mininmum entropy" 296 | ] 297 | }, 298 | { 299 | "cell_type": "markdown", 300 | "metadata": {}, 301 | "source": [ 302 | "Information Gain : It can be considered as the difference between the entropy of parent node \n", 303 | "and weighted average entropy of child nodes. we calculate information gain by doing a split. 
the attribute with the maximum IG is chosen as a decision node, and further splitting is carried out.\n",
304 |     "\n",
305 |     "Gini impurity is the splitting criterion used by CART (Classification and Regression Trees), the algorithm that scikit-learn's DecisionTreeClassifier is based on.\n",
306 |     "\n",
307 |     "Gini impurity: the probability of incorrectly classifying a randomly chosen element in the dataset if it were randomly labeled according to the class distribution in the dataset.\n"
308 |    ]
309 |   },
310 |   {
311 |    "cell_type": "code",
312 |    "execution_count": null,
313 |    "metadata": {},
314 |    "outputs": [],
315 |    "source": []
316 |   }
317 |  ],
318 |  "metadata": {
319 |   "kernelspec": {
320 |    "display_name": "Python 3",
321 |    "language": "python",
322 |    "name": "python3"
323 |   },
324 |   "language_info": {
325 |    "codemirror_mode": {
326 |     "name": "ipython",
327 |     "version": 3
328 |    },
329 |    "file_extension": ".py",
330 |    "mimetype": "text/x-python",
331 |    "name": "python",
332 |    "nbconvert_exporter": "python",
333 |    "pygments_lexer": "ipython3",
334 |    "version": "3.7.3"
335 |   }
336 |  },
337 |  "nbformat": 4,
338 |  "nbformat_minor": 2
339 | }
340 | 
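To make the entropy, information gain, and Gini formulas in the notebook above concrete, here is a small illustrative Python sketch (not part of the original notebook):

    # Illustrative sketch (not part of the original notebook): entropy, Gini
    # impurity, and information gain for a two-class node.
    import numpy as np

    def entropy(class_probs):
        # H = -sum(p * log2(p)) over the classes present in the node
        p = np.array([q for q in class_probs if q > 0])
        return 0.0 - np.sum(p * np.log2(p))  # 0.0 - ... avoids printing -0.0

    def gini(class_probs):
        # G = 1 - sum(p^2): chance of mislabeling a random element
        p = np.array(class_probs)
        return 1.0 - np.sum(p ** 2)

    # A pure node has zero impurity; a 50/50 node is maximally impure.
    print(entropy([1.0, 0.0]), gini([1.0, 0.0]))   # 0.0 0.0
    print(entropy([0.5, 0.5]), gini([0.5, 0.5]))   # 1.0 0.5

    # Information gain = parent entropy - weighted average child entropy.
    parent = entropy([0.5, 0.5])
    children = 0.5 * entropy([0.9, 0.1]) + 0.5 * entropy([0.1, 0.9])
    print(parent - children)                       # ~0.531
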
--------------------------------------------------------------------------------
/Deep Learning:
--------------------------------------------------------------------------------
1 | 
2 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Machine-Learning
2 | Applying Machine Learning algorithms on various data sets.
3 | 
4 | Several Machine Learning projects are uploaded here for practice and reference.
5 | You can learn end-to-end model building in Machine Learning with these starter projects. Various NLTK and Deep Learning projects are also uploaded for practice.
6 | 
7 | 
8 | The Iris dataset is one of the most basic datasets for learning and understanding supervised machine learning algorithms and how they work.
9 | I have done the data exploration and data visualization of the IRIS data set,
10 | and gone further in training the model with various machine learning algorithms: a regression algorithm (Linear Regression) and an
11 | instance-based learning algorithm, K-NN, which does not build an explicit model.
12 | The Iris data is also tested with the Decision Tree algorithm.
13 | We notice that the Decision Tree classifier gives the maximum accuracy when compared with the other two.
14 | We then move on and check six different algorithms; SVM gives 93% accuracy.
15 | 
16 | 
17 | The Boston data set is house-pricing data, and I have applied Linear Regression to train the model. The score is not great, which means the model
18 | might not perform well when given new data to predict prices.
19 | 
20 | Other projects include the Wine dataset, the Adult UCI Income dataset, the Amazon Fine Food reviews, a content-based Movie Recommender system, and others.
21 | 
22 | You can get additional help from my YouTube Channel: https://www.youtube.com/c/PriyankaSharmastudyclub
23 | 
24 | 
25 | 
--------------------------------------------------------------------------------
/Report (Musk , non-musk):
--------------------------------------------------------------------------------
1 | In this report I intend to describe the procedure followed in building the model, explaining why each method or function was chosen, and to highlight the significance of the adopted approach.
2 | 
3 | OBJECTIVE
4 | The objective of the project was to classify the given data set into the Musk or non-Musk category. It was a binary classification problem. It is a supervised machine learning example, as the data set had labelled data (the class attribute determines whether the given molecule is MUSK or NON-MUSK).
5 | 
6 | The choice was given to opt for either a Multi-Layered Perceptron, a CNN, an RNN, or even transfer learning. The dataset consists of 6598 rows and 170 columns or attributes, including the class or label.
7 | The purpose was to train the model on the given data set; a Multi-Layered Perceptron model is used.
8 | 
9 | The model is built upon a Multi-Layered Perceptron (neural network) using Keras as the deep learning library. The data is standardized before being fed into the model. The metric used is ACCURACY, which gives 100% accuracy with no loss. Hence the model is very well trained and can predict unknown data with high accuracy. The following provides a brief of the results obtained after training and testing the data.
10 | Accuracy: 1.000000
11 | Precision: 1.000000
12 | Recall: 1.000000
13 | F1 score: 1.000000
14 | Cohens kappa: 1.000000
15 | ROC AUC: 1.000000
16 | [[1118 0]
17 | [ 0 202]]
18 | 
19 | The project begins with importing basic libraries; the libraries used are:
20 | 1) Pandas
21 | Pandas is a Python library used for reading the csv file and converting the data into a dataframe. Pandas is used for data munging and preparation. Data exploration is possible because of the structured representation of the data that Pandas provides.
22 | 
23 | 2) Numpy
24 | Numpy is yet another Python library, used for numerical computation. It is helpful for performing matrix multiplication, array operations, and scientific computing in general.
25 | 
26 | 3) Matplotlib and Seaborn
27 | These are the plotting libraries of Python. They help to visualize the data and get a quick view of it. By plotting densities and histograms I understood that the data is not uniform. This meant that before feeding the data into the model it would require some kind of standardization.
28 | 
29 | 4) Sklearn/Scikit-Learn Library
30 | The sklearn library of Python provides excellent functions and modules for model building.
31 | a) I have used a PIPELINE to avoid data leakage into the test data. Pipelines provide standard workflows: scikit-learn provides a pipeline utility to help automate machine learning workflows.
32 | 
33 | b) For MODEL PREPROCESSING I have used StandardScaler and LabelEncoder.
34 | b.1) StandardScaler: scales the data while taking the standard deviation into account. If the standard deviation of a feature is different, its range would also differ. Standard scaling reduces the effect of any outlier.
35 | The formula: z = (x - u) / s
36 | 
37 | b.2) LabelEncoder: an important step in data preprocessing. It refers to converting the labels into numeric form, so as to turn them into a machine-readable form. The columns containing string values, like molecule_name and Conformation_name, were converted into numeric format with label encoding.
38 | 
39 | 
40 | c) For MODEL EVALUATION I have used StratifiedKFold and cross_val_score.
41 | c.1) StratifiedKFold: stratification is the process of rearranging the data so as to ensure each fold is a good representative of the whole. For example, in a binary classification problem where each class comprises 50% of the data, it is best to arrange the data such that in every fold each class comprises around half the instances.
42 | 
43 | c.2) cross_val_score: evaluates a score by cross-validation.
44 | Cross-validation is a technique used to protect against overfitting in a predictive model. It is used to estimate the performance of the model.
45 | I have taken 10 splits/folds here, and the data is shuffled before splitting.
46 | 
47 | The RESULT we get is the mean of the various scores obtained.
48 | This is more accurate, as the model is trained and evaluated multiple times on different data.
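To make this evaluation workflow concrete, here is a minimal runnable sketch (the Musk notebook itself is not included in this repository, so a LogisticRegression on synthetic data stands in for the actual Keras MLP):

    # Minimal sketch of Pipeline + StratifiedKFold + cross_val_score,
    # assuming a scikit-learn-compatible estimator in place of the Keras MLP.
    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import StratifiedKFold, cross_val_score
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler

    X, y = make_classification(n_samples=200, n_features=20, random_state=7)

    # The scaler is fitted inside each training fold, so no information
    # from the held-out fold leaks into the preprocessing step.
    pipeline = Pipeline([
        ('standardize', StandardScaler()),
        ('clf', LogisticRegression()),
    ])

    kfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=7)
    scores = cross_val_score(pipeline, X, y, cv=kfold)
    print('Accuracy: %.2f%% (%.2f%%)' % (scores.mean() * 100, scores.std() * 100))
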
49 | 
50 | SEED is used for result reproducibility. Any time this code is run, it will give the same output, as I have fixed the seed. The seed can be set randomly to obtain different results.
51 | 
52 | 5) Keras Deep Learning Library:
53 | It runs on top of Theano or TensorFlow. Keras is a powerful and easy-to-use Python library for developing and evaluating deep learning models. It wraps the efficient numerical computation libraries Theano and TensorFlow and allows you to define and train neural network models in a few short lines of code.
54 | 
55 | 
56 | DATA TRANSFORMS:
57 | These are methods for data preprocessing.
58 | The fit() function prepares the parameters of the transform once on the data. Later we use the transform() function on the same data to prepare it for modelling, and again on the test or validation dataset.
59 | 
60 | 
61 | 
62 | EXPLANATION OF THE MODEL IN DETAIL:
63 | The Keras library provides wrapper classes that allow you to use neural network models developed with Keras in scikit-learn.
64 | There is a KerasClassifier class in Keras that can be used as an Estimator in scikit-learn, the base type of model in the library.
65 | The KerasClassifier takes the name of a function as an argument.
66 | This function returns the constructed neural network model, ready for training.
67 | 
68 | I have created a function create_larger() and then defined the model within this function. Models in Keras are defined as a sequence of layers. We create a Sequential model and add one layer at a time (the number of layers is solely a personal discretion).
69 | Let's explore the hidden and other layers:
70 | INPUT LAYER
71 | The input layer must have the right number of inputs (determining the number of inputs is a trial-and-error process).
72 | Inputs are specified when creating the first layer, with the input_dim argument.
73 | 
74 | DENSE
75 | When we use a fully connected network we use fully connected layers; these layers are defined using the Dense class, with the number of neurons as the first argument, the initialization as the second argument, and the activation given via the activation argument.
76 | 
77 | OUTPUT LAYER
78 | The output layer contains a single neuron in order to make predictions. It
79 | uses the sigmoid activation function in order to produce a probability output in the range of
80 | 0 to 1.
81 | 
82 | ACTIVATION FUNCTION
83 | The weighted inputs are summed and passed through an activation function, often called a transfer function. I have used a sigmoid activation function in the output layer. This is to ensure the output values are in the range of 0 to 1 and may be used as predicted probabilities.
84 | 
85 | ReLU, or Rectified Linear Unit, is the activation function used in the first two layers. It is a non-linear activation function. The main advantage of using the ReLU function over other activation functions is that it does not activate all the neurons at the same time: a neuron is deactivated whenever the output of the linear transformation is less than 0.
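A tiny numeric illustration (mine, not from the original report) of the two activation functions just described:

    # relu() and sigmoid() evaluated on a few pre-activation values.
    import numpy as np

    def relu(x):
        # Outputs 0 for negative inputs, so those neurons stay inactive.
        return np.maximum(0.0, x)

    def sigmoid(x):
        # Squashes any real input into (0, 1), usable as a probability.
        return 1.0 / (1.0 + np.exp(-x))

    z = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
    print(relu(z))     # [0.  0.  0.  0.5 2. ]
    print(sigmoid(z))  # approx. [0.119 0.378 0.5 0.622 0.881]
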
86 | 
87 | ReLU helps solve the Vanishing Gradient Problem (VGP): as the number of layers increases, saturation increases and the model no longer performs well. ReLU helps with weight convergence and mitigates the vanishing gradient.
88 | 
89 | 
90 | Finally, the network uses the efficient ADAM gradient descent optimization algorithm with a logarithmic loss function, which for this binary problem is called binary crossentropy in Keras.
91 | 
92 | The ADAM optimization algorithm has the following benefits:
93 | Easy to implement.
94 | Quite computationally efficient.
95 | Requires little memory space.
96 | Good for non-stationary objectives.
97 | Works well on problems with noisy or sparse gradients.
98 | Works well with large data sets and large numbers of parameters.
99 | 
100 | 
101 | Binary crossentropy
102 | It is a loss function used on problems involving yes/no (binary) decisions. For instance, in multi-label problems, where an example can belong to multiple classes at the same time, the model tries to decide for each class whether the example belongs to that class or not.
103 | 
104 | The pipeline is a wrapper that
105 | executes one or more models within a pass of the cross-validation procedure. Here, we can
106 | define a pipeline with the StandardScaler followed by our neural network model.
107 | 
108 | MODEL COMPILATION
109 | Numeric libraries are used, TensorFlow here; the backend automatically chooses the best way to represent the network for training and for making predictions on the given hardware.
110 | Debug output is turned off by setting verbose to zero.
111 | 
112 | Fit Model
113 | Once the model is defined and compiled, it is ready for efficient computation. Now we can execute the model: we train or fit the model on our loaded data by calling the fit() function on the model.
114 | 
115 | The training process runs for a fixed number of iterations through the dataset, called EPOCHS.
116 | The batch size argument sets the number of instances that are evaluated before a weight update in the network.
117 | 
118 | 
119 | 
120 | PLOTTING THE ACCURACY AND LOSS GRAPH
121 | Keras provides the capability to register callbacks when training a deep learning model.
122 | One of the default callbacks that is registered when training all deep learning models is the History
123 | callback. It records training metrics for each epoch. This includes the loss and the accuracy (for
124 | classification problems) as well as the loss and accuracy for the validation dataset.
125 | 
126 | 
127 | The plots can provide an
128 | indication of useful things about the training of the model, such as:
129 | - Its speed of convergence over epochs (slope).
130 | - Whether the model may have already converged (plateau of the line).
131 | - Whether the model may be over-learning the training data (inflection of the validation line).
132 | 
133 | 
134 | We can create plots from the collected history data:
135 | 1. A plot of accuracy on the training and validation datasets over training epochs.
136 | 2. A plot of loss on the training and validation datasets over training epochs.
137 | 
138 | The plot shows a straight line around 1, depicting 100% accuracy attained over 150 epochs,
139 | and the loss graph shows no error.
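As an illustrative sketch of this plotting step (assuming `model`, `X`, and `Y` are the compiled Keras model and the prepared arrays; the Musk notebook itself is not included in this repository):

    # Plotting the History callback's metrics after training.
    import matplotlib.pyplot as plt

    history = model.fit(X, Y, validation_split=0.2, epochs=150,
                        batch_size=5, verbose=0)

    # Accuracy on training and validation data over the epochs
    # (the key is 'acc' in Keras of this era, 'accuracy' in newer versions).
    plt.plot(history.history['acc'], label='train')
    plt.plot(history.history['val_acc'], label='validation')
    plt.title('model accuracy')
    plt.xlabel('epoch')
    plt.ylabel('accuracy')
    plt.legend()
    plt.show()

    # Loss over the epochs.
    plt.plot(history.history['loss'], label='train')
    plt.plot(history.history['val_loss'], label='validation')
    plt.title('model loss')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    plt.legend()
    plt.show()
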
140 | 
141 | METRICS USED AND THEIR IMPORTANCE
142 | The project used classification metrics to measure and compare the performance of the machine learning algorithm. An algorithm is evaluated through metrics.
143 | 
144 | The results obtained are given below:
145 | Accuracy: 1.000000
146 | Precision: 1.000000
147 | Recall: 1.000000
148 | F1 score: 1.000000
149 | Cohens kappa: 1.000000
150 | ROC AUC: 1.000000
151 | [[1118 0]
152 | [ 0 202]]
153 | 
154 | DETAILS OF METRICS:
155 | 1) ACCURACY
156 | Classification accuracy is the number of correct predictions made as a ratio of all predictions made. Here we notice that we obtain all correct predictions; thus the result is 100%.
157 | Accuracy is the most common evaluation metric for classification problems.
158 | However, the accuracy metric has its own limitations, as it measures overall correctness and does not distinguish between false positive errors and false negative errors.
159 | 
160 | 2) PRECISION
161 | Precision is the fraction of positive predictions that are correct; here it happens to be 1. This implies that the algorithm got all of its positive predictions right: it correctly classified all Musk molecules as Musk.
162 | Precision = TruePositives / (TruePositives + FalsePositives)
163 | Precision = 1 implies perfect precision.
164 | 
165 | 3) RECALL
166 | Recall is the TRUE POSITIVE RATE, also called 'Sensitivity'. It is the number of instances from the positive (first) class that are actually predicted correctly. Precision only looks at the predictions that were made positive; if we also want to know how many of the actual positives were found, recall comes to our aid: recall provides an indication of missed positive predictions.
167 | Recall quantifies the number of positive class predictions made out of all positive examples in the dataset.
168 | e.g. in the medical domain, where we want to catch the cases of cancer as well as identify the non-cancer patients. For example, with 90 true positives and 10 false negatives:
169 | Recall = TruePositives / (TruePositives + FalseNegatives)
170 | Recall = 90 / (90 + 10)
171 | Recall = 90 / 100
172 | Recall = 0.9
173 | 
174 | 
175 | 4) F1 SCORE
176 | The F-measure provides a single score that balances both the concerns of precision and recall in one number.
177 | It is the harmonic mean, a weighted average, of the precision and recall:
178 | F1-Measure = (2 * Precision * Recall) / (Precision + Recall)
179 | For our results:
180 | F1-Measure = (2 * 1.0 * 1.0) / (1.0 + 1.0)
181 | F1-Measure = (2 * 1.0) / 2.0
182 | F1-Measure = 1.0
183 | 
184 | 5) Classification Report
185 | This gives a quick idea of the accuracy of a model using a number of measures. It displays precision, recall, F1 score, and support.
186 | Support is the number of occurrences of each class in the true labels.
187 | 
188 | 6) Confusion Matrix
189 | The table presents predictions on the x-axis and actual outcomes on the y-axis.
190 | 1118 instances correctly classified as non-Musk
191 | 202 instances correctly classified as Musk, with no misclassifications in either class
192 | 
193 | 7) ROC Curve
194 | The ROC curve plots the true positive rate against the false positive rate at various cut points. It also demonstrates a trade-off between sensitivity (recall) and specificity (the true negative rate). Here the area under it is ONE.
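A hedged sketch of how these metrics can be computed with scikit-learn (`y_test`, `y_pred`, and `y_prob` are assumed names for the true labels, the predicted classes, and the predicted probabilities):

    # Computing the classification metrics reported above.
    from sklearn.metrics import (accuracy_score, precision_score, recall_score,
                                 f1_score, cohen_kappa_score, roc_auc_score,
                                 confusion_matrix)

    print('Accuracy: %f' % accuracy_score(y_test, y_pred))
    print('Precision: %f' % precision_score(y_test, y_pred))
    print('Recall: %f' % recall_score(y_test, y_pred))
    print('F1 score: %f' % f1_score(y_test, y_pred))
    print('Cohens kappa: %f' % cohen_kappa_score(y_test, y_pred))
    print('ROC AUC: %f' % roc_auc_score(y_test, y_prob))  # y_prob: predicted probabilities
    print(confusion_matrix(y_test, y_pred))
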
195 | 
196 | 
197 | CONCLUSION
198 | The project started by importing basic libraries and loading the dataset, then performing exploratory data analysis and fetching a few statistics, which gave an idea that standardization would be required before model building. Proceeding further to data visualization, observations were made regarding the distribution of the data; the density plots, histograms, and count plots helped to give a quick view of the data set. The next step was data preprocessing to prepare the data for training: standardization was performed, and the data was split into a training and a validation set. Subsequently the model was built, compiled, and fitted, and plots of the accuracy and the loss function were produced. In the final step the metrics were evaluated, and the results showed 100% accuracy achieved by the model.
199 | 
200 | 
201 | 
202 | 
203 | 
204 | 
205 | 
206 | 
207 | 
208 | 
209 | 
210 | 
211 | 
212 | 
--------------------------------------------------------------------------------
/Sonar Data set Neural Network model.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "code",
5 |    "execution_count": 6,
6 |    "metadata": {},
7 |    "outputs": [],
8 |    "source": [
9 |     "# Binary Classification with Sonar Dataset\n",
10 |     "import numpy\n",
11 |     "from pandas import read_csv\n",
12 |     "from keras.models import Sequential\n",
13 |     "from keras.layers import Dense\n",
14 |     "from keras.wrappers.scikit_learn import KerasClassifier\n",
15 |     "from sklearn.model_selection import cross_val_score\n",
16 |     "from sklearn.preprocessing import LabelEncoder\n",
17 |     "from sklearn.model_selection import StratifiedKFold\n",
18 |     "from sklearn.preprocessing import StandardScaler\n",
19 |     "from sklearn.pipeline import Pipeline"
20 |    ]
21 |   },
22 |   {
23 |    "cell_type": "code",
24 |    "execution_count": 3,
25 |    "metadata": {},
26 |    "outputs": [],
27 |    "source": [
28 |     "# fix random seed for reproducibility\n",
29 |     "seed = 7\n",
30 |     "numpy.random.seed(seed)"
31 |    ]
32 |   },
33 |   {
34 |    "cell_type": "code",
35 |    "execution_count": 8,
36 |    "metadata": {},
37 |    "outputs": [],
38 |    "source": [
39 |     "# load the dataset\n",
40 |     "dataframe = read_csv('sonar.all-data', header = None)\n",
41 |     "dataset = dataframe.values\n",
42 |     "X = dataset[:,0:60].astype(float)\n",
43 |     "Y = dataset[:,60]"
44 |    ]
45 |   },
46 |   {
47 |    "cell_type": "code",
48 |    "execution_count": 10,
49 |    "metadata": {},
50 |    "outputs": [],
51 |    "source": [
52 |     "# encode class values as integers\n",
53 |     "encoder = LabelEncoder()\n",
54 |     "encoder.fit(Y)\n",
55 |     "encoded_Y = encoder.transform(Y)\n"
56 |    ]
57 |   },
58 |   {
59 |    "cell_type": "code",
60 |    "execution_count": 11,
61 |    "metadata": {},
62 |    "outputs": [
63 |     {
64 |      "name": "stdout",
65 |      "output_type": "stream",
66 |      "text": [
67 |       "WARNING:tensorflow:From C:\\Users\\HP\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\nn_impl.py:180: add_dispatch_support..wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
68 |       "Instructions for updating:\n",
69 |       "Use tf.where in 2.0, which has the same broadcast rule as np.where\n",
70 |       "WARNING:tensorflow:From C:\\Users\\HP\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py:422: The name tf.global_variables is deprecated.
Please use tf.compat.v1.global_variables instead.\n", 71 | "\n", 72 | "Model: 86.04% (5.88%)\n" 73 | ] 74 | } 75 | ], 76 | "source": [ 77 | "# create a model\n", 78 | "\n", 79 | "def create_larger():\n", 80 | " # create model\n", 81 | " model= Sequential()\n", 82 | " model.add(Dense(30, input_dim=60, kernel_initializer='normal', activation='relu'))\n", 83 | " model.add(Dense(1, kernel_initializer='normal', activation = 'sigmoid'))\n", 84 | " \n", 85 | " # compile model\n", 86 | " model.compile(loss = 'binary_crossentropy', optimizer='adam', metrics = ['accuracy'])\n", 87 | " return model\n", 88 | "\n", 89 | "estimators = []\n", 90 | "estimators.append(('standardize', StandardScaler()))\n", 91 | "estimators.append(('mlp',KerasClassifier(build_fn=create_larger, epochs=250, batch_size=5, verbose=0)))\n", 92 | "\n", 93 | "pipeline = Pipeline(estimators)\n", 94 | "kfold = StratifiedKFold(n_splits=10, shuffle=True, random_state= seed)\n", 95 | "results = cross_val_score(pipeline,X , encoded_Y, cv=kfold)\n", 96 | "print(\"Model: %.2f%% (%.2f%%)\" % (results.mean()*100, results.std()*100))\n", 97 | " " 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "## Thus the model accuracy is 86%" 107 | ] 108 | } 109 | ], 110 | "metadata": { 111 | "kernelspec": { 112 | "display_name": "Python 3", 113 | "language": "python", 114 | "name": "python3" 115 | }, 116 | "language_info": { 117 | "codemirror_mode": { 118 | "name": "ipython", 119 | "version": 3 120 | }, 121 | "file_extension": ".py", 122 | "mimetype": "text/x-python", 123 | "name": "python", 124 | "nbconvert_exporter": "python", 125 | "pygments_lexer": "ipython3", 126 | "version": "3.7.3" 127 | } 128 | }, 129 | "nbformat": 4, 130 | "nbformat_minor": 2 131 | } 132 | -------------------------------------------------------------------------------- /Training a poem LSTM.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 37, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "\n", 19 | "poem= \"Jack and Jill went up the hill to fetch a pail of water \"" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 38, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "poem =poem.lower().split()" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 39, 34 | "metadata": {}, 35 | "outputs": [ 36 | { 37 | "data": { 38 | "text/plain": [ 39 | "['jack',\n", 40 | " 'and',\n", 41 | " 'jill',\n", 42 | " 'went',\n", 43 | " 'up',\n", 44 | " 'the',\n", 45 | " 'hill',\n", 46 | " 'to',\n", 47 | " 'fetch',\n", 48 | " 'a',\n", 49 | " 'pail',\n", 50 | " 'of',\n", 51 | " 'water']" 52 | ] 53 | }, 54 | "execution_count": 39, 55 | "metadata": {}, 56 | "output_type": "execute_result" 57 | } 58 | ], 59 | "source": [ 60 | "poem" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 40, 73 | "metadata": { 74 | "collapsed": true 75 | }, 76 | "outputs": [ 77 | { 78 | "name": "stdout", 79 | "output_type": "stream", 80 | "text": [ 81 | "['jack', 'and', 'jill'] -> went\n", 82 | "['jack', 'and'] -> jill\n", 83 | "['the', 'hill', 
'to', 'fetch', 'a'] -> pail\n", 84 | "['went', 'up'] -> the\n", 85 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 86 | "['fetch', 'a'] -> pail\n", 87 | "['up', 'the'] -> hill\n", 88 | "['fetch', 'a', 'pail'] -> of\n", 89 | "['went'] -> up\n", 90 | "['up', 'the', 'hill'] -> to\n", 91 | "['a'] -> pail\n", 92 | "['jack', 'and', 'jill', 'went', 'up'] -> the\n", 93 | "['fetch', 'a', 'pail', 'of'] -> water\n", 94 | "['up'] -> the\n", 95 | "['up', 'the'] -> hill\n", 96 | "['to', 'fetch', 'a', 'pail'] -> of\n", 97 | "['up'] -> the\n", 98 | "['a'] -> pail\n", 99 | "['to', 'fetch', 'a', 'pail'] -> of\n", 100 | "['a'] -> pail\n", 101 | "['jill', 'went', 'up'] -> the\n", 102 | "['to', 'fetch', 'a'] -> pail\n", 103 | "['the'] -> hill\n", 104 | "['the', 'hill', 'to', 'fetch'] -> a\n", 105 | "['hill', 'to'] -> fetch\n", 106 | "['fetch', 'a', 'pail'] -> of\n", 107 | "['hill'] -> to\n", 108 | "['up', 'the'] -> hill\n", 109 | "['fetch'] -> a\n", 110 | "['up', 'the', 'hill', 'to'] -> fetch\n", 111 | "['went', 'up', 'the', 'hill'] -> to\n", 112 | "['pail', 'of'] -> water\n", 113 | "['the', 'hill', 'to', 'fetch'] -> a\n", 114 | "['to', 'fetch', 'a'] -> pail\n", 115 | "['up', 'the', 'hill'] -> to\n", 116 | "['hill'] -> to\n", 117 | "['hill', 'to', 'fetch'] -> a\n", 118 | "['pail'] -> of\n", 119 | "['up', 'the', 'hill', 'to', 'fetch'] -> a\n", 120 | "['and', 'jill', 'went'] -> up\n", 121 | "['pail'] -> of\n", 122 | "['hill', 'to'] -> fetch\n", 123 | "['and', 'jill', 'went', 'up'] -> the\n", 124 | "['to', 'fetch', 'a'] -> pail\n", 125 | "['to', 'fetch'] -> a\n", 126 | "['fetch', 'a', 'pail', 'of'] -> water\n", 127 | "['pail', 'of'] -> water\n", 128 | "['to', 'fetch'] -> a\n", 129 | "['up'] -> the\n", 130 | "['up', 'the', 'hill', 'to'] -> fetch\n", 131 | "['jack'] -> and\n", 132 | "['a', 'pail'] -> of\n", 133 | "['pail', 'of'] -> water\n", 134 | "['the'] -> hill\n", 135 | "['a', 'pail'] -> of\n", 136 | "['went', 'up'] -> the\n", 137 | "['fetch', 'a', 'pail', 'of'] -> water\n", 138 | "['the', 'hill', 'to'] -> fetch\n", 139 | "['jill'] -> went\n", 140 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 141 | "['went'] -> up\n", 142 | "['to', 'fetch', 'a'] -> pail\n", 143 | "['a'] -> pail\n", 144 | "['a'] -> pail\n", 145 | "['to', 'fetch'] -> a\n", 146 | "['jill', 'went'] -> up\n", 147 | "['a', 'pail'] -> of\n", 148 | "['jill', 'went'] -> up\n", 149 | "['jack', 'and', 'jill', 'went'] -> up\n", 150 | "['jill', 'went'] -> up\n", 151 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 152 | "['jack', 'and'] -> jill\n", 153 | "['up', 'the', 'hill', 'to', 'fetch'] -> a\n", 154 | "['jack'] -> and\n", 155 | "['the'] -> hill\n", 156 | "['a', 'pail'] -> of\n", 157 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 158 | "['hill', 'to', 'fetch', 'a', 'pail'] -> of\n", 159 | "['the', 'hill'] -> to\n", 160 | "['a'] -> pail\n", 161 | "['to'] -> fetch\n", 162 | "['jack', 'and', 'jill', 'went'] -> up\n", 163 | "['pail', 'of'] -> water\n", 164 | "['jill', 'went', 'up'] -> the\n", 165 | "['up', 'the', 'hill', 'to', 'fetch'] -> a\n", 166 | "['up'] -> the\n", 167 | "['fetch'] -> a\n", 168 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 169 | "['went'] -> up\n", 170 | "['the', 'hill', 'to'] -> fetch\n", 171 | "['fetch', 'a', 'pail', 'of'] -> water\n", 172 | "['the', 'hill', 'to', 'fetch'] -> a\n", 173 | "['a', 'pail', 'of'] -> water\n", 174 | "['jack', 'and'] -> jill\n", 175 | "['jack', 'and', 'jill', 'went'] -> up\n", 176 | "['hill'] -> to\n", 177 | "['the', 'hill'] -> to\n", 178 | "['fetch', 'a', 'pail', 'of'] -> water\n", 179 | "['a', 'pail'] -> 
of\n", 180 | "['hill'] -> to\n", 181 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 182 | "['to', 'fetch', 'a'] -> pail\n", 183 | "['fetch', 'a'] -> pail\n", 184 | "['jack', 'and', 'jill', 'went'] -> up\n", 185 | "['up', 'the'] -> hill\n", 186 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 187 | "['the'] -> hill\n", 188 | "['the', 'hill', 'to'] -> fetch\n", 189 | "['a', 'pail'] -> of\n", 190 | "['to'] -> fetch\n", 191 | "['the', 'hill', 'to', 'fetch'] -> a\n", 192 | "['and', 'jill', 'went'] -> up\n", 193 | "['pail', 'of'] -> water\n", 194 | "['jack'] -> and\n", 195 | "['went', 'up'] -> the\n", 196 | "['jack'] -> and\n", 197 | "['to', 'fetch', 'a'] -> pail\n", 198 | "['jill', 'went', 'up'] -> the\n", 199 | "['fetch'] -> a\n", 200 | "['a', 'pail', 'of'] -> water\n", 201 | "['pail'] -> of\n", 202 | "['jill', 'went'] -> up\n", 203 | "['pail', 'of'] -> water\n", 204 | "['hill'] -> to\n", 205 | "['up'] -> the\n", 206 | "['hill', 'to', 'fetch'] -> a\n", 207 | "['jill', 'went', 'up'] -> the\n", 208 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 209 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 210 | "['hill', 'to', 'fetch', 'a', 'pail'] -> of\n", 211 | "['and', 'jill', 'went'] -> up\n", 212 | "['hill', 'to'] -> fetch\n", 213 | "['up', 'the', 'hill', 'to'] -> fetch\n", 214 | "['up', 'the', 'hill', 'to', 'fetch'] -> a\n", 215 | "['to', 'fetch'] -> a\n", 216 | "['and', 'jill', 'went', 'up', 'the'] -> hill\n", 217 | "['a', 'pail', 'of'] -> water\n", 218 | "['hill', 'to'] -> fetch\n", 219 | "['a'] -> pail\n", 220 | "['pail'] -> of\n", 221 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 222 | "['the'] -> hill\n", 223 | "['up', 'the', 'hill'] -> to\n", 224 | "['jill'] -> went\n", 225 | "['fetch', 'a', 'pail'] -> of\n", 226 | "['a', 'pail'] -> of\n", 227 | "['hill', 'to', 'fetch', 'a', 'pail'] -> of\n", 228 | "['went', 'up'] -> the\n", 229 | "['a', 'pail'] -> of\n", 230 | "['went', 'up', 'the'] -> hill\n", 231 | "['fetch', 'a', 'pail', 'of'] -> water\n", 232 | "['hill', 'to'] -> fetch\n", 233 | "['and', 'jill'] -> went\n", 234 | "['jack'] -> and\n", 235 | "['to'] -> fetch\n", 236 | "['hill'] -> to\n", 237 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 238 | "['a'] -> pail\n", 239 | "['and'] -> jill\n", 240 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 241 | "['a', 'pail'] -> of\n", 242 | "['pail', 'of'] -> water\n", 243 | "['fetch', 'a', 'pail'] -> of\n", 244 | "['jill', 'went'] -> up\n", 245 | "['jack', 'and', 'jill'] -> went\n", 246 | "['a'] -> pail\n", 247 | "['a', 'pail', 'of'] -> water\n", 248 | "['a'] -> pail\n", 249 | "['pail'] -> of\n", 250 | "['fetch', 'a', 'pail'] -> of\n", 251 | "['and', 'jill', 'went'] -> up\n", 252 | "['up', 'the', 'hill', 'to'] -> fetch\n", 253 | "['fetch', 'a'] -> pail\n", 254 | "['jack', 'and', 'jill', 'went'] -> up\n", 255 | "['fetch', 'a', 'pail', 'of'] -> water\n", 256 | "['jill', 'went', 'up', 'the'] -> hill\n", 257 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 258 | "['jill', 'went'] -> up\n", 259 | "['pail', 'of'] -> water\n", 260 | "['went', 'up', 'the', 'hill'] -> to\n", 261 | "['a', 'pail', 'of'] -> water\n", 262 | "['went', 'up', 'the'] -> hill\n", 263 | "['hill', 'to', 'fetch'] -> a\n", 264 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 265 | "['jack', 'and', 'jill', 'went', 'up'] -> the\n", 266 | "['went', 'up', 'the'] -> hill\n", 267 | "['went', 'up'] -> the\n", 268 | "['pail', 'of'] -> water\n", 269 | "['jack', 'and', 'jill', 'went'] -> up\n", 270 | "['to', 'fetch', 'a'] -> pail\n", 271 | "['pail'] -> of\n", 272 | "['the', 'hill', 'to', 
'fetch'] -> a\n", 273 | "['went', 'up', 'the', 'hill'] -> to\n", 274 | "['up', 'the', 'hill'] -> to\n", 275 | "['fetch', 'a', 'pail'] -> of\n", 276 | "['the'] -> hill\n", 277 | "['the', 'hill', 'to'] -> fetch\n", 278 | "['and', 'jill', 'went'] -> up\n", 279 | "['jack', 'and', 'jill', 'went'] -> up\n", 280 | "['to', 'fetch'] -> a\n", 281 | "['to', 'fetch', 'a', 'pail', 'of'] -> water\n", 282 | "['to'] -> fetch\n", 283 | "['pail'] -> of\n", 284 | "['pail'] -> of\n", 285 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 286 | "['jack', 'and'] -> jill\n", 287 | "['hill'] -> to\n", 288 | "['jack', 'and'] -> jill\n", 289 | "['up', 'the', 'hill', 'to'] -> fetch\n", 290 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 291 | "['up', 'the'] -> hill\n", 292 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 293 | "['jill', 'went', 'up', 'the'] -> hill\n", 294 | "['up'] -> the\n", 295 | "['up', 'the', 'hill', 'to', 'fetch'] -> a\n", 296 | "['fetch', 'a', 'pail', 'of'] -> water\n", 297 | "['fetch', 'a'] -> pail\n", 298 | "['fetch', 'a'] -> pail\n", 299 | "['to'] -> fetch\n", 300 | "['the', 'hill', 'to'] -> fetch\n", 301 | "['fetch', 'a', 'pail'] -> of\n", 302 | "['jack', 'and'] -> jill\n", 303 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 304 | "['to'] -> fetch\n", 305 | "['pail'] -> of\n", 306 | "['up', 'the', 'hill'] -> to\n", 307 | "['the', 'hill', 'to'] -> fetch\n", 308 | "['pail'] -> of\n", 309 | "['jack'] -> and\n", 310 | "['a', 'pail'] -> of\n", 311 | "['fetch', 'a'] -> pail\n", 312 | "['a', 'pail', 'of'] -> water\n", 313 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 314 | "['went', 'up', 'the', 'hill'] -> to\n", 315 | "['to'] -> fetch\n", 316 | "['a', 'pail'] -> of\n", 317 | "['to', 'fetch', 'a'] -> pail\n", 318 | "['hill', 'to'] -> fetch\n", 319 | "['hill', 'to', 'fetch', 'a', 'pail'] -> of\n", 320 | "['hill', 'to', 'fetch', 'a', 'pail'] -> of\n", 321 | "['fetch', 'a', 'pail', 'of'] -> water\n", 322 | "['pail', 'of'] -> water\n", 323 | "['a', 'pail', 'of'] -> water\n", 324 | "['fetch'] -> a\n", 325 | "['and', 'jill', 'went', 'up', 'the'] -> hill\n", 326 | "['to'] -> fetch\n", 327 | "['and'] -> jill\n", 328 | "['pail'] -> of\n", 329 | "['jack', 'and'] -> jill\n", 330 | "['jack', 'and', 'jill', 'went'] -> up\n", 331 | "['the', 'hill', 'to'] -> fetch\n", 332 | "['to'] -> fetch\n", 333 | "['jack'] -> and\n", 334 | "['the', 'hill', 'to'] -> fetch\n", 335 | "['to', 'fetch', 'a'] -> pail\n", 336 | "['pail'] -> of\n", 337 | "['went'] -> up\n", 338 | "['went'] -> up\n", 339 | "['up', 'the'] -> hill\n", 340 | "['jack'] -> and\n", 341 | "['jill', 'went', 'up', 'the', 'hill'] -> to\n", 342 | "['to', 'fetch'] -> a\n", 343 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 344 | "['up', 'the'] -> hill\n", 345 | "['jack'] -> and\n", 346 | "['up', 'the', 'hill'] -> to\n", 347 | "['and', 'jill', 'went'] -> up\n", 348 | "['a', 'pail', 'of'] -> water\n", 349 | "['and', 'jill', 'went', 'up'] -> the\n", 350 | "['hill', 'to', 'fetch', 'a'] -> pail\n", 351 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 352 | "['jack', 'and', 'jill'] -> went\n", 353 | "['hill', 'to', 'fetch', 'a', 'pail'] -> of\n", 354 | "['up', 'the', 'hill', 'to', 'fetch'] -> a\n", 355 | "['fetch', 'a', 'pail', 'of'] -> water\n", 356 | "['jill', 'went', 'up'] -> the\n", 357 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 358 | "['pail', 'of'] -> water\n", 359 | "['to'] -> fetch\n", 360 | "['jill', 'went'] -> up\n", 361 | "['fetch'] -> a\n", 362 | "['went', 'up', 'the', 'hill'] -> to\n", 363 | "['fetch', 'a', 'pail', 'of'] -> water\n", 364 | 
"['and', 'jill', 'went'] -> up\n", 365 | "['up', 'the', 'hill'] -> to\n", 366 | "['went', 'up'] -> the\n", 367 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 368 | "['jill'] -> went\n", 369 | "['and', 'jill', 'went'] -> up\n", 370 | "['jill', 'went', 'up', 'the'] -> hill\n", 371 | "['jill', 'went', 'up', 'the', 'hill'] -> to\n", 372 | "['jack', 'and', 'jill'] -> went\n", 373 | "['fetch', 'a'] -> pail\n", 374 | "['a'] -> pail\n", 375 | "['went', 'up', 'the', 'hill', 'to'] -> fetch\n", 376 | "['to'] -> fetch\n", 377 | "['a', 'pail'] -> of\n", 378 | "['a'] -> pail\n", 379 | "['jill', 'went', 'up'] -> the\n", 380 | "['hill', 'to', 'fetch'] -> a\n" 381 | ] 382 | } 383 | ], 384 | "source": [ 385 | "word_to_int = dict((w,i) for i, w in enumerate(poem))\n", 386 | "int_to_word = dict((i,w) for i, w in enumerate(poem))\n", 387 | "\n", 388 | "num_inputs = 300\n", 389 | "max_len = 5\n", 390 | "dataX =[]\n", 391 | "dataY =[]\n", 392 | "for i in range(num_inputs):\n", 393 | " start = np.random.randint(len(poem)-2)\n", 394 | " end = np.random.randint(start, min(start+max_len, len(poem)-1))\n", 395 | " sequence_in = poem[start:end+1]\n", 396 | " sequence_out = poem[end +1]\n", 397 | " dataX.append([word_to_int[word] for word in sequence_in])\n", 398 | " dataY.append(word_to_int[sequence_out])\n", 399 | " print(sequence_in, '->', sequence_out)\n", 400 | " " 401 | ] 402 | }, 403 | { 404 | "cell_type": "code", 405 | "execution_count": 42, 406 | "metadata": { 407 | "collapsed": true 408 | }, 409 | "outputs": [ 410 | { 411 | "data": { 412 | "text/plain": [ 413 | "[[0, 1, 2],\n", 414 | " [0, 1],\n", 415 | " [5, 6, 7, 8, 9],\n", 416 | " [3, 4],\n", 417 | " [5, 6, 7, 8, 9],\n", 418 | " [8, 9],\n", 419 | " [4, 5],\n", 420 | " [8, 9, 10],\n", 421 | " [3],\n", 422 | " [4, 5, 6],\n", 423 | " [9],\n", 424 | " [0, 1, 2, 3, 4],\n", 425 | " [8, 9, 10, 11],\n", 426 | " [4],\n", 427 | " [4, 5],\n", 428 | " [7, 8, 9, 10],\n", 429 | " [4],\n", 430 | " [9],\n", 431 | " [7, 8, 9, 10],\n", 432 | " [9],\n", 433 | " [2, 3, 4],\n", 434 | " [7, 8, 9],\n", 435 | " [5],\n", 436 | " [5, 6, 7, 8],\n", 437 | " [6, 7],\n", 438 | " [8, 9, 10],\n", 439 | " [6],\n", 440 | " [4, 5],\n", 441 | " [8],\n", 442 | " [4, 5, 6, 7],\n", 443 | " [3, 4, 5, 6],\n", 444 | " [10, 11],\n", 445 | " [5, 6, 7, 8],\n", 446 | " [7, 8, 9],\n", 447 | " [4, 5, 6],\n", 448 | " [6],\n", 449 | " [6, 7, 8],\n", 450 | " [10],\n", 451 | " [4, 5, 6, 7, 8],\n", 452 | " [1, 2, 3],\n", 453 | " [10],\n", 454 | " [6, 7],\n", 455 | " [1, 2, 3, 4],\n", 456 | " [7, 8, 9],\n", 457 | " [7, 8],\n", 458 | " [8, 9, 10, 11],\n", 459 | " [10, 11],\n", 460 | " [7, 8],\n", 461 | " [4],\n", 462 | " [4, 5, 6, 7],\n", 463 | " [0],\n", 464 | " [9, 10],\n", 465 | " [10, 11],\n", 466 | " [5],\n", 467 | " [9, 10],\n", 468 | " [3, 4],\n", 469 | " [8, 9, 10, 11],\n", 470 | " [5, 6, 7],\n", 471 | " [2],\n", 472 | " [3, 4, 5, 6, 7],\n", 473 | " [3],\n", 474 | " [7, 8, 9],\n", 475 | " [9],\n", 476 | " [9],\n", 477 | " [7, 8],\n", 478 | " [2, 3],\n", 479 | " [9, 10],\n", 480 | " [2, 3],\n", 481 | " [0, 1, 2, 3],\n", 482 | " [2, 3],\n", 483 | " [6, 7, 8, 9],\n", 484 | " [0, 1],\n", 485 | " [4, 5, 6, 7, 8],\n", 486 | " [0],\n", 487 | " [5],\n", 488 | " [9, 10],\n", 489 | " [6, 7, 8, 9],\n", 490 | " [6, 7, 8, 9, 10],\n", 491 | " [5, 6],\n", 492 | " [9],\n", 493 | " [7],\n", 494 | " [0, 1, 2, 3],\n", 495 | " [10, 11],\n", 496 | " [2, 3, 4],\n", 497 | " [4, 5, 6, 7, 8],\n", 498 | " [4],\n", 499 | " [8],\n", 500 | " [6, 7, 8, 9],\n", 501 | " [3],\n", 502 | " [5, 6, 7],\n", 503 | " [8, 9, 10, 11],\n", 
504 | " [5, 6, 7, 8],\n", 505 | " [9, 10, 11],\n", 506 | " [0, 1],\n", 507 | " [0, 1, 2, 3],\n", 508 | " [6],\n", 509 | " [5, 6],\n", 510 | " [8, 9, 10, 11],\n", 511 | " [9, 10],\n", 512 | " [6],\n", 513 | " [3, 4, 5, 6, 7],\n", 514 | " [7, 8, 9],\n", 515 | " [8, 9],\n", 516 | " [0, 1, 2, 3],\n", 517 | " [4, 5],\n", 518 | " [5, 6, 7, 8, 9],\n", 519 | " [5],\n", 520 | " [5, 6, 7],\n", 521 | " [9, 10],\n", 522 | " [7],\n", 523 | " [5, 6, 7, 8],\n", 524 | " [1, 2, 3],\n", 525 | " [10, 11],\n", 526 | " [0],\n", 527 | " [3, 4],\n", 528 | " [0],\n", 529 | " [7, 8, 9],\n", 530 | " [2, 3, 4],\n", 531 | " [8],\n", 532 | " [9, 10, 11],\n", 533 | " [10],\n", 534 | " [2, 3],\n", 535 | " [10, 11],\n", 536 | " [6],\n", 537 | " [4],\n", 538 | " [6, 7, 8],\n", 539 | " [2, 3, 4],\n", 540 | " [5, 6, 7, 8, 9],\n", 541 | " [3, 4, 5, 6, 7],\n", 542 | " [6, 7, 8, 9, 10],\n", 543 | " [1, 2, 3],\n", 544 | " [6, 7],\n", 545 | " [4, 5, 6, 7],\n", 546 | " [4, 5, 6, 7, 8],\n", 547 | " [7, 8],\n", 548 | " [1, 2, 3, 4, 5],\n", 549 | " [9, 10, 11],\n", 550 | " [6, 7],\n", 551 | " [9],\n", 552 | " [10],\n", 553 | " [6, 7, 8, 9],\n", 554 | " [5],\n", 555 | " [4, 5, 6],\n", 556 | " [2],\n", 557 | " [8, 9, 10],\n", 558 | " [9, 10],\n", 559 | " [6, 7, 8, 9, 10],\n", 560 | " [3, 4],\n", 561 | " [9, 10],\n", 562 | " [3, 4, 5],\n", 563 | " [8, 9, 10, 11],\n", 564 | " [6, 7],\n", 565 | " [1, 2],\n", 566 | " [0],\n", 567 | " [7],\n", 568 | " [6],\n", 569 | " [6, 7, 8, 9],\n", 570 | " [9],\n", 571 | " [1],\n", 572 | " [3, 4, 5, 6, 7],\n", 573 | " [9, 10],\n", 574 | " [10, 11],\n", 575 | " [8, 9, 10],\n", 576 | " [2, 3],\n", 577 | " [0, 1, 2],\n", 578 | " [9],\n", 579 | " [9, 10, 11],\n", 580 | " [9],\n", 581 | " [10],\n", 582 | " [8, 9, 10],\n", 583 | " [1, 2, 3],\n", 584 | " [4, 5, 6, 7],\n", 585 | " [8, 9],\n", 586 | " [0, 1, 2, 3],\n", 587 | " [8, 9, 10, 11],\n", 588 | " [2, 3, 4, 5],\n", 589 | " [6, 7, 8, 9],\n", 590 | " [2, 3],\n", 591 | " [10, 11],\n", 592 | " [3, 4, 5, 6],\n", 593 | " [9, 10, 11],\n", 594 | " [3, 4, 5],\n", 595 | " [6, 7, 8],\n", 596 | " [5, 6, 7, 8, 9],\n", 597 | " [0, 1, 2, 3, 4],\n", 598 | " [3, 4, 5],\n", 599 | " [3, 4],\n", 600 | " [10, 11],\n", 601 | " [0, 1, 2, 3],\n", 602 | " [7, 8, 9],\n", 603 | " [10],\n", 604 | " [5, 6, 7, 8],\n", 605 | " [3, 4, 5, 6],\n", 606 | " [4, 5, 6],\n", 607 | " [8, 9, 10],\n", 608 | " [5],\n", 609 | " [5, 6, 7],\n", 610 | " [1, 2, 3],\n", 611 | " [0, 1, 2, 3],\n", 612 | " [7, 8],\n", 613 | " [7, 8, 9, 10, 11],\n", 614 | " [7],\n", 615 | " [10],\n", 616 | " [10],\n", 617 | " [5, 6, 7, 8, 9],\n", 618 | " [0, 1],\n", 619 | " [6],\n", 620 | " [0, 1],\n", 621 | " [4, 5, 6, 7],\n", 622 | " [5, 6, 7, 8, 9],\n", 623 | " [4, 5],\n", 624 | " [6, 7, 8, 9],\n", 625 | " [2, 3, 4, 5],\n", 626 | " [4],\n", 627 | " [4, 5, 6, 7, 8],\n", 628 | " [8, 9, 10, 11],\n", 629 | " [8, 9],\n", 630 | " [8, 9],\n", 631 | " [7],\n", 632 | " [5, 6, 7],\n", 633 | " [8, 9, 10],\n", 634 | " [0, 1],\n", 635 | " [6, 7, 8, 9],\n", 636 | " [7],\n", 637 | " [10],\n", 638 | " [4, 5, 6],\n", 639 | " [5, 6, 7],\n", 640 | " [10],\n", 641 | " [0],\n", 642 | " [9, 10],\n", 643 | " [8, 9],\n", 644 | " [9, 10, 11],\n", 645 | " [3, 4, 5, 6, 7],\n", 646 | " [3, 4, 5, 6],\n", 647 | " [7],\n", 648 | " [9, 10],\n", 649 | " [7, 8, 9],\n", 650 | " [6, 7],\n", 651 | " [6, 7, 8, 9, 10],\n", 652 | " [6, 7, 8, 9, 10],\n", 653 | " [8, 9, 10, 11],\n", 654 | " [10, 11],\n", 655 | " [9, 10, 11],\n", 656 | " [8],\n", 657 | " [1, 2, 3, 4, 5],\n", 658 | " [7],\n", 659 | " [1],\n", 660 | " [10],\n", 661 | " [0, 1],\n", 662 | " [0, 1, 
2, 3],\n", 663 | " [5, 6, 7],\n", 664 | " [7],\n", 665 | " [0],\n", 666 | " [5, 6, 7],\n", 667 | " [7, 8, 9],\n", 668 | " [10],\n", 669 | " [3],\n", 670 | " [3],\n", 671 | " [4, 5],\n", 672 | " [0],\n", 673 | " [2, 3, 4, 5, 6],\n", 674 | " [7, 8],\n", 675 | " [5, 6, 7, 8, 9],\n", 676 | " [4, 5],\n", 677 | " [0],\n", 678 | " [4, 5, 6],\n", 679 | " [1, 2, 3],\n", 680 | " [9, 10, 11],\n", 681 | " [1, 2, 3, 4],\n", 682 | " [6, 7, 8, 9],\n", 683 | " [5, 6, 7, 8, 9],\n", 684 | " [0, 1, 2],\n", 685 | " [6, 7, 8, 9, 10],\n", 686 | " [4, 5, 6, 7, 8],\n", 687 | " [8, 9, 10, 11],\n", 688 | " [2, 3, 4],\n", 689 | " [3, 4, 5, 6, 7],\n", 690 | " [10, 11],\n", 691 | " [7],\n", 692 | " [2, 3],\n", 693 | " [8],\n", 694 | " [3, 4, 5, 6],\n", 695 | " [8, 9, 10, 11],\n", 696 | " [1, 2, 3],\n", 697 | " [4, 5, 6],\n", 698 | " [3, 4],\n", 699 | " [3, 4, 5, 6, 7],\n", 700 | " [2],\n", 701 | " [1, 2, 3],\n", 702 | " [2, 3, 4, 5],\n", 703 | " [2, 3, 4, 5, 6],\n", 704 | " [0, 1, 2],\n", 705 | " [8, 9],\n", 706 | " [9],\n", 707 | " [3, 4, 5, 6, 7],\n", 708 | " [7],\n", 709 | " [9, 10],\n", 710 | " [9],\n", 711 | " [2, 3, 4],\n", 712 | " [6, 7, 8]]" 713 | ] 714 | }, 715 | "execution_count": 42, 716 | "metadata": {}, 717 | "output_type": "execute_result" 718 | } 719 | ], 720 | "source": [ 721 | "dataX" 722 | ] 723 | }, 724 | { 725 | "cell_type": "code", 726 | "execution_count": 43, 727 | "metadata": { 728 | "collapsed": true 729 | }, 730 | "outputs": [ 731 | { 732 | "data": { 733 | "text/plain": [ 734 | "[3,\n", 735 | " 2,\n", 736 | " 10,\n", 737 | " 5,\n", 738 | " 10,\n", 739 | " 10,\n", 740 | " 6,\n", 741 | " 11,\n", 742 | " 4,\n", 743 | " 7,\n", 744 | " 10,\n", 745 | " 5,\n", 746 | " 12,\n", 747 | " 5,\n", 748 | " 6,\n", 749 | " 11,\n", 750 | " 5,\n", 751 | " 10,\n", 752 | " 11,\n", 753 | " 10,\n", 754 | " 5,\n", 755 | " 10,\n", 756 | " 6,\n", 757 | " 9,\n", 758 | " 8,\n", 759 | " 11,\n", 760 | " 7,\n", 761 | " 6,\n", 762 | " 9,\n", 763 | " 8,\n", 764 | " 7,\n", 765 | " 12,\n", 766 | " 9,\n", 767 | " 10,\n", 768 | " 7,\n", 769 | " 7,\n", 770 | " 9,\n", 771 | " 11,\n", 772 | " 9,\n", 773 | " 4,\n", 774 | " 11,\n", 775 | " 8,\n", 776 | " 5,\n", 777 | " 10,\n", 778 | " 9,\n", 779 | " 12,\n", 780 | " 12,\n", 781 | " 9,\n", 782 | " 5,\n", 783 | " 8,\n", 784 | " 1,\n", 785 | " 11,\n", 786 | " 12,\n", 787 | " 6,\n", 788 | " 11,\n", 789 | " 5,\n", 790 | " 12,\n", 791 | " 8,\n", 792 | " 3,\n", 793 | " 8,\n", 794 | " 4,\n", 795 | " 10,\n", 796 | " 10,\n", 797 | " 10,\n", 798 | " 9,\n", 799 | " 4,\n", 800 | " 11,\n", 801 | " 4,\n", 802 | " 4,\n", 803 | " 4,\n", 804 | " 10,\n", 805 | " 2,\n", 806 | " 9,\n", 807 | " 1,\n", 808 | " 6,\n", 809 | " 11,\n", 810 | " 10,\n", 811 | " 11,\n", 812 | " 7,\n", 813 | " 10,\n", 814 | " 8,\n", 815 | " 4,\n", 816 | " 12,\n", 817 | " 5,\n", 818 | " 9,\n", 819 | " 5,\n", 820 | " 9,\n", 821 | " 10,\n", 822 | " 4,\n", 823 | " 8,\n", 824 | " 12,\n", 825 | " 9,\n", 826 | " 12,\n", 827 | " 2,\n", 828 | " 4,\n", 829 | " 7,\n", 830 | " 7,\n", 831 | " 12,\n", 832 | " 11,\n", 833 | " 7,\n", 834 | " 8,\n", 835 | " 10,\n", 836 | " 10,\n", 837 | " 4,\n", 838 | " 6,\n", 839 | " 10,\n", 840 | " 6,\n", 841 | " 8,\n", 842 | " 11,\n", 843 | " 8,\n", 844 | " 9,\n", 845 | " 4,\n", 846 | " 12,\n", 847 | " 1,\n", 848 | " 5,\n", 849 | " 1,\n", 850 | " 10,\n", 851 | " 5,\n", 852 | " 9,\n", 853 | " 12,\n", 854 | " 11,\n", 855 | " 4,\n", 856 | " 12,\n", 857 | " 7,\n", 858 | " 5,\n", 859 | " 9,\n", 860 | " 5,\n", 861 | " 10,\n", 862 | " 8,\n", 863 | " 11,\n", 864 | " 4,\n", 865 | " 8,\n", 866 | " 8,\n", 867 | " 9,\n", 
868 | " 9,\n", 869 | " 6,\n", 870 | " 12,\n", 871 | " 8,\n", 872 | " 10,\n", 873 | " 11,\n", 874 | " 10,\n", 875 | " 6,\n", 876 | " 7,\n", 877 | " 3,\n", 878 | " 11,\n", 879 | " 11,\n", 880 | " 11,\n", 881 | " 5,\n", 882 | " 11,\n", 883 | " 6,\n", 884 | " 12,\n", 885 | " 8,\n", 886 | " 3,\n", 887 | " 1,\n", 888 | " 8,\n", 889 | " 7,\n", 890 | " 10,\n", 891 | " 10,\n", 892 | " 2,\n", 893 | " 8,\n", 894 | " 11,\n", 895 | " 12,\n", 896 | " 11,\n", 897 | " 4,\n", 898 | " 3,\n", 899 | " 10,\n", 900 | " 12,\n", 901 | " 10,\n", 902 | " 11,\n", 903 | " 11,\n", 904 | " 4,\n", 905 | " 8,\n", 906 | " 10,\n", 907 | " 4,\n", 908 | " 12,\n", 909 | " 6,\n", 910 | " 10,\n", 911 | " 4,\n", 912 | " 12,\n", 913 | " 7,\n", 914 | " 12,\n", 915 | " 6,\n", 916 | " 9,\n", 917 | " 10,\n", 918 | " 5,\n", 919 | " 6,\n", 920 | " 5,\n", 921 | " 12,\n", 922 | " 4,\n", 923 | " 10,\n", 924 | " 11,\n", 925 | " 9,\n", 926 | " 7,\n", 927 | " 7,\n", 928 | " 11,\n", 929 | " 6,\n", 930 | " 8,\n", 931 | " 4,\n", 932 | " 4,\n", 933 | " 9,\n", 934 | " 12,\n", 935 | " 8,\n", 936 | " 11,\n", 937 | " 11,\n", 938 | " 10,\n", 939 | " 2,\n", 940 | " 7,\n", 941 | " 2,\n", 942 | " 8,\n", 943 | " 10,\n", 944 | " 6,\n", 945 | " 10,\n", 946 | " 6,\n", 947 | " 5,\n", 948 | " 9,\n", 949 | " 12,\n", 950 | " 10,\n", 951 | " 10,\n", 952 | " 8,\n", 953 | " 8,\n", 954 | " 11,\n", 955 | " 2,\n", 956 | " 10,\n", 957 | " 8,\n", 958 | " 11,\n", 959 | " 7,\n", 960 | " 8,\n", 961 | " 11,\n", 962 | " 1,\n", 963 | " 11,\n", 964 | " 10,\n", 965 | " 12,\n", 966 | " 8,\n", 967 | " 7,\n", 968 | " 8,\n", 969 | " 11,\n", 970 | " 10,\n", 971 | " 8,\n", 972 | " 11,\n", 973 | " 11,\n", 974 | " 12,\n", 975 | " 12,\n", 976 | " 12,\n", 977 | " 9,\n", 978 | " 6,\n", 979 | " 8,\n", 980 | " 2,\n", 981 | " 11,\n", 982 | " 2,\n", 983 | " 4,\n", 984 | " 8,\n", 985 | " 8,\n", 986 | " 1,\n", 987 | " 8,\n", 988 | " 10,\n", 989 | " 11,\n", 990 | " 4,\n", 991 | " 4,\n", 992 | " 6,\n", 993 | " 1,\n", 994 | " 7,\n", 995 | " 9,\n", 996 | " 10,\n", 997 | " 6,\n", 998 | " 1,\n", 999 | " 7,\n", 1000 | " 4,\n", 1001 | " 12,\n", 1002 | " 5,\n", 1003 | " 10,\n", 1004 | " 10,\n", 1005 | " 3,\n", 1006 | " 11,\n", 1007 | " 9,\n", 1008 | " 12,\n", 1009 | " 5,\n", 1010 | " 8,\n", 1011 | " 12,\n", 1012 | " 8,\n", 1013 | " 4,\n", 1014 | " 9,\n", 1015 | " 7,\n", 1016 | " 12,\n", 1017 | " 4,\n", 1018 | " 7,\n", 1019 | " 5,\n", 1020 | " 8,\n", 1021 | " 3,\n", 1022 | " 4,\n", 1023 | " 6,\n", 1024 | " 7,\n", 1025 | " 3,\n", 1026 | " 10,\n", 1027 | " 10,\n", 1028 | " 8,\n", 1029 | " 8,\n", 1030 | " 11,\n", 1031 | " 10,\n", 1032 | " 5,\n", 1033 | " 9]" 1034 | ] 1035 | }, 1036 | "execution_count": 43, 1037 | "metadata": {}, 1038 | "output_type": "execute_result" 1039 | } 1040 | ], 1041 | "source": [ 1042 | "dataY" 1043 | ] 1044 | }, 1045 | { 1046 | "cell_type": "code", 1047 | "execution_count": 41, 1048 | "metadata": { 1049 | "scrolled": true 1050 | }, 1051 | "outputs": [ 1052 | { 1053 | "name": "stderr", 1054 | "output_type": "stream", 1055 | "text": [ 1056 | "Using TensorFlow backend.\n" 1057 | ] 1058 | } 1059 | ], 1060 | "source": [ 1061 | "from tensorflow.keras.preprocessing.sequence import pad_sequences\n", 1062 | "from tensorflow.keras.layers import Embedding, LSTM , Dense, Dropout, Bidirectional\n", 1063 | "from tensorflow.keras.preprocessing.text import Tokenizer\n", 1064 | "from tensorflow.keras.models import Sequential\n", 1065 | "from keras.utils import np_utils\n", 1066 | "from tensorflow.keras import regularizers\n", 1067 | "\n", 1068 | "import numpy as np" 1069 | ] 1070 | }, 1071 | { 
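The cells above are the heart of the data preparation that produced the `dataX`/`dataY` dumps: each training input is a random run of one to five consecutive words from the rhyme, encoded as integers, and the target is the single word that follows the run. A minimal self-contained sketch of the same idea (the `sorted(set(...))` vocabulary below is an illustrative variant, not the notebook's exact code; it stays correct even when a word repeats, whereas enumerating `poem` directly only works here because every word in this rhyme is unique):

```python
import numpy as np
from tensorflow.keras.preprocessing.sequence import pad_sequences

poem = "jack and jill went up the hill to fetch a pail of water".split()

# Build the vocabulary from unique words; positional enumeration of `poem`
# would hand out several indices for a repeated word.
vocab = sorted(set(poem))
word_to_int = {w: i for i, w in enumerate(vocab)}
int_to_word = {i: w for i, w in enumerate(vocab)}

max_len = 5
dataX, dataY = [], []
for _ in range(300):
    # Sample a run of 1..max_len consecutive words and the word after it.
    start = np.random.randint(len(poem) - 2)
    end = np.random.randint(start, min(start + max_len, len(poem) - 1))
    dataX.append([word_to_int[w] for w in poem[start:end + 1]])
    dataY.append(word_to_int[poem[end + 1]])

# Left-pad the variable-length runs to one common length for the LSTM.
X = pad_sequences(dataX, maxlen=max_len, dtype='float32')
```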
1072 | "cell_type": "code", 1073 | "execution_count": 47, 1074 | "metadata": {}, 1075 | "outputs": [], 1076 | "source": [ 1077 | "X= pad_sequences(dataX, maxlen=max_len , dtype='float32')" 1078 | ] 1079 | }, 1080 | { 1081 | "cell_type": "code", 1082 | "execution_count": 48, 1083 | "metadata": {}, 1084 | "outputs": [], 1085 | "source": [ 1086 | "X = np.reshape(X, (X.shape[0], max_len, 1))\n", 1087 | "# normalize\n", 1088 | "X=X/float(len(poem))\n", 1089 | "# one hot encoding to the output variable\n", 1090 | "y=np_utils.to_categorical(dataY)" 1091 | ] 1092 | }, 1093 | { 1094 | "cell_type": "code", 1095 | "execution_count": 49, 1096 | "metadata": { 1097 | "collapsed": true 1098 | }, 1099 | "outputs": [ 1100 | { 1101 | "data": { 1102 | "text/plain": [ 1103 | "array([[0., 0., 0., ..., 0., 0., 0.],\n", 1104 | " [0., 0., 1., ..., 0., 0., 0.],\n", 1105 | " [0., 0., 0., ..., 1., 0., 0.],\n", 1106 | " ...,\n", 1107 | " [0., 0., 0., ..., 1., 0., 0.],\n", 1108 | " [0., 0., 0., ..., 0., 0., 0.],\n", 1109 | " [0., 0., 0., ..., 0., 0., 0.]], dtype=float32)" 1110 | ] 1111 | }, 1112 | "execution_count": 49, 1113 | "metadata": {}, 1114 | "output_type": "execute_result" 1115 | } 1116 | ], 1117 | "source": [ 1118 | "y" 1119 | ] 1120 | }, 1121 | { 1122 | "cell_type": "code", 1123 | "execution_count": null, 1124 | "metadata": {}, 1125 | "outputs": [], 1126 | "source": [] 1127 | }, 1128 | { 1129 | "cell_type": "code", 1130 | "execution_count": 50, 1131 | "metadata": {}, 1132 | "outputs": [ 1133 | { 1134 | "name": "stdout", 1135 | "output_type": "stream", 1136 | "text": [ 1137 | "WARNING:tensorflow:From C:\\Users\\HP\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\init_ops.py:1251: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 1138 | "Instructions for updating:\n", 1139 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n" 1140 | ] 1141 | } 1142 | ], 1143 | "source": [ 1144 | "# define the LSTM model\n", 1145 | "model = Sequential()\n", 1146 | "model.add(LSTM(32, input_shape=(X.shape[1], 1)))\n", 1147 | "\n", 1148 | "model.add(Dense(y.shape[1], activation='softmax'))\n", 1149 | "model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])" 1150 | ] 1151 | }, 1152 | { 1153 | "cell_type": "code", 1154 | "execution_count": 51, 1155 | "metadata": {}, 1156 | "outputs": [ 1157 | { 1158 | "data": { 1159 | "text/plain": [ 1160 | "(5, 1)" 1161 | ] 1162 | }, 1163 | "execution_count": 51, 1164 | "metadata": {}, 1165 | "output_type": "execute_result" 1166 | } 1167 | ], 1168 | "source": [ 1169 | "X.shape[1], 1" 1170 | ] 1171 | }, 1172 | { 1173 | "cell_type": "code", 1174 | "execution_count": 52, 1175 | "metadata": { 1176 | "collapsed": true 1177 | }, 1178 | "outputs": [ 1179 | { 1180 | "name": "stdout", 1181 | "output_type": "stream", 1182 | "text": [ 1183 | "WARNING:tensorflow:From C:\\Users\\HP\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\math_grad.py:1250: add_dispatch_support..wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n", 1184 | "Instructions for updating:\n", 1185 | "Use tf.where in 2.0, which has the same broadcast rule as np.where\n", 1186 | "Epoch 1/200\n", 1187 | "300/300 [==============================] - 1s 5ms/sample - loss: 2.5242 - acc: 0.1533\n", 1188 | "Epoch 2/200\n", 1189 | "300/300 [==============================] - 0s 610us/sample - loss: 2.4631 - acc: 0.1533\n", 
1190 | "Epoch 3/200\n", 1191 | "300/300 [==============================] - 0s 640us/sample - loss: 2.3745 - acc: 0.1533\n", 1192 | "Epoch 4/200\n", 1193 | "300/300 [==============================] - 0s 690us/sample - loss: 2.3146 - acc: 0.1533\n", 1194 | "Epoch 5/200\n", 1195 | "300/300 [==============================] - 0s 747us/sample - loss: 2.2784 - acc: 0.1533\n", 1196 | "Epoch 6/200\n", 1197 | "300/300 [==============================] - 0s 677us/sample - loss: 2.2487 - acc: 0.1533\n", 1198 | "Epoch 7/200\n", 1199 | "300/300 [==============================] - 0s 680us/sample - loss: 2.2237 - acc: 0.1533\n", 1200 | "Epoch 8/200\n", 1201 | "300/300 [==============================] - 0s 590us/sample - loss: 2.1982 - acc: 0.1533\n", 1202 | "Epoch 9/200\n", 1203 | "300/300 [==============================] - 0s 840us/sample - loss: 2.1790 - acc: 0.1600\n", 1204 | "Epoch 10/200\n", 1205 | "300/300 [==============================] - 0s 880us/sample - loss: 2.1536 - acc: 0.1567\n", 1206 | "Epoch 11/200\n", 1207 | "300/300 [==============================] - 0s 873us/sample - loss: 2.1320 - acc: 0.2100\n", 1208 | "Epoch 12/200\n", 1209 | "300/300 [==============================] - 0s 717us/sample - loss: 2.1109 - acc: 0.2000\n", 1210 | "Epoch 13/200\n", 1211 | "300/300 [==============================] - 0s 813us/sample - loss: 2.0884 - acc: 0.2200\n", 1212 | "Epoch 14/200\n", 1213 | "300/300 [==============================] - 0s 823us/sample - loss: 2.0592 - acc: 0.2367\n", 1214 | "Epoch 15/200\n", 1215 | "300/300 [==============================] - 0s 783us/sample - loss: 2.0389 - acc: 0.2100\n", 1216 | "Epoch 16/200\n", 1217 | "300/300 [==============================] - 0s 890us/sample - loss: 2.0125 - acc: 0.2300\n", 1218 | "Epoch 17/200\n", 1219 | "300/300 [==============================] - 0s 947us/sample - loss: 1.9842 - acc: 0.2800\n", 1220 | "Epoch 18/200\n", 1221 | "300/300 [==============================] - 0s 587us/sample - loss: 1.9570 - acc: 0.2533\n", 1222 | "Epoch 19/200\n", 1223 | "300/300 [==============================] - 0s 543us/sample - loss: 1.9206 - acc: 0.3567\n", 1224 | "Epoch 20/200\n", 1225 | "300/300 [==============================] - 0s 553us/sample - loss: 1.8914 - acc: 0.3433\n", 1226 | "Epoch 21/200\n", 1227 | "300/300 [==============================] - 0s 557us/sample - loss: 1.8569 - acc: 0.3600\n", 1228 | "Epoch 22/200\n", 1229 | "300/300 [==============================] - 0s 577us/sample - loss: 1.8160 - acc: 0.3667\n", 1230 | "Epoch 23/200\n", 1231 | "300/300 [==============================] - 0s 573us/sample - loss: 1.7943 - acc: 0.3633\n", 1232 | "Epoch 24/200\n", 1233 | "300/300 [==============================] - 0s 600us/sample - loss: 1.7671 - acc: 0.3633\n", 1234 | "Epoch 25/200\n", 1235 | "300/300 [==============================] - 0s 573us/sample - loss: 1.7225 - acc: 0.3900\n", 1236 | "Epoch 26/200\n", 1237 | "300/300 [==============================] - 0s 577us/sample - loss: 1.6989 - acc: 0.4067\n", 1238 | "Epoch 27/200\n", 1239 | "300/300 [==============================] - 0s 590us/sample - loss: 1.6788 - acc: 0.4067\n", 1240 | "Epoch 28/200\n", 1241 | "300/300 [==============================] - 0s 607us/sample - loss: 1.6380 - acc: 0.4000\n", 1242 | "Epoch 29/200\n", 1243 | "300/300 [==============================] - 0s 623us/sample - loss: 1.6028 - acc: 0.4067\n", 1244 | "Epoch 30/200\n", 1245 | "300/300 [==============================] - 0s 653us/sample - loss: 1.5789 - acc: 0.4167\n", 1246 | "Epoch 31/200\n", 1247 | "300/300 
[==============================] - 0s 627us/sample - loss: 1.5422 - acc: 0.4567\n", 1248 | "Epoch 32/200\n", 1249 | "300/300 [==============================] - 0s 607us/sample - loss: 1.5179 - acc: 0.4633\n", 1250 | "Epoch 33/200\n", 1251 | "300/300 [==============================] - 0s 647us/sample - loss: 1.4907 - acc: 0.4367\n", 1252 | "Epoch 34/200\n", 1253 | "300/300 [==============================] - 0s 723us/sample - loss: 1.4701 - acc: 0.4600\n", 1254 | "Epoch 35/200\n", 1255 | "300/300 [==============================] - 0s 700us/sample - loss: 1.4383 - acc: 0.4967\n", 1256 | "Epoch 36/200\n", 1257 | "300/300 [==============================] - 0s 723us/sample - loss: 1.4112 - acc: 0.5267\n", 1258 | "Epoch 37/200\n", 1259 | "300/300 [==============================] - 0s 1ms/sample - loss: 1.3854 - acc: 0.5200\n", 1260 | "Epoch 38/200\n", 1261 | "300/300 [==============================] - 0s 697us/sample - loss: 1.3555 - acc: 0.5133\n", 1262 | "Epoch 39/200\n", 1263 | "300/300 [==============================] - 0s 687us/sample - loss: 1.3392 - acc: 0.5233\n", 1264 | "Epoch 40/200\n", 1265 | "300/300 [==============================] - 0s 677us/sample - loss: 1.3185 - acc: 0.5433\n", 1266 | "Epoch 41/200\n", 1267 | "300/300 [==============================] - 0s 670us/sample - loss: 1.3087 - acc: 0.5233\n", 1268 | "Epoch 42/200\n", 1269 | "300/300 [==============================] - 0s 670us/sample - loss: 1.2784 - acc: 0.5533\n", 1270 | "Epoch 43/200\n", 1271 | "300/300 [==============================] - 0s 697us/sample - loss: 1.2625 - acc: 0.5700\n", 1272 | "Epoch 44/200\n", 1273 | "300/300 [==============================] - 0s 703us/sample - loss: 1.2380 - acc: 0.5767\n", 1274 | "Epoch 45/200\n", 1275 | "300/300 [==============================] - 0s 690us/sample - loss: 1.2296 - acc: 0.6167\n", 1276 | "Epoch 46/200\n", 1277 | "300/300 [==============================] - 0s 693us/sample - loss: 1.2108 - acc: 0.5533\n", 1278 | "Epoch 47/200\n", 1279 | "300/300 [==============================] - 0s 717us/sample - loss: 1.1856 - acc: 0.6233\n", 1280 | "Epoch 48/200\n", 1281 | "300/300 [==============================] - 0s 710us/sample - loss: 1.1713 - acc: 0.6233\n", 1282 | "Epoch 49/200\n", 1283 | "300/300 [==============================] - 0s 667us/sample - loss: 1.1572 - acc: 0.6467\n", 1284 | "Epoch 50/200\n", 1285 | "300/300 [==============================] - 0s 700us/sample - loss: 1.1393 - acc: 0.6167\n", 1286 | "Epoch 51/200\n", 1287 | "300/300 [==============================] - 0s 697us/sample - loss: 1.1275 - acc: 0.6800\n", 1288 | "Epoch 52/200\n", 1289 | "300/300 [==============================] - 0s 677us/sample - loss: 1.1192 - acc: 0.6467\n", 1290 | "Epoch 53/200\n", 1291 | "300/300 [==============================] - 0s 710us/sample - loss: 1.0992 - acc: 0.6533\n", 1292 | "Epoch 54/200\n", 1293 | "300/300 [==============================] - 0s 680us/sample - loss: 1.0910 - acc: 0.6267\n", 1294 | "Epoch 55/200\n", 1295 | "300/300 [==============================] - 0s 690us/sample - loss: 1.0735 - acc: 0.6567\n", 1296 | "Epoch 56/200\n", 1297 | "300/300 [==============================] - 0s 677us/sample - loss: 1.0681 - acc: 0.6333\n", 1298 | "Epoch 57/200\n", 1299 | "300/300 [==============================] - 0s 687us/sample - loss: 1.0548 - acc: 0.6700\n", 1300 | "Epoch 58/200\n", 1301 | "300/300 [==============================] - 0s 707us/sample - loss: 1.0444 - acc: 0.6933\n", 1302 | "Epoch 59/200\n", 1303 | "300/300 [==============================] - 0s 690us/sample - 
loss: 1.0287 - acc: 0.6567\n", 1304 | "Epoch 60/200\n", 1305 | "300/300 [==============================] - 0s 707us/sample - loss: 1.0114 - acc: 0.6800\n", 1306 | "Epoch 61/200\n", 1307 | "300/300 [==============================] - 0s 710us/sample - loss: 1.0037 - acc: 0.6667\n", 1308 | "Epoch 62/200\n", 1309 | "300/300 [==============================] - 0s 770us/sample - loss: 0.9936 - acc: 0.6767\n", 1310 | "Epoch 63/200\n", 1311 | "300/300 [==============================] - 0s 743us/sample - loss: 0.9887 - acc: 0.6767\n", 1312 | "Epoch 64/200\n", 1313 | "300/300 [==============================] - 0s 700us/sample - loss: 0.9804 - acc: 0.6700\n", 1314 | "Epoch 65/200\n", 1315 | "300/300 [==============================] - 0s 690us/sample - loss: 0.9665 - acc: 0.7000\n", 1316 | "Epoch 66/200\n", 1317 | "300/300 [==============================] - 0s 723us/sample - loss: 0.9492 - acc: 0.7033\n", 1318 | "Epoch 67/200\n", 1319 | "300/300 [==============================] - 0s 703us/sample - loss: 0.9370 - acc: 0.7133\n", 1320 | "Epoch 68/200\n", 1321 | "300/300 [==============================] - 0s 743us/sample - loss: 0.9306 - acc: 0.6833\n", 1322 | "Epoch 69/200\n", 1323 | "300/300 [==============================] - 0s 760us/sample - loss: 0.9285 - acc: 0.7067\n", 1324 | "Epoch 70/200\n", 1325 | "300/300 [==============================] - 0s 757us/sample - loss: 0.9156 - acc: 0.7067\n", 1326 | "Epoch 71/200\n", 1327 | "300/300 [==============================] - 0s 720us/sample - loss: 0.9052 - acc: 0.7067\n", 1328 | "Epoch 72/200\n", 1329 | "300/300 [==============================] - 0s 700us/sample - loss: 0.8976 - acc: 0.7133\n", 1330 | "Epoch 73/200\n", 1331 | "300/300 [==============================] - 0s 707us/sample - loss: 0.8893 - acc: 0.6967\n", 1332 | "Epoch 74/200\n", 1333 | "300/300 [==============================] - 0s 707us/sample - loss: 0.8772 - acc: 0.7300\n", 1334 | "Epoch 75/200\n", 1335 | "300/300 [==============================] - 0s 707us/sample - loss: 0.8722 - acc: 0.7200\n", 1336 | "Epoch 76/200\n", 1337 | "300/300 [==============================] - 0s 713us/sample - loss: 0.8607 - acc: 0.7400\n", 1338 | "Epoch 77/200\n", 1339 | "300/300 [==============================] - 0s 720us/sample - loss: 0.8570 - acc: 0.7033\n", 1340 | "Epoch 78/200\n", 1341 | "300/300 [==============================] - 0s 710us/sample - loss: 0.8403 - acc: 0.7367\n" 1342 | ] 1343 | }, 1344 | { 1345 | "name": "stdout", 1346 | "output_type": "stream", 1347 | "text": [ 1348 | "Epoch 79/200\n", 1349 | "300/300 [==============================] - 0s 693us/sample - loss: 0.8382 - acc: 0.7133\n", 1350 | "Epoch 80/200\n", 1351 | "300/300 [==============================] - 0s 673us/sample - loss: 0.8264 - acc: 0.7367\n", 1352 | "Epoch 81/200\n", 1353 | "300/300 [==============================] - 0s 670us/sample - loss: 0.8155 - acc: 0.7967\n", 1354 | "Epoch 82/200\n", 1355 | "300/300 [==============================] - 0s 687us/sample - loss: 0.8152 - acc: 0.6800\n", 1356 | "Epoch 83/200\n", 1357 | "300/300 [==============================] - 0s 677us/sample - loss: 0.8045 - acc: 0.7633\n", 1358 | "Epoch 84/200\n", 1359 | "300/300 [==============================] - 0s 683us/sample - loss: 0.7947 - acc: 0.7767\n", 1360 | "Epoch 85/200\n", 1361 | "300/300 [==============================] - 0s 687us/sample - loss: 0.7876 - acc: 0.7633\n", 1362 | "Epoch 86/200\n", 1363 | "300/300 [==============================] - 0s 677us/sample - loss: 0.7787 - acc: 0.7767\n", 1364 | "Epoch 87/200\n", 1365 | "300/300 
[==============================] - 0s 687us/sample - loss: 0.7761 - acc: 0.7567\n", 1366 | "Epoch 88/200\n", 1367 | "300/300 [==============================] - 0s 710us/sample - loss: 0.7636 - acc: 0.7900\n", 1368 | "Epoch 89/200\n", 1369 | "300/300 [==============================] - 0s 747us/sample - loss: 0.7607 - acc: 0.7633\n", 1370 | "Epoch 90/200\n", 1371 | "300/300 [==============================] - 0s 727us/sample - loss: 0.7486 - acc: 0.8100\n", 1372 | "Epoch 91/200\n", 1373 | "300/300 [==============================] - 0s 683us/sample - loss: 0.7414 - acc: 0.8133\n", 1374 | "Epoch 92/200\n", 1375 | "300/300 [==============================] - 0s 960us/sample - loss: 0.7361 - acc: 0.7867\n", 1376 | "Epoch 93/200\n", 1377 | "300/300 [==============================] - 0s 870us/sample - loss: 0.7294 - acc: 0.8100\n", 1378 | "Epoch 94/200\n", 1379 | "300/300 [==============================] - 0s 710us/sample - loss: 0.7238 - acc: 0.7900\n", 1380 | "Epoch 95/200\n", 1381 | "300/300 [==============================] - 0s 653us/sample - loss: 0.7202 - acc: 0.8033\n", 1382 | "Epoch 96/200\n", 1383 | "300/300 [==============================] - 0s 683us/sample - loss: 0.7110 - acc: 0.8033\n", 1384 | "Epoch 97/200\n", 1385 | "300/300 [==============================] - 0s 667us/sample - loss: 0.7066 - acc: 0.8033\n", 1386 | "Epoch 98/200\n", 1387 | "300/300 [==============================] - 0s 670us/sample - loss: 0.7078 - acc: 0.8067\n", 1388 | "Epoch 99/200\n", 1389 | "300/300 [==============================] - 0s 693us/sample - loss: 0.6900 - acc: 0.7900\n", 1390 | "Epoch 100/200\n", 1391 | "300/300 [==============================] - 0s 680us/sample - loss: 0.6829 - acc: 0.8200\n", 1392 | "Epoch 101/200\n", 1393 | "300/300 [==============================] - 0s 657us/sample - loss: 0.6823 - acc: 0.8333\n", 1394 | "Epoch 102/200\n", 1395 | "300/300 [==============================] - 0s 677us/sample - loss: 0.6738 - acc: 0.8167\n", 1396 | "Epoch 103/200\n", 1397 | "300/300 [==============================] - 0s 690us/sample - loss: 0.6686 - acc: 0.8200\n", 1398 | "Epoch 104/200\n", 1399 | "300/300 [==============================] - 0s 683us/sample - loss: 0.6614 - acc: 0.8200\n", 1400 | "Epoch 105/200\n", 1401 | "300/300 [==============================] - 0s 673us/sample - loss: 0.6529 - acc: 0.8333\n", 1402 | "Epoch 106/200\n", 1403 | "300/300 [==============================] - 0s 713us/sample - loss: 0.6477 - acc: 0.8233\n", 1404 | "Epoch 107/200\n", 1405 | "300/300 [==============================] - 0s 697us/sample - loss: 0.6432 - acc: 0.8267\n", 1406 | "Epoch 108/200\n", 1407 | "300/300 [==============================] - 0s 687us/sample - loss: 0.6516 - acc: 0.8100\n", 1408 | "Epoch 109/200\n", 1409 | "300/300 [==============================] - 0s 680us/sample - loss: 0.6437 - acc: 0.8200\n", 1410 | "Epoch 110/200\n", 1411 | "300/300 [==============================] - 0s 777us/sample - loss: 0.6451 - acc: 0.8233\n", 1412 | "Epoch 111/200\n", 1413 | "300/300 [==============================] - 0s 677us/sample - loss: 0.6266 - acc: 0.8200\n", 1414 | "Epoch 112/200\n", 1415 | "300/300 [==============================] - 0s 677us/sample - loss: 0.6280 - acc: 0.8400\n", 1416 | "Epoch 113/200\n", 1417 | "300/300 [==============================] - 0s 773us/sample - loss: 0.6237 - acc: 0.8733\n", 1418 | "Epoch 114/200\n", 1419 | "300/300 [==============================] - 0s 760us/sample - loss: 0.6151 - acc: 0.8300\n", 1420 | "Epoch 115/200\n", 1421 | "300/300 [==============================] - 0s 
700us/sample - loss: 0.6056 - acc: 0.8567\n", 1422 | "Epoch 116/200\n", 1423 | "300/300 [==============================] - 0s 683us/sample - loss: 0.6010 - acc: 0.8133\n", 1424 | "Epoch 117/200\n", 1425 | "300/300 [==============================] - 0s 693us/sample - loss: 0.5986 - acc: 0.8400\n", 1426 | "Epoch 118/200\n", 1427 | "300/300 [==============================] - 0s 693us/sample - loss: 0.5965 - acc: 0.8567\n", 1428 | "Epoch 119/200\n", 1429 | "300/300 [==============================] - 0s 727us/sample - loss: 0.5885 - acc: 0.8333\n", 1430 | "Epoch 120/200\n", 1431 | "300/300 [==============================] - 0s 697us/sample - loss: 0.5900 - acc: 0.8600\n", 1432 | "Epoch 121/200\n", 1433 | "300/300 [==============================] - 0s 1ms/sample - loss: 0.5776 - acc: 0.8233\n", 1434 | "Epoch 122/200\n", 1435 | "300/300 [==============================] - 0s 960us/sample - loss: 0.5768 - acc: 0.8300\n", 1436 | "Epoch 123/200\n", 1437 | "300/300 [==============================] - 0s 687us/sample - loss: 0.5768 - acc: 0.8500\n", 1438 | "Epoch 124/200\n", 1439 | "300/300 [==============================] - 0s 653us/sample - loss: 0.5685 - acc: 0.8467\n", 1440 | "Epoch 125/200\n", 1441 | "300/300 [==============================] - 0s 687us/sample - loss: 0.5613 - acc: 0.8467\n", 1442 | "Epoch 126/200\n", 1443 | "300/300 [==============================] - 0s 703us/sample - loss: 0.5596 - acc: 0.8733\n", 1444 | "Epoch 127/200\n", 1445 | "300/300 [==============================] - 0s 673us/sample - loss: 0.5601 - acc: 0.8367\n", 1446 | "Epoch 128/200\n", 1447 | "300/300 [==============================] - 0s 663us/sample - loss: 0.5544 - acc: 0.8467\n", 1448 | "Epoch 129/200\n", 1449 | "300/300 [==============================] - 0s 717us/sample - loss: 0.5487 - acc: 0.8400\n", 1450 | "Epoch 130/200\n", 1451 | "300/300 [==============================] - 0s 687us/sample - loss: 0.5455 - acc: 0.8567\n", 1452 | "Epoch 131/200\n", 1453 | "300/300 [==============================] - 0s 737us/sample - loss: 0.5450 - acc: 0.8500\n", 1454 | "Epoch 132/200\n", 1455 | "300/300 [==============================] - 0s 750us/sample - loss: 0.5430 - acc: 0.8767\n", 1456 | "Epoch 133/200\n", 1457 | "300/300 [==============================] - 0s 767us/sample - loss: 0.5427 - acc: 0.8500\n", 1458 | "Epoch 134/200\n", 1459 | "300/300 [==============================] - 0s 733us/sample - loss: 0.5318 - acc: 0.8400\n", 1460 | "Epoch 135/200\n", 1461 | "300/300 [==============================] - 0s 757us/sample - loss: 0.5241 - acc: 0.8433\n", 1462 | "Epoch 136/200\n", 1463 | "300/300 [==============================] - 0s 793us/sample - loss: 0.5345 - acc: 0.8400\n", 1464 | "Epoch 137/200\n", 1465 | "300/300 [==============================] - 0s 753us/sample - loss: 0.5300 - acc: 0.8567\n", 1466 | "Epoch 138/200\n", 1467 | "300/300 [==============================] - 0s 850us/sample - loss: 0.5159 - acc: 0.8900\n", 1468 | "Epoch 139/200\n", 1469 | "300/300 [==============================] - 0s 797us/sample - loss: 0.5129 - acc: 0.8700\n", 1470 | "Epoch 140/200\n", 1471 | "300/300 [==============================] - 0s 813us/sample - loss: 0.5122 - acc: 0.8533\n", 1472 | "Epoch 141/200\n", 1473 | "300/300 [==============================] - 0s 760us/sample - loss: 0.5115 - acc: 0.8433\n", 1474 | "Epoch 142/200\n", 1475 | "300/300 [==============================] - 0s 800us/sample - loss: 0.5037 - acc: 0.8567\n", 1476 | "Epoch 143/200\n", 1477 | "300/300 [==============================] - 0s 780us/sample - loss: 0.5032 
- acc: 0.8833\n", 1478 | "Epoch 144/200\n", 1479 | "300/300 [==============================] - 0s 870us/sample - loss: 0.4989 - acc: 0.8767\n", 1480 | "Epoch 145/200\n", 1481 | "300/300 [==============================] - 0s 833us/sample - loss: 0.5017 - acc: 0.8567\n", 1482 | "Epoch 146/200\n", 1483 | "300/300 [==============================] - 0s 783us/sample - loss: 0.4925 - acc: 0.8700\n", 1484 | "Epoch 147/200\n", 1485 | "300/300 [==============================] - 0s 907us/sample - loss: 0.4912 - acc: 0.8767\n", 1486 | "Epoch 148/200\n", 1487 | "300/300 [==============================] - 0s 763us/sample - loss: 0.5006 - acc: 0.8833\n", 1488 | "Epoch 149/200\n", 1489 | "300/300 [==============================] - 0s 973us/sample - loss: 0.4848 - acc: 0.8733\n", 1490 | "Epoch 150/200\n", 1491 | "300/300 [==============================] - 0s 887us/sample - loss: 0.4839 - acc: 0.8733\n", 1492 | "Epoch 151/200\n", 1493 | "300/300 [==============================] - 0s 863us/sample - loss: 0.4978 - acc: 0.8400\n", 1494 | "Epoch 152/200\n", 1495 | "300/300 [==============================] - 0s 807us/sample - loss: 0.4833 - acc: 0.8600\n", 1496 | "Epoch 153/200\n", 1497 | "300/300 [==============================] - 0s 883us/sample - loss: 0.4767 - acc: 0.8733\n", 1498 | "Epoch 154/200\n", 1499 | "300/300 [==============================] - 0s 833us/sample - loss: 0.4808 - acc: 0.8633\n", 1500 | "Epoch 155/200\n", 1501 | "300/300 [==============================] - 0s 883us/sample - loss: 0.4681 - acc: 0.8767\n", 1502 | "Epoch 156/200\n", 1503 | "300/300 [==============================] - 0s 790us/sample - loss: 0.4647 - acc: 0.8767\n", 1504 | "Epoch 157/200\n", 1505 | "300/300 [==============================] - 0s 860us/sample - loss: 0.4641 - acc: 0.8800\n", 1506 | "Epoch 158/200\n", 1507 | "300/300 [==============================] - 0s 790us/sample - loss: 0.4762 - acc: 0.8533\n", 1508 | "Epoch 159/200\n", 1509 | "300/300 [==============================] - 0s 837us/sample - loss: 0.4705 - acc: 0.8933\n", 1510 | "Epoch 160/200\n", 1511 | "300/300 [==============================] - 0s 710us/sample - loss: 0.4605 - acc: 0.8600\n", 1512 | "Epoch 161/200\n", 1513 | "300/300 [==============================] - 0s 827us/sample - loss: 0.4587 - acc: 0.8633\n", 1514 | "Epoch 162/200\n", 1515 | "300/300 [==============================] - 0s 737us/sample - loss: 0.4557 - acc: 0.9000\n", 1516 | "Epoch 163/200\n", 1517 | "300/300 [==============================] - 0s 790us/sample - loss: 0.4530 - acc: 0.8667\n", 1518 | "Epoch 164/200\n", 1519 | "300/300 [==============================] - 0s 800us/sample - loss: 0.4564 - acc: 0.8733\n", 1520 | "Epoch 165/200\n", 1521 | "300/300 [==============================] - 0s 903us/sample - loss: 0.4485 - acc: 0.8967\n", 1522 | "Epoch 166/200\n", 1523 | "300/300 [==============================] - 0s 843us/sample - loss: 0.4439 - acc: 0.8800\n", 1524 | "Epoch 167/200\n", 1525 | "300/300 [==============================] - 0s 803us/sample - loss: 0.4387 - acc: 0.8967\n", 1526 | "Epoch 168/200\n", 1527 | "300/300 [==============================] - 0s 853us/sample - loss: 0.4356 - acc: 0.8667\n", 1528 | "Epoch 169/200\n", 1529 | "300/300 [==============================] - 0s 757us/sample - loss: 0.4329 - acc: 0.8967\n", 1530 | "Epoch 170/200\n", 1531 | "300/300 [==============================] - 0s 753us/sample - loss: 0.4379 - acc: 0.9100\n", 1532 | "Epoch 171/200\n", 1533 | "300/300 [==============================] - 0s 767us/sample - loss: 0.4323 - acc: 0.9000\n", 1534 | 
"Epoch 172/200\n", 1535 | "300/300 [==============================] - 0s 820us/sample - loss: 0.4297 - acc: 0.8833\n", 1536 | "Epoch 173/200\n", 1537 | "300/300 [==============================] - 0s 750us/sample - loss: 0.4279 - acc: 0.9033\n", 1538 | "Epoch 174/200\n", 1539 | "300/300 [==============================] - ETA: 0s - loss: 0.4212 - acc: 0.903 - 0s 830us/sample - loss: 0.4227 - acc: 0.9000\n", 1540 | "Epoch 175/200\n", 1541 | "300/300 [==============================] - 0s 773us/sample - loss: 0.4210 - acc: 0.8733\n", 1542 | "Epoch 176/200\n", 1543 | "300/300 [==============================] - 0s 1ms/sample - loss: 0.4281 - acc: 0.9067\n", 1544 | "Epoch 177/200\n", 1545 | "300/300 [==============================] - 0s 1ms/sample - loss: 0.4268 - acc: 0.9000\n", 1546 | "Epoch 178/200\n", 1547 | "300/300 [==============================] - 0s 733us/sample - loss: 0.4153 - acc: 0.8800\n", 1548 | "Epoch 179/200\n", 1549 | "300/300 [==============================] - 0s 727us/sample - loss: 0.4128 - acc: 0.9133\n", 1550 | "Epoch 180/200\n", 1551 | "300/300 [==============================] - 0s 797us/sample - loss: 0.4214 - acc: 0.8800\n", 1552 | "Epoch 181/200\n", 1553 | "300/300 [==============================] - 0s 760us/sample - loss: 0.4127 - acc: 0.8933\n", 1554 | "Epoch 182/200\n", 1555 | "300/300 [==============================] - 0s 727us/sample - loss: 0.4128 - acc: 0.8833\n", 1556 | "Epoch 183/200\n", 1557 | "300/300 [==============================] - 0s 727us/sample - loss: 0.4105 - acc: 0.9100\n", 1558 | "Epoch 184/200\n", 1559 | "300/300 [==============================] - 0s 710us/sample - loss: 0.4023 - acc: 0.9133\n", 1560 | "Epoch 185/200\n", 1561 | "300/300 [==============================] - 0s 817us/sample - loss: 0.4033 - acc: 0.9067\n", 1562 | "Epoch 186/200\n", 1563 | "300/300 [==============================] - 0s 783us/sample - loss: 0.4001 - acc: 0.9100\n", 1564 | "Epoch 187/200\n", 1565 | "300/300 [==============================] - 0s 873us/sample - loss: 0.4017 - acc: 0.9000\n", 1566 | "Epoch 188/200\n", 1567 | "300/300 [==============================] - 0s 710us/sample - loss: 0.3992 - acc: 0.8867\n", 1568 | "Epoch 189/200\n", 1569 | "300/300 [==============================] - 0s 787us/sample - loss: 0.3942 - acc: 0.9067\n", 1570 | "Epoch 190/200\n", 1571 | "300/300 [==============================] - 0s 813us/sample - loss: 0.3926 - acc: 0.8967\n", 1572 | "Epoch 191/200\n", 1573 | "300/300 [==============================] - 0s 790us/sample - loss: 0.3910 - acc: 0.8867\n", 1574 | "Epoch 192/200\n", 1575 | "300/300 [==============================] - 0s 773us/sample - loss: 0.3890 - acc: 0.8933\n", 1576 | "Epoch 193/200\n", 1577 | "300/300 [==============================] - 0s 753us/sample - loss: 0.3879 - acc: 0.9133\n", 1578 | "Epoch 194/200\n", 1579 | "300/300 [==============================] - 0s 760us/sample - loss: 0.3848 - acc: 0.9200\n", 1580 | "Epoch 195/200\n", 1581 | "300/300 [==============================] - 0s 770us/sample - loss: 0.3821 - acc: 0.8933\n", 1582 | "Epoch 196/200\n", 1583 | "300/300 [==============================] - 0s 760us/sample - loss: 0.3822 - acc: 0.9233\n", 1584 | "Epoch 197/200\n", 1585 | "300/300 [==============================] - 0s 843us/sample - loss: 0.3787 - acc: 0.9133s - loss: 0.2856 - acc: 0.\n", 1586 | "Epoch 198/200\n", 1587 | "300/300 [==============================] - 0s 830us/sample - loss: 0.3797 - acc: 0.9267\n", 1588 | "Epoch 199/200\n", 1589 | "300/300 [==============================] - 0s 740us/sample - 
loss: 0.3819 - acc: 0.9133\n", 1590 | "Epoch 200/200\n", 1591 | "300/300 [==============================] - 0s 813us/sample - loss: 0.3741 - acc: 0.8933\n", 1592 | "Model Accuracy: 91.33%\n" 1593 | ] 1594 | } 1595 | ], 1596 | "source": [ 1597 | "model.fit(X, y, epochs=200, batch_size=10, verbose=1)\n", 1598 | "# summarize performance of the model (accuracy on the training data)\n", 1599 | "scores = model.evaluate(X, y, verbose=0)\n", 1600 | "print('Model Accuracy: %.2f%%' % (scores[1]*100))" 1601 | ] 1602 | }, 1603 | { 1604 | "cell_type": "code", 1605 | "execution_count": 53, 1606 | "metadata": {}, 1607 | "outputs": [ 1608 | { 1609 | "name": "stdout", 1610 | "output_type": "stream", 1611 | "text": [ 1612 | "['pail', 'of'] -> water\n", 1613 | "['a', 'pail'] -> of\n", 1614 | "['fetch', 'a', 'pail', 'of'] -> water\n", 1615 | "['hill'] -> fetch\n", 1616 | "['pail'] -> of\n", 1617 | "['jack', 'and'] -> jill\n", 1618 | "['a', 'pail'] -> of\n", 1619 | "['and', 'jill', 'went', 'up'] -> the\n", 1620 | "['and', 'jill', 'went'] -> up\n", 1621 | "['went', 'up'] -> the\n", 1622 | "['pail'] -> of\n", 1623 | "['the', 'hill', 'to', 'fetch', 'a'] -> pail\n", 1624 | "['jill', 'went', 'up'] -> the\n", 1625 | "['fetch', 'a'] -> pail\n", 1626 | "['fetch', 'a', 'pail'] -> of\n", 1627 | "['pail'] -> of\n", 1628 | "['a', 'pail', 'of'] -> water\n", 1629 | "['jill', 'went'] -> up\n", 1630 | "['pail'] -> of\n", 1631 | "['jill', 'went', 'up'] -> the\n" 1632 | ] 1633 | } 1634 | ], 1635 | "source": [ 1636 | "for i in range(20):\n", 1637 | " \n", 1638 | " pattern_index = np.random.randint(len(dataX))\n", 1639 | " pattern = dataX[pattern_index]\n", 1640 | " \n", 1641 | " x = pad_sequences([pattern], maxlen=max_len, dtype='float32')\n", 1642 | " x = np.reshape(x, (1, max_len, 1))\n", 1643 | " x = x / float(len(poem))\n", 1644 | " prediction = model.predict(x, verbose=0)\n", 1645 | " index = np.argmax(prediction)\n", 1646 | " result = int_to_word[index]\n", 1647 | " seq_in = [int_to_word[value] for value in pattern]\n", 1648 | " print(seq_in, '->', result)" 1649 | ] 1650 | }, 1651 | { 1652 | "cell_type": "raw", 1653 | "metadata": {}, 1654 | "source": [ 1655 | "# This model is trained on the rhyme given above; if we now provide a seed sequence of one to five words, it predicts \n", 1656 | "the next word in the sequence."
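The spot-checks above all reuse training patterns; the note in the raw cell can be made concrete with a small loop that chains one-step predictions, feeding each predicted word back in as context. A sketch using the notebook's variable names (`generate` itself is a hypothetical helper, not defined in the notebook):

```python
def generate(model, seed_words, n_words=12):
    # Start from the seed and repeatedly append the model's next-word guess.
    words = list(seed_words)
    for _ in range(n_words):
        pattern = [word_to_int[w] for w in words[-max_len:]]  # at most the last 5 words
        x = pad_sequences([pattern], maxlen=max_len, dtype='float32')
        x = np.reshape(x, (1, max_len, 1)) / float(len(poem))  # same scaling as training
        index = int(np.argmax(model.predict(x, verbose=0)))
        words.append(int_to_word[index])
    return ' '.join(words)

# With the ~91% training accuracy reported above, a one-word seed such as
# ['jack'] typically walks through the rest of the rhyme.
print(generate(model, ['jack']))
```

Because greedy `argmax` decoding is deterministic, a given seed always produces the same continuation; sampling from the softmax output instead would give varied text.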
1657 | ] 1658 | }, 1659 | { 1660 | "cell_type": "code", 1661 | "execution_count": null, 1662 | "metadata": {}, 1663 | "outputs": [], 1664 | "source": [] 1665 | } 1666 | ], 1667 | "metadata": { 1668 | "kernelspec": { 1669 | "display_name": "Python 3", 1670 | "language": "python", 1671 | "name": "python3" 1672 | }, 1673 | "language_info": { 1674 | "codemirror_mode": { 1675 | "name": "ipython", 1676 | "version": 3 1677 | }, 1678 | "file_extension": ".py", 1679 | "mimetype": "text/x-python", 1680 | "name": "python", 1681 | "nbconvert_exporter": "python", 1682 | "pygments_lexer": "ipython3", 1683 | "version": "3.7.3" 1684 | } 1685 | }, 1686 | "nbformat": 4, 1687 | "nbformat_minor": 2 1688 | } 1689 | -------------------------------------------------------------------------------- /sonar.all-data: -------------------------------------------------------------------------------- 1 | 0.0200,0.0371,0.0428,0.0207,0.0954,0.0986,0.1539,0.1601,0.3109,0.2111,0.1609,0.1582,0.2238,0.0645,0.0660,0.2273,0.3100,0.2999,0.5078,0.4797,0.5783,0.5071,0.4328,0.5550,0.6711,0.6415,0.7104,0.8080,0.6791,0.3857,0.1307,0.2604,0.5121,0.7547,0.8537,0.8507,0.6692,0.6097,0.4943,0.2744,0.0510,0.2834,0.2825,0.4256,0.2641,0.1386,0.1051,0.1343,0.0383,0.0324,0.0232,0.0027,0.0065,0.0159,0.0072,0.0167,0.0180,0.0084,0.0090,0.0032,R 2 | 0.0453,0.0523,0.0843,0.0689,0.1183,0.2583,0.2156,0.3481,0.3337,0.2872,0.4918,0.6552,0.6919,0.7797,0.7464,0.9444,1.0000,0.8874,0.8024,0.7818,0.5212,0.4052,0.3957,0.3914,0.3250,0.3200,0.3271,0.2767,0.4423,0.2028,0.3788,0.2947,0.1984,0.2341,0.1306,0.4182,0.3835,0.1057,0.1840,0.1970,0.1674,0.0583,0.1401,0.1628,0.0621,0.0203,0.0530,0.0742,0.0409,0.0061,0.0125,0.0084,0.0089,0.0048,0.0094,0.0191,0.0140,0.0049,0.0052,0.0044,R 3 | 0.0262,0.0582,0.1099,0.1083,0.0974,0.2280,0.2431,0.3771,0.5598,0.6194,0.6333,0.7060,0.5544,0.5320,0.6479,0.6931,0.6759,0.7551,0.8929,0.8619,0.7974,0.6737,0.4293,0.3648,0.5331,0.2413,0.5070,0.8533,0.6036,0.8514,0.8512,0.5045,0.1862,0.2709,0.4232,0.3043,0.6116,0.6756,0.5375,0.4719,0.4647,0.2587,0.2129,0.2222,0.2111,0.0176,0.1348,0.0744,0.0130,0.0106,0.0033,0.0232,0.0166,0.0095,0.0180,0.0244,0.0316,0.0164,0.0095,0.0078,R 4 | 0.0100,0.0171,0.0623,0.0205,0.0205,0.0368,0.1098,0.1276,0.0598,0.1264,0.0881,0.1992,0.0184,0.2261,0.1729,0.2131,0.0693,0.2281,0.4060,0.3973,0.2741,0.3690,0.5556,0.4846,0.3140,0.5334,0.5256,0.2520,0.2090,0.3559,0.6260,0.7340,0.6120,0.3497,0.3953,0.3012,0.5408,0.8814,0.9857,0.9167,0.6121,0.5006,0.3210,0.3202,0.4295,0.3654,0.2655,0.1576,0.0681,0.0294,0.0241,0.0121,0.0036,0.0150,0.0085,0.0073,0.0050,0.0044,0.0040,0.0117,R 5 | 0.0762,0.0666,0.0481,0.0394,0.0590,0.0649,0.1209,0.2467,0.3564,0.4459,0.4152,0.3952,0.4256,0.4135,0.4528,0.5326,0.7306,0.6193,0.2032,0.4636,0.4148,0.4292,0.5730,0.5399,0.3161,0.2285,0.6995,1.0000,0.7262,0.4724,0.5103,0.5459,0.2881,0.0981,0.1951,0.4181,0.4604,0.3217,0.2828,0.2430,0.1979,0.2444,0.1847,0.0841,0.0692,0.0528,0.0357,0.0085,0.0230,0.0046,0.0156,0.0031,0.0054,0.0105,0.0110,0.0015,0.0072,0.0048,0.0107,0.0094,R 6 | 0.0286,0.0453,0.0277,0.0174,0.0384,0.0990,0.1201,0.1833,0.2105,0.3039,0.2988,0.4250,0.6343,0.8198,1.0000,0.9988,0.9508,0.9025,0.7234,0.5122,0.2074,0.3985,0.5890,0.2872,0.2043,0.5782,0.5389,0.3750,0.3411,0.5067,0.5580,0.4778,0.3299,0.2198,0.1407,0.2856,0.3807,0.4158,0.4054,0.3296,0.2707,0.2650,0.0723,0.1238,0.1192,0.1089,0.0623,0.0494,0.0264,0.0081,0.0104,0.0045,0.0014,0.0038,0.0013,0.0089,0.0057,0.0027,0.0051,0.0062,R 7 | 
0.0317,0.0956,0.1321,0.1408,0.1674,0.1710,0.0731,0.1401,0.2083,0.3513,0.1786,0.0658,0.0513,0.3752,0.5419,0.5440,0.5150,0.4262,0.2024,0.4233,0.7723,0.9735,0.9390,0.5559,0.5268,0.6826,0.5713,0.5429,0.2177,0.2149,0.5811,0.6323,0.2965,0.1873,0.2969,0.5163,0.6153,0.4283,0.5479,0.6133,0.5017,0.2377,0.1957,0.1749,0.1304,0.0597,0.1124,0.1047,0.0507,0.0159,0.0195,0.0201,0.0248,0.0131,0.0070,0.0138,0.0092,0.0143,0.0036,0.0103,R 8 | 0.0519,0.0548,0.0842,0.0319,0.1158,0.0922,0.1027,0.0613,0.1465,0.2838,0.2802,0.3086,0.2657,0.3801,0.5626,0.4376,0.2617,0.1199,0.6676,0.9402,0.7832,0.5352,0.6809,0.9174,0.7613,0.8220,0.8872,0.6091,0.2967,0.1103,0.1318,0.0624,0.0990,0.4006,0.3666,0.1050,0.1915,0.3930,0.4288,0.2546,0.1151,0.2196,0.1879,0.1437,0.2146,0.2360,0.1125,0.0254,0.0285,0.0178,0.0052,0.0081,0.0120,0.0045,0.0121,0.0097,0.0085,0.0047,0.0048,0.0053,R 9 | 0.0223,0.0375,0.0484,0.0475,0.0647,0.0591,0.0753,0.0098,0.0684,0.1487,0.1156,0.1654,0.3833,0.3598,0.1713,0.1136,0.0349,0.3796,0.7401,0.9925,0.9802,0.8890,0.6712,0.4286,0.3374,0.7366,0.9611,0.7353,0.4856,0.1594,0.3007,0.4096,0.3170,0.3305,0.3408,0.2186,0.2463,0.2726,0.1680,0.2792,0.2558,0.1740,0.2121,0.1099,0.0985,0.1271,0.1459,0.1164,0.0777,0.0439,0.0061,0.0145,0.0128,0.0145,0.0058,0.0049,0.0065,0.0093,0.0059,0.0022,R 10 | 0.0164,0.0173,0.0347,0.0070,0.0187,0.0671,0.1056,0.0697,0.0962,0.0251,0.0801,0.1056,0.1266,0.0890,0.0198,0.1133,0.2826,0.3234,0.3238,0.4333,0.6068,0.7652,0.9203,0.9719,0.9207,0.7545,0.8289,0.8907,0.7309,0.6896,0.5829,0.4935,0.3101,0.0306,0.0244,0.1108,0.1594,0.1371,0.0696,0.0452,0.0620,0.1421,0.1597,0.1384,0.0372,0.0688,0.0867,0.0513,0.0092,0.0198,0.0118,0.0090,0.0223,0.0179,0.0084,0.0068,0.0032,0.0035,0.0056,0.0040,R 11 | 0.0039,0.0063,0.0152,0.0336,0.0310,0.0284,0.0396,0.0272,0.0323,0.0452,0.0492,0.0996,0.1424,0.1194,0.0628,0.0907,0.1177,0.1429,0.1223,0.1104,0.1847,0.3715,0.4382,0.5707,0.6654,0.7476,0.7654,0.8555,0.9720,0.9221,0.7502,0.7209,0.7757,0.6055,0.5021,0.4499,0.3947,0.4281,0.4427,0.3749,0.1972,0.0511,0.0793,0.1269,0.1533,0.0690,0.0402,0.0534,0.0228,0.0073,0.0062,0.0062,0.0120,0.0052,0.0056,0.0093,0.0042,0.0003,0.0053,0.0036,R 12 | 0.0123,0.0309,0.0169,0.0313,0.0358,0.0102,0.0182,0.0579,0.1122,0.0835,0.0548,0.0847,0.2026,0.2557,0.1870,0.2032,0.1463,0.2849,0.5824,0.7728,0.7852,0.8515,0.5312,0.3653,0.5973,0.8275,1.0000,0.8673,0.6301,0.4591,0.3940,0.2576,0.2817,0.2641,0.2757,0.2698,0.3994,0.4576,0.3940,0.2522,0.1782,0.1354,0.0516,0.0337,0.0894,0.0861,0.0872,0.0445,0.0134,0.0217,0.0188,0.0133,0.0265,0.0224,0.0074,0.0118,0.0026,0.0092,0.0009,0.0044,R 13 | 0.0079,0.0086,0.0055,0.0250,0.0344,0.0546,0.0528,0.0958,0.1009,0.1240,0.1097,0.1215,0.1874,0.3383,0.3227,0.2723,0.3943,0.6432,0.7271,0.8673,0.9674,0.9847,0.9480,0.8036,0.6833,0.5136,0.3090,0.0832,0.4019,0.2344,0.1905,0.1235,0.1717,0.2351,0.2489,0.3649,0.3382,0.1589,0.0989,0.1089,0.1043,0.0839,0.1391,0.0819,0.0678,0.0663,0.1202,0.0692,0.0152,0.0266,0.0174,0.0176,0.0127,0.0088,0.0098,0.0019,0.0059,0.0058,0.0059,0.0032,R 14 | 0.0090,0.0062,0.0253,0.0489,0.1197,0.1589,0.1392,0.0987,0.0955,0.1895,0.1896,0.2547,0.4073,0.2988,0.2901,0.5326,0.4022,0.1571,0.3024,0.3907,0.3542,0.4438,0.6414,0.4601,0.6009,0.8690,0.8345,0.7669,0.5081,0.4620,0.5380,0.5375,0.3844,0.3601,0.7402,0.7761,0.3858,0.0667,0.3684,0.6114,0.3510,0.2312,0.2195,0.3051,0.1937,0.1570,0.0479,0.0538,0.0146,0.0068,0.0187,0.0059,0.0095,0.0194,0.0080,0.0152,0.0158,0.0053,0.0189,0.0102,R 15 | 
0.0124,0.0433,0.0604,0.0449,0.0597,0.0355,0.0531,0.0343,0.1052,0.2120,0.1640,0.1901,0.3026,0.2019,0.0592,0.2390,0.3657,0.3809,0.5929,0.6299,0.5801,0.4574,0.4449,0.3691,0.6446,0.8940,0.8978,0.4980,0.3333,0.2350,0.1553,0.3666,0.4340,0.3082,0.3024,0.4109,0.5501,0.4129,0.5499,0.5018,0.3132,0.2802,0.2351,0.2298,0.1155,0.0724,0.0621,0.0318,0.0450,0.0167,0.0078,0.0083,0.0057,0.0174,0.0188,0.0054,0.0114,0.0196,0.0147,0.0062,R 16 | 0.0298,0.0615,0.0650,0.0921,0.1615,0.2294,0.2176,0.2033,0.1459,0.0852,0.2476,0.3645,0.2777,0.2826,0.3237,0.4335,0.5638,0.4555,0.4348,0.6433,0.3932,0.1989,0.3540,0.9165,0.9371,0.4620,0.2771,0.6613,0.8028,0.4200,0.5192,0.6962,0.5792,0.8889,0.7863,0.7133,0.7615,0.4401,0.3009,0.3163,0.2809,0.2898,0.0526,0.1867,0.1553,0.1633,0.1252,0.0748,0.0452,0.0064,0.0154,0.0031,0.0153,0.0071,0.0212,0.0076,0.0152,0.0049,0.0200,0.0073,R 17 | 0.0352,0.0116,0.0191,0.0469,0.0737,0.1185,0.1683,0.1541,0.1466,0.2912,0.2328,0.2237,0.2470,0.1560,0.3491,0.3308,0.2299,0.2203,0.2493,0.4128,0.3158,0.6191,0.5854,0.3395,0.2561,0.5599,0.8145,0.6941,0.6985,0.8660,0.5930,0.3664,0.6750,0.8697,0.7837,0.7552,0.5789,0.4713,0.1252,0.6087,0.7322,0.5977,0.3431,0.1803,0.2378,0.3424,0.2303,0.0689,0.0216,0.0469,0.0426,0.0346,0.0158,0.0154,0.0109,0.0048,0.0095,0.0015,0.0073,0.0067,R 18 | 0.0192,0.0607,0.0378,0.0774,0.1388,0.0809,0.0568,0.0219,0.1037,0.1186,0.1237,0.1601,0.3520,0.4479,0.3769,0.5761,0.6426,0.6790,0.7157,0.5466,0.5399,0.6362,0.7849,0.7756,0.5780,0.4862,0.4181,0.2457,0.0716,0.0613,0.1816,0.4493,0.5976,0.3785,0.2495,0.5771,0.8852,0.8409,0.3570,0.3133,0.6096,0.6378,0.2709,0.1419,0.1260,0.1288,0.0790,0.0829,0.0520,0.0216,0.0360,0.0331,0.0131,0.0120,0.0108,0.0024,0.0045,0.0037,0.0112,0.0075,R 19 | 0.0270,0.0092,0.0145,0.0278,0.0412,0.0757,0.1026,0.1138,0.0794,0.1520,0.1675,0.1370,0.1361,0.1345,0.2144,0.5354,0.6830,0.5600,0.3093,0.3226,0.4430,0.5573,0.5782,0.6173,0.8132,0.9819,0.9823,0.9166,0.7423,0.7736,0.8473,0.7352,0.6671,0.6083,0.6239,0.5972,0.5715,0.5242,0.2924,0.1536,0.2003,0.2031,0.2207,0.1778,0.1353,0.1373,0.0749,0.0472,0.0325,0.0179,0.0045,0.0084,0.0010,0.0018,0.0068,0.0039,0.0120,0.0132,0.0070,0.0088,R 20 | 0.0126,0.0149,0.0641,0.1732,0.2565,0.2559,0.2947,0.4110,0.4983,0.5920,0.5832,0.5419,0.5472,0.5314,0.4981,0.6985,0.8292,0.7839,0.8215,0.9363,1.0000,0.9224,0.7839,0.5470,0.4562,0.5922,0.5448,0.3971,0.0882,0.2385,0.2005,0.0587,0.2544,0.2009,0.0329,0.1547,0.1212,0.2446,0.3171,0.3195,0.3051,0.0836,0.1266,0.1381,0.1136,0.0516,0.0073,0.0278,0.0372,0.0121,0.0153,0.0092,0.0035,0.0098,0.0121,0.0006,0.0181,0.0094,0.0116,0.0063,R 21 | 0.0473,0.0509,0.0819,0.1252,0.1783,0.3070,0.3008,0.2362,0.3830,0.3759,0.3021,0.2909,0.2301,0.1411,0.1582,0.2430,0.4474,0.5964,0.6744,0.7969,0.8319,0.7813,0.8626,0.7369,0.4122,0.2596,0.3392,0.3788,0.4488,0.6281,0.7449,0.7328,0.7704,0.7870,0.6048,0.5860,0.6385,0.7279,0.6286,0.5316,0.4069,0.1791,0.1625,0.2527,0.1903,0.1643,0.0604,0.0209,0.0436,0.0175,0.0107,0.0193,0.0118,0.0064,0.0042,0.0054,0.0049,0.0082,0.0028,0.0027,R 22 | 0.0664,0.0575,0.0842,0.0372,0.0458,0.0771,0.0771,0.1130,0.2353,0.1838,0.2869,0.4129,0.3647,0.1984,0.2840,0.4039,0.5837,0.6792,0.6086,0.4858,0.3246,0.2013,0.2082,0.1686,0.2484,0.2736,0.2984,0.4655,0.6990,0.7474,0.7956,0.7981,0.6715,0.6942,0.7440,0.8169,0.8912,1.0000,0.8753,0.7061,0.6803,0.5898,0.4618,0.3639,0.1492,0.1216,0.1306,0.1198,0.0578,0.0235,0.0135,0.0141,0.0190,0.0043,0.0036,0.0026,0.0024,0.0162,0.0109,0.0079,R 23 | 
0.0099,0.0484,0.0299,0.0297,0.0652,0.1077,0.2363,0.2385,0.0075,0.1882,0.1456,0.1892,0.3176,0.1340,0.2169,0.2458,0.2589,0.2786,0.2298,0.0656,0.1441,0.1179,0.1668,0.1783,0.2476,0.2570,0.1036,0.5356,0.7124,0.6291,0.4756,0.6015,0.7208,0.6234,0.5725,0.7523,0.8712,0.9252,0.9709,0.9297,0.8995,0.7911,0.5600,0.2838,0.4407,0.5507,0.4331,0.2905,0.1981,0.0779,0.0396,0.0173,0.0149,0.0115,0.0202,0.0139,0.0029,0.0160,0.0106,0.0134,R 24 | 0.0115,0.0150,0.0136,0.0076,0.0211,0.1058,0.1023,0.0440,0.0931,0.0734,0.0740,0.0622,0.1055,0.1183,0.1721,0.2584,0.3232,0.3817,0.4243,0.4217,0.4449,0.4075,0.3306,0.4012,0.4466,0.5218,0.7552,0.9503,1.0000,0.9084,0.8283,0.7571,0.7262,0.6152,0.5680,0.5757,0.5324,0.3672,0.1669,0.0866,0.0646,0.1891,0.2683,0.2887,0.2341,0.1668,0.1015,0.1195,0.0704,0.0167,0.0107,0.0091,0.0016,0.0084,0.0064,0.0026,0.0029,0.0037,0.0070,0.0041,R 25 | 0.0293,0.0644,0.0390,0.0173,0.0476,0.0816,0.0993,0.0315,0.0736,0.0860,0.0414,0.0472,0.0835,0.0938,0.1466,0.0809,0.1179,0.2179,0.3326,0.3258,0.2111,0.2302,0.3361,0.4259,0.4609,0.2606,0.0874,0.2862,0.5606,0.8344,0.8096,0.7250,0.8048,0.9435,1.0000,0.8960,0.5516,0.3037,0.2338,0.2382,0.3318,0.3821,0.1575,0.2228,0.1582,0.1433,0.1634,0.1133,0.0567,0.0133,0.0170,0.0035,0.0052,0.0083,0.0078,0.0075,0.0105,0.0160,0.0095,0.0011,R 26 | 0.0201,0.0026,0.0138,0.0062,0.0133,0.0151,0.0541,0.0210,0.0505,0.1097,0.0841,0.0942,0.1204,0.0420,0.0031,0.0162,0.0624,0.2127,0.3436,0.3813,0.3825,0.4764,0.6313,0.7523,0.8675,0.8788,0.7901,0.8357,0.9631,0.9619,0.9236,0.8903,0.9708,0.9647,0.7892,0.5307,0.2718,0.1953,0.1374,0.3105,0.3790,0.4105,0.3355,0.2998,0.2748,0.2024,0.1043,0.0453,0.0337,0.0122,0.0072,0.0108,0.0070,0.0063,0.0030,0.0011,0.0007,0.0024,0.0057,0.0044,R 27 | 0.0151,0.0320,0.0599,0.1050,0.1163,0.1734,0.1679,0.1119,0.0889,0.1205,0.0847,0.1518,0.2305,0.2793,0.3404,0.4527,0.6950,0.8807,0.9154,0.7542,0.6736,0.7146,0.8335,0.7701,0.6993,0.6543,0.5040,0.4926,0.4992,0.4161,0.1631,0.0404,0.0637,0.2962,0.3609,0.1866,0.0476,0.1497,0.2405,0.1980,0.3175,0.2379,0.1716,0.1559,0.1556,0.0422,0.0493,0.0476,0.0219,0.0059,0.0086,0.0061,0.0015,0.0084,0.0128,0.0054,0.0011,0.0019,0.0023,0.0062,R 28 | 0.0177,0.0300,0.0288,0.0394,0.0630,0.0526,0.0688,0.0633,0.0624,0.0613,0.1680,0.3476,0.4561,0.5188,0.6308,0.7201,0.5153,0.3818,0.2644,0.3345,0.4865,0.6628,0.7389,0.9213,1.0000,0.7750,0.5593,0.6172,0.8635,0.6592,0.4770,0.4983,0.3330,0.3076,0.2876,0.2226,0.0794,0.0603,0.1049,0.0606,0.1530,0.0983,0.1643,0.1901,0.1107,0.1917,0.1467,0.0392,0.0356,0.0270,0.0168,0.0102,0.0122,0.0044,0.0075,0.0124,0.0099,0.0057,0.0032,0.0019,R 29 | 0.0100,0.0275,0.0190,0.0371,0.0416,0.0201,0.0314,0.0651,0.1896,0.2668,0.3376,0.3282,0.2432,0.1268,0.1278,0.4441,0.6795,0.7051,0.7966,0.9401,0.9857,0.8193,0.5789,0.6394,0.7043,0.6875,0.4081,0.1811,0.2064,0.3917,0.3791,0.2042,0.2227,0.3341,0.3984,0.5077,0.5534,0.3352,0.2723,0.2278,0.2044,0.1986,0.0835,0.0908,0.1380,0.1948,0.1211,0.0843,0.0589,0.0247,0.0118,0.0088,0.0104,0.0036,0.0088,0.0047,0.0117,0.0020,0.0091,0.0058,R 30 | 0.0189,0.0308,0.0197,0.0622,0.0080,0.0789,0.1440,0.1451,0.1789,0.2522,0.2607,0.3710,0.3906,0.2672,0.2716,0.4183,0.6988,0.5733,0.2226,0.2631,0.7473,0.7263,0.3393,0.2824,0.6053,0.5897,0.4967,0.8616,0.8339,0.4084,0.2268,0.1745,0.0507,0.1588,0.3040,0.1369,0.1605,0.2061,0.0734,0.0202,0.1638,0.1583,0.1830,0.1886,0.1008,0.0663,0.0183,0.0404,0.0108,0.0143,0.0091,0.0038,0.0096,0.0142,0.0190,0.0140,0.0099,0.0092,0.0052,0.0075,R 31 | 
0.0240,0.0218,0.0324,0.0569,0.0330,0.0513,0.0897,0.0713,0.0569,0.0389,0.1934,0.2434,0.2906,0.2606,0.3811,0.4997,0.3015,0.3655,0.6791,0.7307,0.5053,0.4441,0.6987,0.8133,0.7781,0.8943,0.8929,0.8913,0.8610,0.8063,0.5540,0.2446,0.3459,0.1615,0.2467,0.5564,0.4681,0.0979,0.1582,0.0751,0.3321,0.3745,0.2666,0.1078,0.1418,0.1687,0.0738,0.0634,0.0144,0.0226,0.0061,0.0162,0.0146,0.0093,0.0112,0.0094,0.0054,0.0019,0.0066,0.0023,R 32 | 0.0084,0.0153,0.0291,0.0432,0.0951,0.0752,0.0414,0.0259,0.0692,0.1753,0.1970,0.1167,0.1683,0.0814,0.2179,0.5121,0.7231,0.7776,0.6222,0.3501,0.3733,0.2622,0.3776,0.7361,0.8673,0.8223,0.7772,0.7862,0.5652,0.3635,0.3534,0.3865,0.3370,0.1693,0.2627,0.3195,0.1388,0.1048,0.1681,0.1910,0.1174,0.0933,0.0856,0.0951,0.0986,0.0956,0.0426,0.0407,0.0106,0.0179,0.0056,0.0236,0.0114,0.0136,0.0117,0.0060,0.0058,0.0031,0.0072,0.0045,R 33 | 0.0195,0.0213,0.0058,0.0190,0.0319,0.0571,0.1004,0.0668,0.0691,0.0242,0.0728,0.0639,0.3002,0.3854,0.4767,0.4602,0.3175,0.4160,0.6428,1.0000,0.8631,0.5212,0.3156,0.5952,0.7732,0.6042,0.4375,0.5487,0.4720,0.6235,0.3851,0.1590,0.3891,0.5294,0.3504,0.4480,0.4041,0.5031,0.6475,0.5493,0.3548,0.2028,0.1882,0.0845,0.1315,0.1590,0.0562,0.0617,0.0343,0.0370,0.0261,0.0157,0.0074,0.0271,0.0203,0.0089,0.0095,0.0095,0.0021,0.0053,R 34 | 0.0442,0.0477,0.0049,0.0581,0.0278,0.0678,0.1664,0.1490,0.0974,0.1268,0.1109,0.2375,0.2007,0.2140,0.1109,0.2036,0.2468,0.6682,0.8345,0.8252,0.8017,0.8982,0.9664,0.8515,0.6626,0.3241,0.2054,0.5669,0.5726,0.4877,0.7532,0.7600,0.5185,0.4120,0.5560,0.5569,0.1336,0.3831,0.4611,0.4330,0.2556,0.1466,0.3489,0.2659,0.0944,0.1370,0.1344,0.0416,0.0719,0.0637,0.0210,0.0204,0.0216,0.0135,0.0055,0.0073,0.0080,0.0105,0.0059,0.0105,R 35 | 0.0311,0.0491,0.0692,0.0831,0.0079,0.0200,0.0981,0.1016,0.2025,0.0767,0.1767,0.2555,0.2812,0.2722,0.3227,0.3463,0.5395,0.7911,0.9064,0.8701,0.7672,0.2957,0.4148,0.6043,0.3178,0.3482,0.6158,0.8049,0.6289,0.4999,0.5830,0.6660,0.4124,0.1260,0.2487,0.4676,0.5382,0.3150,0.2139,0.1848,0.1679,0.2328,0.1015,0.0713,0.0615,0.0779,0.0761,0.0845,0.0592,0.0068,0.0089,0.0087,0.0032,0.0130,0.0188,0.0101,0.0229,0.0182,0.0046,0.0038,R 36 | 0.0206,0.0132,0.0533,0.0569,0.0647,0.1432,0.1344,0.2041,0.1571,0.1573,0.2327,0.1785,0.1507,0.1916,0.2061,0.2307,0.2360,0.1299,0.3812,0.5858,0.4497,0.4876,1.0000,0.8675,0.4718,0.5341,0.6197,0.7143,0.5605,0.3728,0.2481,0.1921,0.1386,0.3325,0.2883,0.3228,0.2607,0.2040,0.2396,0.1319,0.0683,0.0334,0.0716,0.0976,0.0787,0.0522,0.0500,0.0231,0.0221,0.0144,0.0307,0.0386,0.0147,0.0018,0.0100,0.0096,0.0077,0.0180,0.0109,0.0070,R 37 | 0.0094,0.0166,0.0398,0.0359,0.0681,0.0706,0.1020,0.0893,0.0381,0.1328,0.1303,0.0273,0.0644,0.0712,0.1204,0.0717,0.1224,0.2349,0.3684,0.3918,0.4925,0.8793,0.9606,0.8786,0.6905,0.6937,0.5674,0.6540,0.7802,0.7575,0.5836,0.6316,0.8108,0.9039,0.8647,0.6695,0.4027,0.2370,0.2685,0.3662,0.3267,0.2200,0.2996,0.2205,0.1163,0.0635,0.0465,0.0422,0.0174,0.0172,0.0134,0.0141,0.0191,0.0145,0.0065,0.0129,0.0217,0.0087,0.0077,0.0122,R 38 | 0.0333,0.0221,0.0270,0.0481,0.0679,0.0981,0.0843,0.1172,0.0759,0.0920,0.1475,0.0522,0.1119,0.0970,0.1174,0.1678,0.1642,0.1205,0.0494,0.1544,0.3485,0.6146,0.9146,0.9364,0.8677,0.8772,0.8553,0.8833,1.0000,0.8296,0.6601,0.5499,0.5716,0.6859,0.6825,0.5142,0.2750,0.1358,0.1551,0.2646,0.1994,0.1883,0.2746,0.1651,0.0575,0.0695,0.0598,0.0456,0.0021,0.0068,0.0036,0.0022,0.0032,0.0060,0.0054,0.0063,0.0143,0.0132,0.0051,0.0041,R 39 | 
0.0123,0.0022,0.0196,0.0206,0.0180,0.0492,0.0033,0.0398,0.0791,0.0475,0.1152,0.0520,0.1192,0.1943,0.1840,0.2077,0.1956,0.1630,0.1218,0.1017,0.1354,0.3157,0.4645,0.5906,0.6776,0.8119,0.8594,0.9228,0.8387,0.7238,0.6292,0.5181,0.4629,0.5255,0.5147,0.3929,0.1279,0.0411,0.0859,0.1131,0.1306,0.1757,0.2648,0.1955,0.0656,0.0580,0.0319,0.0301,0.0272,0.0074,0.0149,0.0125,0.0134,0.0026,0.0038,0.0018,0.0113,0.0058,0.0047,0.0071,R 40 | 0.0091,0.0213,0.0206,0.0505,0.0657,0.0795,0.0970,0.0872,0.0743,0.0837,0.1579,0.0898,0.0309,0.1856,0.2969,0.2032,0.1264,0.1655,0.1661,0.2091,0.2310,0.4460,0.6634,0.6933,0.7663,0.8206,0.7049,0.7560,0.7466,0.6387,0.4846,0.3328,0.5356,0.8741,0.8573,0.6718,0.3446,0.3150,0.2702,0.2598,0.2742,0.3594,0.4382,0.2460,0.0758,0.0187,0.0797,0.0748,0.0367,0.0155,0.0300,0.0112,0.0112,0.0102,0.0026,0.0097,0.0098,0.0043,0.0071,0.0108,R 41 | 0.0068,0.0232,0.0513,0.0444,0.0249,0.0637,0.0422,0.1130,0.1911,0.2475,0.1606,0.0922,0.2398,0.3220,0.4295,0.2652,0.0666,0.1442,0.2373,0.2595,0.2493,0.3903,0.6384,0.8037,0.7026,0.6874,0.6997,0.8558,1.0000,0.9621,0.8996,0.7575,0.6902,0.5686,0.4396,0.4546,0.2959,0.1587,0.1681,0.0842,0.1173,0.1754,0.2728,0.1705,0.0194,0.0213,0.0354,0.0420,0.0093,0.0204,0.0199,0.0173,0.0163,0.0055,0.0045,0.0068,0.0041,0.0052,0.0194,0.0105,R 42 | 0.0093,0.0185,0.0056,0.0064,0.0260,0.0458,0.0470,0.0057,0.0425,0.0640,0.0888,0.1599,0.1541,0.2768,0.2176,0.2799,0.3491,0.2824,0.2479,0.3005,0.4300,0.4684,0.4520,0.5026,0.6217,0.6571,0.6632,0.7321,0.8534,1.0000,0.8448,0.6354,0.6308,0.6211,0.6976,0.5868,0.4889,0.3683,0.2043,0.1469,0.2220,0.1449,0.1490,0.1211,0.1144,0.0791,0.0365,0.0152,0.0085,0.0120,0.0022,0.0069,0.0064,0.0129,0.0114,0.0054,0.0089,0.0050,0.0058,0.0025,R 43 | 0.0211,0.0319,0.0415,0.0286,0.0121,0.0438,0.1299,0.1390,0.0695,0.0568,0.0869,0.1935,0.1478,0.1871,0.1994,0.3283,0.6861,0.5814,0.2500,0.1734,0.3363,0.5588,0.6592,0.7012,0.8099,0.8901,0.8745,0.7887,0.8725,0.9376,0.8920,0.7508,0.6832,0.7610,0.9017,1.0000,0.9123,0.7388,0.5915,0.4057,0.3019,0.2331,0.2931,0.2298,0.2391,0.1910,0.1096,0.0300,0.0171,0.0383,0.0053,0.0090,0.0042,0.0153,0.0106,0.0020,0.0105,0.0049,0.0070,0.0080,R 44 | 0.0093,0.0269,0.0217,0.0339,0.0305,0.1172,0.1450,0.0638,0.0740,0.1360,0.2132,0.3738,0.3738,0.2673,0.2333,0.5367,0.7312,0.7659,0.6271,0.4395,0.4330,0.4326,0.5544,0.7360,0.8589,0.8989,0.9420,0.9401,0.9379,0.8575,0.7284,0.6700,0.7547,0.8773,0.9919,0.9922,0.9419,0.8388,0.6605,0.4816,0.2917,0.1769,0.1136,0.0701,0.1578,0.1938,0.1106,0.0693,0.0176,0.0205,0.0309,0.0212,0.0091,0.0056,0.0086,0.0092,0.0070,0.0116,0.0060,0.0110,R 45 | 0.0257,0.0447,0.0388,0.0239,0.1315,0.1323,0.1608,0.2145,0.0847,0.0561,0.0891,0.0861,0.1531,0.1524,0.1849,0.2871,0.2009,0.2748,0.5017,0.2172,0.4978,0.5265,0.3647,0.5768,0.5161,0.5715,0.4006,0.3650,0.6685,0.8659,0.8052,0.4082,0.3379,0.5092,0.6776,0.7313,0.6062,0.7040,0.8849,0.8979,0.7751,0.7247,0.7733,0.7762,0.6009,0.4514,0.3096,0.1859,0.0956,0.0206,0.0206,0.0096,0.0153,0.0096,0.0131,0.0198,0.0025,0.0199,0.0255,0.0180,R 46 | 0.0408,0.0653,0.0397,0.0604,0.0496,0.1817,0.1178,0.1024,0.0583,0.2176,0.2459,0.3332,0.3087,0.2613,0.3232,0.3731,0.4203,0.5364,0.7062,0.8196,0.8835,0.8299,0.7609,0.7605,0.8367,0.8905,0.7652,0.5897,0.3037,0.0823,0.2787,0.7241,0.8032,0.8050,0.7676,0.7468,0.6253,0.1730,0.2916,0.5003,0.5220,0.4824,0.4004,0.3877,0.1651,0.0442,0.0663,0.0418,0.0475,0.0235,0.0066,0.0062,0.0129,0.0184,0.0069,0.0198,0.0199,0.0102,0.0070,0.0055,R 47 | 
0.0308,0.0339,0.0202,0.0889,0.1570,0.1750,0.0920,0.1353,0.1593,0.2795,0.3336,0.2940,0.1608,0.3335,0.4985,0.7295,0.7350,0.8253,0.8793,0.9657,1.0000,0.8707,0.6471,0.5973,0.8218,0.7755,0.6111,0.4195,0.2990,0.1354,0.2438,0.5624,0.5555,0.6963,0.7298,0.7022,0.5468,0.1421,0.4738,0.6410,0.4375,0.3178,0.2377,0.2808,0.1374,0.1136,0.1034,0.0688,0.0422,0.0117,0.0070,0.0167,0.0127,0.0138,0.0090,0.0051,0.0029,0.0122,0.0056,0.0020,R 48 | 0.0373,0.0281,0.0232,0.0225,0.0179,0.0733,0.0841,0.1031,0.0993,0.0802,0.1564,0.2565,0.2624,0.1179,0.0597,0.1563,0.2241,0.3586,0.1792,0.3256,0.6079,0.6988,0.8391,0.8553,0.7710,0.6215,0.5736,0.4402,0.4056,0.4411,0.5130,0.5965,0.7272,0.6539,0.5902,0.5393,0.4897,0.4081,0.4145,0.6003,0.7196,0.6633,0.6287,0.4087,0.3212,0.2518,0.1482,0.0988,0.0317,0.0269,0.0066,0.0008,0.0045,0.0024,0.0006,0.0073,0.0096,0.0054,0.0085,0.0060,R 49 | 0.0190,0.0038,0.0642,0.0452,0.0333,0.0690,0.0901,0.1454,0.0740,0.0349,0.1459,0.3473,0.3197,0.2823,0.0166,0.0572,0.2164,0.4563,0.3819,0.5627,0.6484,0.7235,0.8242,0.8766,1.0000,0.8582,0.6563,0.5087,0.4817,0.4530,0.4521,0.4532,0.5385,0.5308,0.5356,0.5271,0.4260,0.2436,0.1205,0.3845,0.4107,0.5067,0.4216,0.2479,0.1586,0.1124,0.0651,0.0789,0.0325,0.0070,0.0026,0.0093,0.0118,0.0112,0.0094,0.0140,0.0072,0.0022,0.0055,0.0122,R 50 | 0.0119,0.0582,0.0623,0.0600,0.1397,0.1883,0.1422,0.1447,0.0487,0.0864,0.2143,0.3720,0.2665,0.2113,0.1103,0.1136,0.1934,0.4142,0.3279,0.6222,0.7468,0.7676,0.7867,0.8253,1.0000,0.9481,0.7539,0.6008,0.5437,0.5387,0.5619,0.5141,0.6084,0.5621,0.5956,0.6078,0.5025,0.2829,0.0477,0.2811,0.3422,0.5147,0.4372,0.2470,0.1708,0.1343,0.0838,0.0755,0.0304,0.0074,0.0069,0.0025,0.0103,0.0074,0.0123,0.0069,0.0076,0.0073,0.0030,0.0138,R 51 | 0.0353,0.0713,0.0326,0.0272,0.0370,0.0792,0.1083,0.0687,0.0298,0.0880,0.1078,0.0979,0.2250,0.2819,0.2099,0.1240,0.1699,0.0939,0.1091,0.1410,0.1268,0.3151,0.1430,0.2264,0.5756,0.7876,0.7158,0.5998,0.5583,0.6295,0.7659,0.8940,0.8436,0.6807,0.8380,1.0000,0.9497,0.7866,0.5647,0.3480,0.2585,0.2304,0.2948,0.3363,0.3017,0.2193,0.1316,0.1078,0.0559,0.0035,0.0098,0.0163,0.0242,0.0043,0.0202,0.0108,0.0037,0.0096,0.0093,0.0053,R 52 | 0.0131,0.0068,0.0308,0.0311,0.0085,0.0767,0.0771,0.0640,0.0726,0.0901,0.0750,0.0844,0.1226,0.1619,0.2317,0.2934,0.3526,0.3657,0.3221,0.3093,0.4084,0.4285,0.4663,0.5956,0.6948,0.8386,0.8875,0.6404,0.3308,0.3425,0.4920,0.4592,0.3034,0.4366,0.5175,0.5122,0.4746,0.4902,0.4603,0.4460,0.4196,0.2873,0.2296,0.0949,0.0095,0.0527,0.0383,0.0107,0.0108,0.0077,0.0109,0.0062,0.0028,0.0040,0.0075,0.0039,0.0053,0.0013,0.0052,0.0023,R 53 | 0.0087,0.0046,0.0081,0.0230,0.0586,0.0682,0.0993,0.0717,0.0576,0.0818,0.1315,0.1862,0.2789,0.2579,0.2240,0.2568,0.2933,0.2991,0.3924,0.4691,0.5665,0.6464,0.6774,0.7577,0.8856,0.9419,1.0000,0.8564,0.6790,0.5587,0.4147,0.2946,0.2025,0.0688,0.1171,0.2157,0.2216,0.2776,0.2309,0.1444,0.1513,0.1745,0.1756,0.1424,0.0908,0.0138,0.0469,0.0480,0.0159,0.0045,0.0015,0.0052,0.0038,0.0079,0.0114,0.0050,0.0030,0.0064,0.0058,0.0030,R 54 | 0.0293,0.0378,0.0257,0.0062,0.0130,0.0612,0.0895,0.1107,0.0973,0.0751,0.0528,0.1209,0.1763,0.2039,0.2727,0.2321,0.2676,0.2934,0.3295,0.4910,0.5402,0.6257,0.6826,0.7527,0.8504,0.8938,0.9928,0.9134,0.7080,0.6318,0.6126,0.4638,0.2797,0.1721,0.1665,0.2561,0.2735,0.3209,0.2724,0.1880,0.1552,0.2522,0.2121,0.1801,0.1473,0.0681,0.1091,0.0919,0.0397,0.0093,0.0076,0.0065,0.0072,0.0108,0.0051,0.0102,0.0041,0.0055,0.0050,0.0087,R 55 | 
0.0132,0.0080,0.0188,0.0141,0.0436,0.0668,0.0609,0.0131,0.0899,0.0922,0.1445,0.1475,0.2087,0.2558,0.2603,0.1985,0.2394,0.3134,0.4077,0.4529,0.4893,0.5666,0.6234,0.6741,0.8282,0.8823,0.9196,0.8965,0.7549,0.6736,0.6463,0.5007,0.3663,0.2298,0.1362,0.2123,0.2395,0.2673,0.2865,0.2060,0.1659,0.2633,0.2552,0.1696,0.1467,0.1286,0.0926,0.0716,0.0325,0.0258,0.0136,0.0044,0.0028,0.0021,0.0022,0.0048,0.0138,0.0140,0.0028,0.0064,R 56 | 0.0201,0.0116,0.0123,0.0245,0.0547,0.0208,0.0891,0.0836,0.1335,0.1199,0.1742,0.1387,0.2042,0.2580,0.2616,0.2097,0.2532,0.3213,0.4327,0.4760,0.5328,0.6057,0.6696,0.7476,0.8930,0.9405,1.0000,0.9785,0.8473,0.7639,0.6701,0.4989,0.3718,0.2196,0.1416,0.2680,0.2630,0.3104,0.3392,0.2123,0.1170,0.2655,0.2203,0.1541,0.1464,0.1044,0.1225,0.0745,0.0490,0.0224,0.0032,0.0076,0.0045,0.0056,0.0075,0.0037,0.0045,0.0029,0.0008,0.0018,R 57 | 0.0152,0.0102,0.0113,0.0263,0.0097,0.0391,0.0857,0.0915,0.0949,0.1504,0.1911,0.2115,0.2249,0.2573,0.1701,0.2023,0.2538,0.3417,0.4026,0.4553,0.5525,0.5991,0.5854,0.7114,0.9500,0.9858,1.0000,0.9578,0.8642,0.7128,0.5893,0.4323,0.2897,0.1744,0.0770,0.2297,0.2459,0.3101,0.3312,0.2220,0.0871,0.2064,0.1808,0.1624,0.1120,0.0815,0.1117,0.0950,0.0412,0.0120,0.0048,0.0049,0.0041,0.0036,0.0013,0.0046,0.0037,0.0011,0.0034,0.0033,R 58 | 0.0216,0.0124,0.0174,0.0152,0.0608,0.1026,0.1139,0.0877,0.1160,0.0866,0.1564,0.0780,0.0997,0.0915,0.0662,0.1134,0.1740,0.2573,0.3294,0.3910,0.5438,0.6115,0.7022,0.7610,0.7973,0.9105,0.8807,0.7949,0.7990,0.7180,0.6407,0.6312,0.5929,0.6168,0.6498,0.6764,0.6253,0.5117,0.3890,0.3273,0.2509,0.1530,0.1323,0.1657,0.1215,0.0978,0.0452,0.0273,0.0179,0.0092,0.0018,0.0052,0.0049,0.0096,0.0134,0.0122,0.0047,0.0018,0.0006,0.0023,R 59 | 0.0225,0.0019,0.0075,0.0097,0.0445,0.0906,0.0889,0.0655,0.1624,0.1452,0.1442,0.0948,0.0618,0.1641,0.0708,0.0844,0.2590,0.2679,0.3094,0.4678,0.5958,0.7245,0.8773,0.9214,0.9282,0.9942,1.0000,0.9071,0.8545,0.7293,0.6499,0.6071,0.5588,0.5967,0.6275,0.5459,0.4786,0.3965,0.2087,0.1651,0.1836,0.0652,0.0758,0.0486,0.0353,0.0297,0.0241,0.0379,0.0119,0.0073,0.0051,0.0034,0.0129,0.0100,0.0044,0.0057,0.0030,0.0035,0.0021,0.0027,R 60 | 0.0125,0.0152,0.0218,0.0175,0.0362,0.0696,0.0873,0.0616,0.1252,0.1302,0.0888,0.0500,0.0628,0.1274,0.0801,0.0742,0.2048,0.2950,0.3193,0.4567,0.5959,0.7101,0.8225,0.8425,0.9065,0.9802,1.0000,0.8752,0.7583,0.6616,0.5786,0.5128,0.4776,0.4994,0.5197,0.5071,0.4577,0.3505,0.1845,0.1890,0.1967,0.1041,0.0550,0.0492,0.0622,0.0505,0.0247,0.0219,0.0102,0.0047,0.0019,0.0041,0.0074,0.0030,0.0050,0.0048,0.0017,0.0041,0.0086,0.0058,R 61 | 0.0130,0.0006,0.0088,0.0456,0.0525,0.0778,0.0931,0.0941,0.1711,0.1483,0.1532,0.1100,0.0890,0.1236,0.1197,0.1145,0.2137,0.2838,0.3640,0.5430,0.6673,0.7979,0.9273,0.9027,0.9192,1.0000,0.9821,0.9092,0.8184,0.6962,0.5900,0.5447,0.5142,0.5389,0.5531,0.5318,0.4826,0.3790,0.1831,0.1750,0.1679,0.0674,0.0609,0.0375,0.0533,0.0278,0.0179,0.0114,0.0073,0.0116,0.0092,0.0078,0.0041,0.0013,0.0011,0.0045,0.0039,0.0022,0.0023,0.0016,R 62 | 0.0135,0.0045,0.0051,0.0289,0.0561,0.0929,0.1031,0.0883,0.1596,0.1908,0.1576,0.1112,0.1197,0.1174,0.1415,0.2215,0.2658,0.2713,0.3862,0.5717,0.6797,0.8747,1.0000,0.8948,0.8420,0.9174,0.9307,0.9050,0.8228,0.6986,0.5831,0.4924,0.4563,0.5159,0.5670,0.5284,0.5144,0.3742,0.2282,0.1193,0.1088,0.0431,0.1070,0.0583,0.0046,0.0473,0.0408,0.0290,0.0192,0.0094,0.0025,0.0037,0.0084,0.0102,0.0096,0.0024,0.0037,0.0028,0.0030,0.0030,R 63 | 
0.0086,0.0215,0.0242,0.0445,0.0667,0.0771,0.0499,0.0906,0.1229,0.1185,0.0775,0.1101,0.1042,0.0853,0.0456,0.1304,0.2690,0.2947,0.3669,0.4948,0.6275,0.8162,0.9237,0.8710,0.8052,0.8756,1.0000,0.9858,0.9427,0.8114,0.6987,0.6810,0.6591,0.6954,0.7290,0.6680,0.5917,0.4899,0.3439,0.2366,0.1716,0.1013,0.0766,0.0845,0.0260,0.0333,0.0205,0.0309,0.0101,0.0095,0.0047,0.0072,0.0054,0.0022,0.0016,0.0029,0.0058,0.0050,0.0024,0.0030,R 64 | 0.0067,0.0096,0.0024,0.0058,0.0197,0.0618,0.0432,0.0951,0.0836,0.1180,0.0978,0.0909,0.0656,0.0593,0.0832,0.1297,0.2038,0.3811,0.4451,0.5224,0.5911,0.6566,0.6308,0.5998,0.4958,0.5647,0.6906,0.8513,1.0000,0.9166,0.7676,0.6177,0.5468,0.5516,0.5463,0.5515,0.4561,0.3466,0.3384,0.2853,0.2502,0.1641,0.1605,0.1491,0.1326,0.0687,0.0602,0.0561,0.0306,0.0154,0.0029,0.0048,0.0023,0.0020,0.0040,0.0019,0.0034,0.0034,0.0051,0.0031,R 65 | 0.0071,0.0103,0.0135,0.0494,0.0253,0.0806,0.0701,0.0738,0.0117,0.0898,0.0289,0.1554,0.1437,0.1035,0.1424,0.1227,0.0892,0.2047,0.0827,0.1524,0.3031,0.1608,0.0667,0.1426,0.0395,0.1653,0.3399,0.4855,0.5206,0.5508,0.6102,0.5989,0.6764,0.8897,1.0000,0.9517,0.8459,0.7073,0.6697,0.6326,0.5102,0.4161,0.2816,0.1705,0.1421,0.0971,0.0879,0.0863,0.0355,0.0233,0.0252,0.0043,0.0048,0.0076,0.0124,0.0105,0.0054,0.0032,0.0073,0.0063,R 66 | 0.0176,0.0172,0.0501,0.0285,0.0262,0.0351,0.0362,0.0535,0.0258,0.0474,0.0526,0.1854,0.1040,0.0948,0.0912,0.1688,0.1568,0.0375,0.1316,0.2086,0.1976,0.0946,0.1965,0.1242,0.0616,0.2141,0.4642,0.6471,0.6340,0.6107,0.7046,0.5376,0.5934,0.8443,0.9481,0.9705,0.7766,0.6313,0.5760,0.6148,0.5450,0.4813,0.3406,0.1916,0.1134,0.0640,0.0911,0.0980,0.0563,0.0187,0.0088,0.0042,0.0175,0.0171,0.0079,0.0050,0.0112,0.0179,0.0294,0.0063,R 67 | 0.0265,0.0440,0.0137,0.0084,0.0305,0.0438,0.0341,0.0780,0.0844,0.0779,0.0327,0.2060,0.1908,0.1065,0.1457,0.2232,0.2070,0.1105,0.1078,0.1165,0.2224,0.0689,0.2060,0.2384,0.0904,0.2278,0.5872,0.8457,0.8467,0.7679,0.8055,0.6260,0.6545,0.8747,0.9885,0.9348,0.6960,0.5733,0.5872,0.6663,0.5651,0.5247,0.3684,0.1997,0.1512,0.0508,0.0931,0.0982,0.0524,0.0188,0.0100,0.0038,0.0187,0.0156,0.0068,0.0097,0.0073,0.0081,0.0086,0.0095,R 68 | 0.0368,0.0403,0.0317,0.0293,0.0820,0.1342,0.1161,0.0663,0.0155,0.0506,0.0906,0.2545,0.1464,0.1272,0.1223,0.1669,0.1424,0.1285,0.1857,0.1136,0.2069,0.0219,0.2400,0.2547,0.0240,0.1923,0.4753,0.7003,0.6825,0.6443,0.7063,0.5373,0.6601,0.8708,0.9518,0.9605,0.7712,0.6772,0.6431,0.6720,0.6035,0.5155,0.3802,0.2278,0.1522,0.0801,0.0804,0.0752,0.0566,0.0175,0.0058,0.0091,0.0160,0.0160,0.0081,0.0070,0.0135,0.0067,0.0078,0.0068,R 69 | 0.0195,0.0142,0.0181,0.0406,0.0391,0.0249,0.0892,0.0973,0.0840,0.1191,0.1522,0.1322,0.1434,0.1244,0.0653,0.0890,0.1226,0.1846,0.3880,0.3658,0.2297,0.2610,0.4193,0.5848,0.5643,0.5448,0.4772,0.6897,0.9797,1.0000,0.9546,0.8835,0.7662,0.6547,0.5447,0.4593,0.4679,0.1987,0.0699,0.1493,0.1713,0.1654,0.2600,0.3846,0.3754,0.2414,0.1077,0.0224,0.0155,0.0187,0.0125,0.0028,0.0067,0.0120,0.0012,0.0022,0.0058,0.0042,0.0067,0.0012,R 70 | 0.0216,0.0215,0.0273,0.0139,0.0357,0.0785,0.0906,0.0908,0.1151,0.0973,0.1203,0.1102,0.1192,0.1762,0.2390,0.2138,0.1929,0.1765,0.0746,0.1265,0.2005,0.1571,0.2605,0.5386,0.8440,1.0000,0.8684,0.6742,0.5537,0.4638,0.3609,0.2055,0.1620,0.2092,0.3100,0.2344,0.1058,0.0383,0.0528,0.1291,0.2241,0.1915,0.1587,0.0942,0.0840,0.0670,0.0342,0.0469,0.0357,0.0136,0.0082,0.0140,0.0044,0.0052,0.0073,0.0021,0.0047,0.0024,0.0009,0.0017,R 71 | 
0.0065,0.0122,0.0068,0.0108,0.0217,0.0284,0.0527,0.0575,0.1054,0.1109,0.0937,0.0827,0.0920,0.0911,0.1487,0.1666,0.1268,0.1374,0.1095,0.1286,0.2146,0.2889,0.4238,0.6168,0.8167,0.9622,0.8280,0.5816,0.4667,0.3539,0.2727,0.1410,0.1863,0.2176,0.2360,0.1725,0.0589,0.0621,0.1847,0.2452,0.2984,0.3041,0.2275,0.1480,0.1102,0.1178,0.0608,0.0333,0.0276,0.0100,0.0023,0.0069,0.0025,0.0027,0.0052,0.0036,0.0026,0.0036,0.0006,0.0035,R 72 | 0.0036,0.0078,0.0092,0.0387,0.0530,0.1197,0.1243,0.1026,0.1239,0.0888,0.0937,0.1245,0.1599,0.1542,0.1846,0.1732,0.1477,0.1748,0.1455,0.1579,0.2257,0.1975,0.3368,0.5828,0.8505,1.0000,0.8457,0.6624,0.5564,0.3925,0.3233,0.2054,0.1920,0.2227,0.3147,0.2268,0.0795,0.0748,0.1166,0.1969,0.2619,0.2507,0.1983,0.0948,0.0931,0.0965,0.0381,0.0435,0.0336,0.0055,0.0079,0.0119,0.0055,0.0035,0.0036,0.0004,0.0018,0.0049,0.0024,0.0016,R 73 | 0.0208,0.0186,0.0131,0.0211,0.0610,0.0613,0.0612,0.0506,0.0989,0.1093,0.1063,0.1179,0.1291,0.1591,0.1680,0.1918,0.1615,0.1647,0.1397,0.1426,0.2429,0.2816,0.4290,0.6443,0.9061,1.0000,0.8087,0.6119,0.5260,0.3677,0.2746,0.1020,0.1339,0.1582,0.1952,0.1787,0.0429,0.1096,0.1762,0.2481,0.3150,0.2920,0.1902,0.0696,0.0758,0.0910,0.0441,0.0244,0.0265,0.0095,0.0140,0.0074,0.0063,0.0081,0.0087,0.0044,0.0028,0.0019,0.0049,0.0023,R 74 | 0.0139,0.0222,0.0089,0.0108,0.0215,0.0136,0.0659,0.0954,0.0786,0.1015,0.1261,0.0828,0.0493,0.0848,0.1514,0.1396,0.1066,0.1923,0.2991,0.3247,0.3797,0.5658,0.7483,0.8757,0.9048,0.7511,0.6858,0.7043,0.5864,0.3773,0.2206,0.2628,0.2672,0.2907,0.1982,0.2288,0.3186,0.2871,0.2921,0.2806,0.2682,0.2112,0.1513,0.1789,0.1850,0.1717,0.0898,0.0656,0.0445,0.0110,0.0024,0.0062,0.0072,0.0113,0.0012,0.0022,0.0025,0.0059,0.0039,0.0048,R 75 | 0.0109,0.0093,0.0121,0.0378,0.0679,0.0863,0.1004,0.0664,0.0941,0.1036,0.0972,0.0501,0.1546,0.3404,0.4804,0.6570,0.7738,0.7827,0.8152,0.8129,0.8297,0.8535,0.8870,0.8894,0.8980,0.9667,1.0000,0.9134,0.6762,0.4659,0.2895,0.2959,0.1746,0.2112,0.2569,0.2276,0.2149,0.1601,0.0371,0.0117,0.0488,0.0288,0.0597,0.0431,0.0369,0.0025,0.0327,0.0257,0.0182,0.0108,0.0124,0.0077,0.0023,0.0117,0.0053,0.0077,0.0076,0.0056,0.0055,0.0039,R 76 | 0.0202,0.0104,0.0325,0.0239,0.0807,0.1529,0.1154,0.0608,0.1317,0.1370,0.0843,0.0269,0.1254,0.3046,0.5584,0.7973,0.8341,0.8057,0.8616,0.8769,0.9413,0.9403,0.9409,1.0000,0.9725,0.9309,0.9351,0.7317,0.4421,0.3244,0.4161,0.4611,0.4031,0.3000,0.2459,0.1348,0.2541,0.2255,0.1598,0.1485,0.0845,0.0569,0.0855,0.1262,0.1153,0.0570,0.0426,0.0425,0.0235,0.0006,0.0188,0.0127,0.0081,0.0067,0.0043,0.0065,0.0049,0.0054,0.0073,0.0054,R 77 | 0.0239,0.0189,0.0466,0.0440,0.0657,0.0742,0.1380,0.1099,0.1384,0.1376,0.0938,0.0259,0.1499,0.2851,0.5743,0.8278,0.8669,0.8131,0.9045,0.9046,1.0000,0.9976,0.9872,0.9761,0.9009,0.9724,0.9675,0.7633,0.4434,0.3822,0.4727,0.4007,0.3381,0.3172,0.2222,0.0733,0.2692,0.1888,0.0712,0.1062,0.0694,0.0300,0.0893,0.1459,0.1348,0.0391,0.0546,0.0469,0.0201,0.0095,0.0155,0.0091,0.0151,0.0080,0.0018,0.0078,0.0045,0.0026,0.0036,0.0024,R 78 | 0.0336,0.0294,0.0476,0.0539,0.0794,0.0804,0.1136,0.1228,0.1235,0.0842,0.0357,0.0689,0.1705,0.3257,0.4602,0.6225,0.7327,0.7843,0.7988,0.8261,1.0000,0.9814,0.9620,0.9601,0.9118,0.9086,0.7931,0.5877,0.3474,0.4235,0.4633,0.3410,0.2849,0.2847,0.1742,0.0549,0.1192,0.1154,0.0855,0.1811,0.1264,0.0799,0.0378,0.1268,0.1125,0.0505,0.0949,0.0677,0.0259,0.0170,0.0033,0.0150,0.0111,0.0032,0.0035,0.0169,0.0137,0.0015,0.0069,0.0051,R 79 | 
0.0231,0.0351,0.0030,0.0304,0.0339,0.0860,0.1738,0.1351,0.1063,0.0347,0.0575,0.1382,0.2274,0.4038,0.5223,0.6847,0.7521,0.7760,0.7708,0.8627,1.0000,0.8873,0.8057,0.8760,0.9066,0.9430,0.8846,0.6500,0.2970,0.2423,0.2992,0.2285,0.2277,0.1529,0.1037,0.0352,0.1073,0.1373,0.1331,0.1454,0.1115,0.0440,0.0762,0.1381,0.0831,0.0654,0.0844,0.0595,0.0497,0.0313,0.0154,0.0106,0.0097,0.0022,0.0052,0.0072,0.0056,0.0038,0.0043,0.0030,R 80 | 0.0108,0.0086,0.0058,0.0460,0.0752,0.0887,0.1015,0.0494,0.0472,0.0393,0.1106,0.1412,0.2202,0.2976,0.4116,0.4754,0.5390,0.6279,0.7060,0.7918,0.9493,1.0000,0.9645,0.9432,0.8658,0.7895,0.6501,0.4492,0.4739,0.6153,0.4929,0.3195,0.3735,0.3336,0.1052,0.0671,0.0379,0.0461,0.1694,0.2169,0.1677,0.0644,0.0159,0.0778,0.0653,0.0210,0.0509,0.0387,0.0262,0.0101,0.0161,0.0029,0.0078,0.0114,0.0083,0.0058,0.0003,0.0023,0.0026,0.0027,R 81 | 0.0229,0.0369,0.0040,0.0375,0.0455,0.1452,0.2211,0.1188,0.0750,0.1631,0.2709,0.3358,0.4091,0.4400,0.5485,0.7213,0.8137,0.9185,1.0000,0.9418,0.9116,0.9349,0.7484,0.5146,0.4106,0.3443,0.6981,0.8713,0.9013,0.8014,0.4380,0.1319,0.1709,0.2484,0.3044,0.2312,0.1338,0.2056,0.2474,0.2790,0.1610,0.0056,0.0351,0.1148,0.1331,0.0276,0.0763,0.0631,0.0309,0.0240,0.0115,0.0064,0.0022,0.0122,0.0151,0.0056,0.0026,0.0029,0.0104,0.0163,R 82 | 0.0100,0.0194,0.0155,0.0489,0.0839,0.1009,0.1627,0.2071,0.2696,0.2990,0.3242,0.3565,0.3951,0.5201,0.6953,0.8468,1.0000,0.9278,0.8510,0.8010,0.8142,0.8825,0.7302,0.6107,0.7159,0.8458,0.6319,0.4808,0.6291,0.7152,0.6005,0.4235,0.4106,0.3992,0.1730,0.1975,0.2370,0.1339,0.1583,0.3151,0.1968,0.2054,0.1272,0.1129,0.1946,0.2195,0.1930,0.1498,0.0773,0.0196,0.0122,0.0130,0.0073,0.0077,0.0075,0.0060,0.0080,0.0019,0.0053,0.0019,R 83 | 0.0409,0.0421,0.0573,0.0130,0.0183,0.1019,0.1054,0.1070,0.2302,0.2259,0.2373,0.3323,0.3827,0.4840,0.6812,0.7555,0.9522,0.9826,0.8871,0.8268,0.7561,0.8217,0.6967,0.6444,0.6948,0.8014,0.6053,0.6084,0.8877,0.8557,0.5563,0.2897,0.3638,0.4786,0.2908,0.0899,0.2043,0.1707,0.0407,0.1286,0.1581,0.2191,0.1701,0.0971,0.2217,0.2732,0.1874,0.1062,0.0665,0.0405,0.0113,0.0028,0.0036,0.0105,0.0120,0.0087,0.0061,0.0061,0.0030,0.0078,R 84 | 0.0217,0.0340,0.0392,0.0236,0.1081,0.1164,0.1398,0.1009,0.1147,0.1777,0.4079,0.4113,0.3973,0.5078,0.6509,0.8073,0.9819,1.0000,0.9407,0.8452,0.8106,0.8460,0.6212,0.5815,0.7745,0.8204,0.5601,0.2989,0.5009,0.6628,0.5753,0.4055,0.3746,0.3481,0.1580,0.1422,0.2130,0.1866,0.1003,0.2396,0.2241,0.2029,0.0710,0.1606,0.1669,0.1700,0.1829,0.1403,0.0506,0.0224,0.0095,0.0031,0.0103,0.0078,0.0077,0.0094,0.0031,0.0030,0.0013,0.0069,R 85 | 0.0378,0.0318,0.0423,0.0350,0.1787,0.1635,0.0887,0.0817,0.1779,0.2053,0.3135,0.3118,0.3686,0.3885,0.5850,0.7868,0.9739,1.0000,0.9843,0.8610,0.8443,0.9061,0.5847,0.4033,0.5946,0.6793,0.6389,0.5002,0.5578,0.4831,0.4729,0.3318,0.3969,0.3894,0.2314,0.1036,0.1312,0.0864,0.2569,0.3179,0.2649,0.2714,0.1713,0.0584,0.1230,0.2200,0.2198,0.1074,0.0423,0.0162,0.0093,0.0046,0.0044,0.0078,0.0102,0.0065,0.0061,0.0062,0.0043,0.0053,R 86 | 0.0365,0.1632,0.1636,0.1421,0.1130,0.1306,0.2112,0.2268,0.2992,0.3735,0.3042,0.0387,0.2679,0.5397,0.6204,0.7257,0.8350,0.6888,0.4450,0.3921,0.5605,0.7545,0.8311,1.0000,0.8762,0.7092,0.7009,0.5014,0.3942,0.4456,0.4072,0.0773,0.1423,0.0401,0.3597,0.6847,0.7076,0.3597,0.0612,0.3027,0.3966,0.3868,0.2380,0.2059,0.2288,0.1704,0.1587,0.1792,0.1022,0.0151,0.0223,0.0110,0.0071,0.0205,0.0164,0.0063,0.0078,0.0094,0.0110,0.0068,R 87 | 
0.0188,0.0370,0.0953,0.0824,0.0249,0.0488,0.1424,0.1972,0.1873,0.1806,0.2139,0.1523,0.1975,0.4844,0.7298,0.7807,0.7906,0.6122,0.4200,0.2807,0.5148,0.7569,0.8596,1.0000,0.8457,0.6797,0.6971,0.5843,0.4772,0.5201,0.4241,0.1592,0.1668,0.0588,0.3967,0.7147,0.7319,0.3509,0.0589,0.2690,0.4200,0.3874,0.2440,0.2000,0.2307,0.1886,0.1960,0.1701,0.1366,0.0398,0.0143,0.0093,0.0033,0.0113,0.0030,0.0057,0.0090,0.0057,0.0068,0.0024,R 88 | 0.0856,0.0454,0.0382,0.0203,0.0385,0.0534,0.2140,0.3110,0.2837,0.2751,0.2707,0.0946,0.1020,0.4519,0.6737,0.6699,0.7066,0.5632,0.3785,0.2721,0.5297,0.7697,0.8643,0.9304,0.9372,0.6247,0.6024,0.6810,0.5047,0.5775,0.4754,0.2400,0.2779,0.1997,0.5305,0.7409,0.7775,0.4424,0.1416,0.3508,0.4482,0.4208,0.3054,0.2235,0.2611,0.2798,0.2392,0.2021,0.1326,0.0358,0.0128,0.0172,0.0138,0.0079,0.0037,0.0051,0.0258,0.0102,0.0037,0.0037,R 89 | 0.0274,0.0242,0.0621,0.0560,0.1129,0.0973,0.1823,0.1745,0.1440,0.1808,0.2366,0.0906,0.1749,0.4012,0.5187,0.7312,0.9062,0.9260,0.7434,0.4463,0.5103,0.6952,0.7755,0.8364,0.7283,0.6399,0.5759,0.4146,0.3495,0.4437,0.2665,0.2024,0.1942,0.0765,0.3725,0.5843,0.4827,0.2347,0.0999,0.3244,0.3990,0.2975,0.1684,0.1761,0.1683,0.0729,0.1190,0.1297,0.0748,0.0067,0.0255,0.0113,0.0108,0.0085,0.0047,0.0074,0.0104,0.0161,0.0220,0.0173,R 90 | 0.0235,0.0291,0.0749,0.0519,0.0227,0.0834,0.0677,0.2002,0.2876,0.3674,0.2974,0.0837,0.1912,0.5040,0.6352,0.6804,0.7505,0.6595,0.4509,0.2964,0.4019,0.6794,0.8297,1.0000,0.8240,0.7115,0.7726,0.6124,0.4936,0.5648,0.4906,0.1820,0.1811,0.1107,0.4603,0.6650,0.6423,0.2166,0.1951,0.4947,0.4925,0.4041,0.2402,0.1392,0.1779,0.1946,0.1723,0.1522,0.0929,0.0179,0.0242,0.0083,0.0037,0.0095,0.0105,0.0030,0.0132,0.0068,0.0108,0.0090,R 91 | 0.0126,0.0519,0.0621,0.0518,0.1072,0.2587,0.2304,0.2067,0.3416,0.4284,0.3015,0.1207,0.3299,0.5707,0.6962,0.9751,1.0000,0.9293,0.6210,0.4586,0.5001,0.5032,0.7082,0.8420,0.8109,0.7690,0.8105,0.6203,0.2356,0.2595,0.6299,0.6762,0.2903,0.4393,0.8529,0.7180,0.4801,0.5856,0.4993,0.2866,0.0601,0.1167,0.2737,0.2812,0.2078,0.0660,0.0491,0.0345,0.0172,0.0287,0.0027,0.0208,0.0048,0.0199,0.0126,0.0022,0.0037,0.0034,0.0114,0.0077,R 92 | 0.0253,0.0808,0.0507,0.0244,0.1724,0.3823,0.3729,0.3583,0.3429,0.2197,0.2653,0.3223,0.5582,0.6916,0.7943,0.7152,0.3512,0.2008,0.2676,0.4299,0.5280,0.3489,0.1430,0.5453,0.6338,0.7712,0.6838,0.8015,0.8073,0.8310,0.7792,0.5049,0.1413,0.2767,0.5084,0.4787,0.1356,0.2299,0.2789,0.3833,0.2933,0.1155,0.1705,0.1294,0.0909,0.0800,0.0567,0.0198,0.0114,0.0151,0.0085,0.0178,0.0073,0.0079,0.0038,0.0116,0.0033,0.0039,0.0081,0.0053,R 93 | 0.0260,0.0192,0.0254,0.0061,0.0352,0.0701,0.1263,0.1080,0.1523,0.1630,0.1030,0.2187,0.1542,0.2630,0.2940,0.2978,0.0699,0.1401,0.2990,0.3915,0.3598,0.2403,0.4208,0.5675,0.6094,0.6323,0.6549,0.7673,1.0000,0.8463,0.5509,0.4444,0.5169,0.4268,0.1802,0.0791,0.0535,0.1906,0.2561,0.2153,0.2769,0.2841,0.1733,0.0815,0.0335,0.0933,0.1018,0.0309,0.0208,0.0318,0.0132,0.0118,0.0120,0.0051,0.0070,0.0015,0.0035,0.0008,0.0044,0.0077,R 94 | 0.0459,0.0437,0.0347,0.0456,0.0067,0.0890,0.1798,0.1741,0.1598,0.1408,0.2693,0.3259,0.4545,0.5785,0.4471,0.2231,0.2164,0.3201,0.2915,0.4235,0.4460,0.2380,0.6415,0.8966,0.8918,0.7529,0.6838,0.8390,1.0000,0.8362,0.5427,0.4577,0.8067,0.6973,0.3915,0.1558,0.1598,0.2161,0.5178,0.4782,0.2344,0.3599,0.2785,0.1807,0.0352,0.0473,0.0322,0.0408,0.0163,0.0088,0.0121,0.0067,0.0032,0.0109,0.0164,0.0151,0.0070,0.0085,0.0117,0.0056,R 95 | 
0.0025,0.0309,0.0171,0.0228,0.0434,0.1224,0.1947,0.1661,0.1368,0.1430,0.0994,0.2250,0.2444,0.3239,0.3039,0.2410,0.0367,0.1672,0.3038,0.4069,0.3613,0.1994,0.4611,0.6849,0.7272,0.7152,0.7102,0.8516,1.0000,0.7690,0.4841,0.3717,0.6096,0.5110,0.2586,0.0916,0.0947,0.2287,0.3480,0.2095,0.1901,0.2941,0.2211,0.1524,0.0746,0.0606,0.0692,0.0446,0.0344,0.0082,0.0108,0.0149,0.0077,0.0036,0.0114,0.0085,0.0101,0.0016,0.0028,0.0014,R 96 | 0.0291,0.0400,0.0771,0.0809,0.0521,0.1051,0.0145,0.0674,0.1294,0.1146,0.0942,0.0794,0.0252,0.1191,0.1045,0.2050,0.1556,0.2690,0.3784,0.4024,0.3470,0.1395,0.1208,0.2827,0.1500,0.2626,0.4468,0.7520,0.9036,0.7812,0.4766,0.2483,0.5372,0.6279,0.3647,0.4572,0.6359,0.6474,0.5520,0.3253,0.2292,0.0653,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0056,0.0237,0.0204,0.0050,0.0137,0.0164,0.0081,0.0139,0.0111,R 97 | 0.0181,0.0146,0.0026,0.0141,0.0421,0.0473,0.0361,0.0741,0.1398,0.1045,0.0904,0.0671,0.0997,0.1056,0.0346,0.1231,0.1626,0.3652,0.3262,0.2995,0.2109,0.2104,0.2085,0.2282,0.0747,0.1969,0.4086,0.6385,0.7970,0.7508,0.5517,0.2214,0.4672,0.4479,0.2297,0.3235,0.4480,0.5581,0.6520,0.5354,0.2478,0.2268,0.1788,0.0898,0.0536,0.0374,0.0990,0.0956,0.0317,0.0142,0.0076,0.0223,0.0255,0.0145,0.0233,0.0041,0.0018,0.0048,0.0089,0.0085,R 98 | 0.0491,0.0279,0.0592,0.1270,0.1772,0.1908,0.2217,0.0768,0.1246,0.2028,0.0947,0.2497,0.2209,0.3195,0.3340,0.3323,0.2780,0.2975,0.2948,0.1729,0.3264,0.3834,0.3523,0.5410,0.5228,0.4475,0.5340,0.5323,0.3907,0.3456,0.4091,0.4639,0.5580,0.5727,0.6355,0.7563,0.6903,0.6176,0.5379,0.5622,0.6508,0.4797,0.3736,0.2804,0.1982,0.2438,0.1789,0.1706,0.0762,0.0238,0.0268,0.0081,0.0129,0.0161,0.0063,0.0119,0.0194,0.0140,0.0332,0.0439,M 99 | 0.1313,0.2339,0.3059,0.4264,0.4010,0.1791,0.1853,0.0055,0.1929,0.2231,0.2907,0.2259,0.3136,0.3302,0.3660,0.3956,0.4386,0.4670,0.5255,0.3735,0.2243,0.1973,0.4337,0.6532,0.5070,0.2796,0.4163,0.5950,0.5242,0.4178,0.3714,0.2375,0.0863,0.1437,0.2896,0.4577,0.3725,0.3372,0.3803,0.4181,0.3603,0.2711,0.1653,0.1951,0.2811,0.2246,0.1921,0.1500,0.0665,0.0193,0.0156,0.0362,0.0210,0.0154,0.0180,0.0013,0.0106,0.0127,0.0178,0.0231,M 100 | 0.0201,0.0423,0.0554,0.0783,0.0620,0.0871,0.1201,0.2707,0.1206,0.0279,0.2251,0.2615,0.1770,0.3709,0.4533,0.5553,0.4616,0.3797,0.3450,0.2665,0.2395,0.1127,0.2556,0.5169,0.3779,0.4082,0.5353,0.5116,0.4544,0.4258,0.3869,0.3939,0.4661,0.3974,0.2194,0.1816,0.1023,0.2108,0.3253,0.3697,0.2912,0.3010,0.2563,0.1927,0.2062,0.1751,0.0841,0.1035,0.0641,0.0153,0.0081,0.0191,0.0182,0.0160,0.0290,0.0090,0.0242,0.0224,0.0190,0.0096,M 101 | 0.0629,0.1065,0.1526,0.1229,0.1437,0.1190,0.0884,0.0907,0.2107,0.3597,0.5466,0.5205,0.5127,0.5395,0.6558,0.8705,0.9786,0.9335,0.7917,0.7383,0.6908,0.3850,0.0671,0.0502,0.2717,0.2839,0.2234,0.1911,0.0408,0.2531,0.1979,0.1891,0.2433,0.1956,0.2667,0.1340,0.1073,0.2023,0.1794,0.0227,0.1313,0.1775,0.1549,0.1626,0.0708,0.0129,0.0795,0.0762,0.0117,0.0061,0.0257,0.0089,0.0262,0.0108,0.0138,0.0187,0.0230,0.0057,0.0113,0.0131,M 102 | 0.0335,0.0134,0.0696,0.1180,0.0348,0.1180,0.1948,0.1607,0.3036,0.4372,0.5533,0.5771,0.7022,0.7067,0.7367,0.7391,0.8622,0.9458,0.8782,0.7913,0.5760,0.3061,0.0563,0.0239,0.2554,0.4862,0.5027,0.4402,0.2847,0.1797,0.3560,0.3522,0.3321,0.3112,0.3638,0.0754,0.1834,0.1820,0.1815,0.1593,0.0576,0.0954,0.1086,0.0812,0.0784,0.0487,0.0439,0.0586,0.0370,0.0185,0.0302,0.0244,0.0232,0.0093,0.0159,0.0193,0.0032,0.0377,0.0126,0.0156,M 103 | 
0.0587,0.1210,0.1268,0.1498,0.1436,0.0561,0.0832,0.0672,0.1372,0.2352,0.3208,0.4257,0.5201,0.4914,0.5950,0.7221,0.9039,0.9111,0.8723,0.7686,0.7326,0.5222,0.3097,0.3172,0.2270,0.1640,0.1746,0.1835,0.2048,0.1674,0.2767,0.3104,0.3399,0.4441,0.5046,0.2814,0.1681,0.2633,0.3198,0.1933,0.0934,0.0443,0.0780,0.0722,0.0405,0.0553,0.1081,0.1139,0.0767,0.0265,0.0215,0.0331,0.0111,0.0088,0.0158,0.0122,0.0038,0.0101,0.0228,0.0124,M 104 | 0.0162,0.0253,0.0262,0.0386,0.0645,0.0472,0.1056,0.1388,0.0598,0.1334,0.2969,0.4754,0.5677,0.5690,0.6421,0.7487,0.8999,1.0000,0.9690,0.9032,0.7685,0.6998,0.6644,0.5964,0.3711,0.0921,0.0481,0.0876,0.1040,0.1714,0.3264,0.4612,0.3939,0.5050,0.4833,0.3511,0.2319,0.4029,0.3676,0.1510,0.0745,0.1395,0.1552,0.0377,0.0636,0.0443,0.0264,0.0223,0.0187,0.0077,0.0137,0.0071,0.0082,0.0232,0.0198,0.0074,0.0035,0.0100,0.0048,0.0019,M 105 | 0.0307,0.0523,0.0653,0.0521,0.0611,0.0577,0.0665,0.0664,0.1460,0.2792,0.3877,0.4992,0.4981,0.4972,0.5607,0.7339,0.8230,0.9173,0.9975,0.9911,0.8240,0.6498,0.5980,0.4862,0.3150,0.1543,0.0989,0.0284,0.1008,0.2636,0.2694,0.2930,0.2925,0.3998,0.3660,0.3172,0.4609,0.4374,0.1820,0.3376,0.6202,0.4448,0.1863,0.1420,0.0589,0.0576,0.0672,0.0269,0.0245,0.0190,0.0063,0.0321,0.0189,0.0137,0.0277,0.0152,0.0052,0.0121,0.0124,0.0055,M 106 | 0.0116,0.0179,0.0449,0.1096,0.1913,0.0924,0.0761,0.1092,0.0757,0.1006,0.2500,0.3988,0.3809,0.4753,0.6165,0.6464,0.8024,0.9208,0.9832,0.9634,0.8646,0.8325,0.8276,0.8007,0.6102,0.4853,0.4355,0.4307,0.4399,0.3833,0.3032,0.3035,0.3197,0.2292,0.2131,0.2347,0.3201,0.4455,0.3655,0.2715,0.1747,0.1781,0.2199,0.1056,0.0573,0.0307,0.0237,0.0470,0.0102,0.0057,0.0031,0.0163,0.0099,0.0084,0.0270,0.0277,0.0097,0.0054,0.0148,0.0092,M 107 | 0.0331,0.0423,0.0474,0.0818,0.0835,0.0756,0.0374,0.0961,0.0548,0.0193,0.0897,0.1734,0.1936,0.2803,0.3313,0.5020,0.6360,0.7096,0.8333,0.8730,0.8073,0.7507,0.7526,0.7298,0.6177,0.4946,0.4531,0.4099,0.4540,0.4124,0.3139,0.3194,0.3692,0.3776,0.4469,0.4777,0.4716,0.4664,0.3893,0.4255,0.4064,0.3712,0.3863,0.2802,0.1283,0.1117,0.1303,0.0787,0.0436,0.0224,0.0133,0.0078,0.0174,0.0176,0.0038,0.0129,0.0066,0.0044,0.0134,0.0092,M 108 | 0.0428,0.0555,0.0708,0.0618,0.1215,0.1524,0.1543,0.0391,0.0610,0.0113,0.1255,0.2473,0.3011,0.3747,0.4520,0.5392,0.6588,0.7113,0.7602,0.8672,0.8416,0.7974,0.8385,0.9317,0.8555,0.6162,0.4139,0.3269,0.3108,0.2554,0.3367,0.4465,0.5000,0.5111,0.5194,0.4619,0.4234,0.4372,0.4277,0.4433,0.3700,0.3324,0.2564,0.2527,0.2137,0.1789,0.1010,0.0528,0.0453,0.0118,0.0009,0.0142,0.0179,0.0079,0.0060,0.0131,0.0089,0.0084,0.0113,0.0049,M 109 | 0.0599,0.0474,0.0498,0.0387,0.1026,0.0773,0.0853,0.0447,0.1094,0.0351,0.1582,0.2023,0.2268,0.2829,0.3819,0.4665,0.6687,0.8647,0.9361,0.9367,0.9144,0.9162,0.9311,0.8604,0.7327,0.5763,0.4162,0.4113,0.4146,0.3149,0.2936,0.3169,0.3149,0.4132,0.3994,0.4195,0.4532,0.4419,0.4737,0.3431,0.3194,0.3370,0.2493,0.2650,0.1748,0.0932,0.0530,0.0081,0.0342,0.0137,0.0028,0.0013,0.0005,0.0227,0.0209,0.0081,0.0117,0.0114,0.0112,0.0100,M 110 | 0.0264,0.0071,0.0342,0.0793,0.1043,0.0783,0.1417,0.1176,0.0453,0.0945,0.1132,0.0840,0.0717,0.1968,0.2633,0.4191,0.5050,0.6711,0.7922,0.8381,0.8759,0.9422,1.0000,0.9931,0.9575,0.8647,0.7215,0.5801,0.4964,0.4886,0.4079,0.2443,0.1768,0.2472,0.3518,0.3762,0.2909,0.2311,0.3168,0.3554,0.3741,0.4443,0.3261,0.1963,0.0864,0.1688,0.1991,0.1217,0.0628,0.0323,0.0253,0.0214,0.0262,0.0177,0.0037,0.0068,0.0121,0.0077,0.0078,0.0066,M 111 | 
0.0210,0.0121,0.0203,0.1036,0.1675,0.0418,0.0723,0.0828,0.0494,0.0686,0.1125,0.1741,0.2710,0.3087,0.3575,0.4998,0.6011,0.6470,0.8067,0.9008,0.8906,0.9338,1.0000,0.9102,0.8496,0.7867,0.7688,0.7718,0.6268,0.4301,0.2077,0.1198,0.1660,0.2618,0.3862,0.3958,0.3248,0.2302,0.3250,0.4022,0.4344,0.4008,0.3370,0.2518,0.2101,0.1181,0.1150,0.0550,0.0293,0.0183,0.0104,0.0117,0.0101,0.0061,0.0031,0.0099,0.0080,0.0107,0.0161,0.0133,M 112 | 0.0530,0.0885,0.1997,0.2604,0.3225,0.2247,0.0617,0.2287,0.0950,0.0740,0.1610,0.2226,0.2703,0.3365,0.4266,0.4144,0.5655,0.6921,0.8547,0.9234,0.9171,1.0000,0.9532,0.9101,0.8337,0.7053,0.6534,0.4483,0.2460,0.2020,0.1446,0.0994,0.1510,0.2392,0.4434,0.5023,0.4441,0.4571,0.3927,0.2900,0.3408,0.4990,0.3632,0.1387,0.1800,0.1299,0.0523,0.0817,0.0469,0.0114,0.0299,0.0244,0.0199,0.0257,0.0082,0.0151,0.0171,0.0146,0.0134,0.0056,M 113 | 0.0454,0.0472,0.0697,0.1021,0.1397,0.1493,0.1487,0.0771,0.1171,0.1675,0.2799,0.3323,0.4012,0.4296,0.5350,0.5411,0.6870,0.8045,0.9194,0.9169,1.0000,0.9972,0.9093,0.7918,0.6705,0.5324,0.3572,0.2484,0.3161,0.3775,0.3138,0.1713,0.2937,0.5234,0.5926,0.5437,0.4516,0.3379,0.3215,0.2178,0.1674,0.2634,0.2980,0.2037,0.1155,0.0919,0.0882,0.0228,0.0380,0.0142,0.0137,0.0120,0.0042,0.0238,0.0129,0.0084,0.0218,0.0321,0.0154,0.0053,M 114 | 0.0283,0.0599,0.0656,0.0229,0.0839,0.1673,0.1154,0.1098,0.1370,0.1767,0.1995,0.2869,0.3275,0.3769,0.4169,0.5036,0.6180,0.8025,0.9333,0.9399,0.9275,0.9450,0.8328,0.7773,0.7007,0.6154,0.5810,0.4454,0.3707,0.2891,0.2185,0.1711,0.3578,0.3947,0.2867,0.2401,0.3619,0.3314,0.3763,0.4767,0.4059,0.3661,0.2320,0.1450,0.1017,0.1111,0.0655,0.0271,0.0244,0.0179,0.0109,0.0147,0.0170,0.0158,0.0046,0.0073,0.0054,0.0033,0.0045,0.0079,M 115 | 0.0114,0.0222,0.0269,0.0384,0.1217,0.2062,0.1489,0.0929,0.1350,0.1799,0.2486,0.2973,0.3672,0.4394,0.5258,0.6755,0.7402,0.8284,0.9033,0.9584,1.0000,0.9982,0.8899,0.7493,0.6367,0.6744,0.7207,0.6821,0.5512,0.4789,0.3924,0.2533,0.1089,0.1390,0.2551,0.3301,0.2818,0.2142,0.2266,0.2142,0.2354,0.2871,0.2596,0.1925,0.1256,0.1003,0.0951,0.1210,0.0728,0.0174,0.0213,0.0269,0.0152,0.0257,0.0097,0.0041,0.0050,0.0145,0.0103,0.0025,M 116 | 0.0414,0.0436,0.0447,0.0844,0.0419,0.1215,0.2002,0.1516,0.0818,0.1975,0.2309,0.3025,0.3938,0.5050,0.5872,0.6610,0.7417,0.8006,0.8456,0.7939,0.8804,0.8384,0.7852,0.8479,0.7434,0.6433,0.5514,0.3519,0.3168,0.3346,0.2056,0.1032,0.3168,0.4040,0.4282,0.4538,0.3704,0.3741,0.3839,0.3494,0.4380,0.4265,0.2854,0.2808,0.2395,0.0369,0.0805,0.0541,0.0177,0.0065,0.0222,0.0045,0.0136,0.0113,0.0053,0.0165,0.0141,0.0077,0.0246,0.0198,M 117 | 0.0094,0.0333,0.0306,0.0376,0.1296,0.1795,0.1909,0.1692,0.1870,0.1725,0.2228,0.3106,0.4144,0.5157,0.5369,0.5107,0.6441,0.7326,0.8164,0.8856,0.9891,1.0000,0.8750,0.8631,0.9074,0.8674,0.7750,0.6600,0.5615,0.4016,0.2331,0.1164,0.1095,0.0431,0.0619,0.1956,0.2120,0.3242,0.4102,0.2939,0.1911,0.1702,0.1010,0.1512,0.1427,0.1097,0.1173,0.0972,0.0703,0.0281,0.0216,0.0153,0.0112,0.0241,0.0164,0.0055,0.0078,0.0055,0.0091,0.0067,M 118 | 0.0228,0.0106,0.0130,0.0842,0.1117,0.1506,0.1776,0.0997,0.1428,0.2227,0.2621,0.3109,0.2859,0.3316,0.3755,0.4499,0.4765,0.6254,0.7304,0.8702,0.9349,0.9614,0.9126,0.9443,1.0000,0.9455,0.8815,0.7520,0.7068,0.5986,0.3857,0.2510,0.2162,0.0968,0.1323,0.1344,0.2250,0.3244,0.3939,0.3806,0.3258,0.3654,0.2983,0.1779,0.1535,0.1199,0.0959,0.0765,0.0649,0.0313,0.0185,0.0098,0.0178,0.0077,0.0074,0.0095,0.0055,0.0045,0.0063,0.0039,M 119 | 
0.0363,0.0478,0.0298,0.0210,0.1409,0.1916,0.1349,0.1613,0.1703,0.1444,0.1989,0.2154,0.2863,0.3570,0.3980,0.4359,0.5334,0.6304,0.6995,0.7435,0.8379,0.8641,0.9014,0.9432,0.9536,1.0000,0.9547,0.9745,0.8962,0.7196,0.5462,0.3156,0.2525,0.1969,0.2189,0.1533,0.0711,0.1498,0.1755,0.2276,0.1322,0.1056,0.1973,0.1692,0.1881,0.1177,0.0779,0.0495,0.0492,0.0194,0.0250,0.0115,0.0190,0.0055,0.0096,0.0050,0.0066,0.0114,0.0073,0.0033,M 120 | 0.0261,0.0266,0.0223,0.0749,0.1364,0.1513,0.1316,0.1654,0.1864,0.2013,0.2890,0.3650,0.3510,0.3495,0.4325,0.5398,0.6237,0.6876,0.7329,0.8107,0.8396,0.8632,0.8747,0.9607,0.9716,0.9121,0.8576,0.8798,0.7720,0.5711,0.4264,0.2860,0.3114,0.2066,0.1165,0.0185,0.1302,0.2480,0.1637,0.1103,0.2144,0.2033,0.1887,0.1370,0.1376,0.0307,0.0373,0.0606,0.0399,0.0169,0.0135,0.0222,0.0175,0.0127,0.0022,0.0124,0.0054,0.0021,0.0028,0.0023,M 121 | 0.0346,0.0509,0.0079,0.0243,0.0432,0.0735,0.0938,0.1134,0.1228,0.1508,0.1809,0.2390,0.2947,0.2866,0.4010,0.5325,0.5486,0.5823,0.6041,0.6749,0.7084,0.7890,0.9284,0.9781,0.9738,1.0000,0.9702,0.9956,0.8235,0.6020,0.5342,0.4867,0.3526,0.1566,0.0946,0.1613,0.2824,0.3390,0.3019,0.2945,0.2978,0.2676,0.2055,0.2069,0.1625,0.1216,0.1013,0.0744,0.0386,0.0050,0.0146,0.0040,0.0122,0.0107,0.0112,0.0102,0.0052,0.0024,0.0079,0.0031,M 122 | 0.0162,0.0041,0.0239,0.0441,0.0630,0.0921,0.1368,0.1078,0.1552,0.1779,0.2164,0.2568,0.3089,0.3829,0.4393,0.5335,0.5996,0.6728,0.7309,0.8092,0.8941,0.9668,1.0000,0.9893,0.9376,0.8991,0.9184,0.9128,0.7811,0.6018,0.3765,0.3300,0.2280,0.0212,0.1117,0.1788,0.2373,0.2843,0.2241,0.2715,0.3363,0.2546,0.1867,0.2160,0.1278,0.0768,0.1070,0.0946,0.0636,0.0227,0.0128,0.0173,0.0135,0.0114,0.0062,0.0157,0.0088,0.0036,0.0053,0.0030,M 123 | 0.0249,0.0119,0.0277,0.0760,0.1218,0.1538,0.1192,0.1229,0.2119,0.2531,0.2855,0.2961,0.3341,0.4287,0.5205,0.6087,0.7236,0.7577,0.7726,0.8098,0.8995,0.9247,0.9365,0.9853,0.9776,1.0000,0.9896,0.9076,0.7306,0.5758,0.4469,0.3719,0.2079,0.0955,0.0488,0.1406,0.2554,0.2054,0.1614,0.2232,0.1773,0.2293,0.2521,0.1464,0.0673,0.0965,0.1492,0.1128,0.0463,0.0193,0.0140,0.0027,0.0068,0.0150,0.0012,0.0133,0.0048,0.0244,0.0077,0.0074,M 124 | 0.0270,0.0163,0.0341,0.0247,0.0822,0.1256,0.1323,0.1584,0.2017,0.2122,0.2210,0.2399,0.2964,0.4061,0.5095,0.5512,0.6613,0.6804,0.6520,0.6788,0.7811,0.8369,0.8969,0.9856,1.0000,0.9395,0.8917,0.8105,0.6828,0.5572,0.4301,0.3339,0.2035,0.0798,0.0809,0.1525,0.2626,0.2456,0.1980,0.2412,0.2409,0.1901,0.2077,0.1767,0.1119,0.0779,0.1344,0.0960,0.0598,0.0330,0.0197,0.0189,0.0204,0.0085,0.0043,0.0092,0.0138,0.0094,0.0105,0.0093,M 125 | 0.0388,0.0324,0.0688,0.0898,0.1267,0.1515,0.2134,0.2613,0.2832,0.2718,0.3645,0.3934,0.3843,0.4677,0.5364,0.4823,0.4835,0.5862,0.7579,0.6997,0.6918,0.8633,0.9107,0.9346,0.7884,0.8585,0.9261,0.7080,0.5779,0.5215,0.4505,0.3129,0.1448,0.1046,0.1820,0.1519,0.1017,0.1438,0.1986,0.2039,0.2778,0.2879,0.1331,0.1140,0.1310,0.1433,0.0624,0.0100,0.0098,0.0131,0.0152,0.0255,0.0071,0.0263,0.0079,0.0111,0.0107,0.0068,0.0097,0.0067,M 126 | 0.0228,0.0853,0.1000,0.0428,0.1117,0.1651,0.1597,0.2116,0.3295,0.3517,0.3330,0.3643,0.4020,0.4731,0.5196,0.6573,0.8426,0.8476,0.8344,0.8453,0.7999,0.8537,0.9642,1.0000,0.9357,0.9409,0.9070,0.7104,0.6320,0.5667,0.3501,0.2447,0.1698,0.3290,0.3674,0.2331,0.2413,0.2556,0.1892,0.1940,0.3074,0.2785,0.0308,0.1238,0.1854,0.1753,0.1079,0.0728,0.0242,0.0191,0.0159,0.0172,0.0191,0.0260,0.0140,0.0125,0.0116,0.0093,0.0012,0.0036,M 127 | 
0.0715,0.0849,0.0587,0.0218,0.0862,0.1801,0.1916,0.1896,0.2960,0.4186,0.4867,0.5249,0.5959,0.6855,0.8573,0.9718,0.8693,0.8711,0.8954,0.9922,0.8980,0.8158,0.8373,0.7541,0.5893,0.5488,0.5643,0.5406,0.4783,0.4439,0.3698,0.2574,0.1478,0.1743,0.1229,0.1588,0.1803,0.1436,0.1667,0.2630,0.2234,0.1239,0.0869,0.2092,0.1499,0.0676,0.0899,0.0927,0.0658,0.0086,0.0216,0.0153,0.0121,0.0096,0.0196,0.0042,0.0066,0.0099,0.0083,0.0124,M 128 | 0.0209,0.0261,0.0120,0.0768,0.1064,0.1680,0.3016,0.3460,0.3314,0.4125,0.3943,0.1334,0.4622,0.9970,0.9137,0.8292,0.6994,0.7825,0.8789,0.8501,0.8920,0.9473,1.0000,0.8975,0.7806,0.8321,0.6502,0.4548,0.4732,0.3391,0.2747,0.0978,0.0477,0.1403,0.1834,0.2148,0.1271,0.1912,0.3391,0.3444,0.2369,0.1195,0.2665,0.2587,0.1393,0.1083,0.1383,0.1321,0.1069,0.0325,0.0316,0.0057,0.0159,0.0085,0.0372,0.0101,0.0127,0.0288,0.0129,0.0023,M 129 | 0.0374,0.0586,0.0628,0.0534,0.0255,0.1422,0.2072,0.2734,0.3070,0.2597,0.3483,0.3999,0.4574,0.5950,0.7924,0.8272,0.8087,0.8977,0.9828,0.8982,0.8890,0.9367,0.9122,0.7936,0.6718,0.6318,0.4865,0.3388,0.4832,0.3822,0.3075,0.1267,0.0743,0.1510,0.1906,0.1817,0.1709,0.0946,0.2829,0.3006,0.1602,0.1483,0.2875,0.2047,0.1064,0.1395,0.1065,0.0527,0.0395,0.0183,0.0353,0.0118,0.0063,0.0237,0.0032,0.0087,0.0124,0.0113,0.0098,0.0126,M 130 | 0.1371,0.1226,0.1385,0.1484,0.1776,0.1428,0.1773,0.2161,0.1630,0.2067,0.4257,0.5484,0.7131,0.7003,0.6777,0.7939,0.9382,0.8925,0.9146,0.7832,0.7960,0.7983,0.7716,0.6615,0.4860,0.5572,0.4697,0.5640,0.4517,0.3369,0.2684,0.2339,0.3052,0.3016,0.2753,0.1041,0.1757,0.3156,0.3603,0.2736,0.1301,0.2458,0.3404,0.1753,0.0679,0.1062,0.0643,0.0532,0.0531,0.0272,0.0171,0.0118,0.0129,0.0344,0.0065,0.0067,0.0022,0.0079,0.0146,0.0051,M 131 | 0.0443,0.0446,0.0235,0.1008,0.2252,0.2611,0.2061,0.1668,0.1801,0.3083,0.3794,0.5364,0.6173,0.7842,0.8392,0.9016,1.0000,0.8911,0.8753,0.7886,0.7156,0.7581,0.6372,0.3210,0.2076,0.2279,0.3309,0.2847,0.1949,0.1671,0.1025,0.1362,0.2212,0.1124,0.1677,0.1039,0.2562,0.2624,0.2236,0.1180,0.1103,0.2831,0.2385,0.0255,0.1967,0.1483,0.0434,0.0627,0.0513,0.0473,0.0248,0.0274,0.0205,0.0141,0.0185,0.0055,0.0045,0.0115,0.0152,0.0100,M 132 | 0.1150,0.1163,0.0866,0.0358,0.0232,0.1267,0.2417,0.2661,0.4346,0.5378,0.3816,0.0991,0.0616,0.1795,0.3907,0.3602,0.3041,0.2428,0.4060,0.8395,0.9777,0.4680,0.0610,0.2143,0.1348,0.2854,0.1617,0.2649,0.4565,0.6502,0.2848,0.3296,0.5370,0.6627,0.8626,0.8547,0.7848,0.9016,0.8827,0.6086,0.2810,0.0906,0.1177,0.2694,0.5214,0.4232,0.2340,0.1928,0.1092,0.0507,0.0228,0.0099,0.0065,0.0085,0.0166,0.0110,0.0190,0.0141,0.0068,0.0086,M 133 | 0.0968,0.0821,0.0629,0.0608,0.0617,0.1207,0.0944,0.4223,0.5744,0.5025,0.3488,0.1700,0.2076,0.3087,0.4224,0.5312,0.2436,0.1884,0.1908,0.8321,1.0000,0.4076,0.0960,0.1928,0.2419,0.3790,0.2893,0.3451,0.3777,0.5213,0.2316,0.3335,0.4781,0.6116,0.6705,0.7375,0.7356,0.7792,0.6788,0.5259,0.2762,0.1545,0.2019,0.2231,0.4221,0.3067,0.1329,0.1349,0.1057,0.0499,0.0206,0.0073,0.0081,0.0303,0.0190,0.0212,0.0126,0.0201,0.0210,0.0041,M 134 | 0.0790,0.0707,0.0352,0.1660,0.1330,0.0226,0.0771,0.2678,0.5664,0.6609,0.5002,0.2583,0.1650,0.4347,0.4515,0.4579,0.3366,0.4000,0.5325,0.9010,0.9939,0.3689,0.1012,0.0248,0.2318,0.3981,0.2259,0.5247,0.6898,0.8316,0.4326,0.3741,0.5756,0.8043,0.7963,0.7174,0.7056,0.8148,0.7601,0.6034,0.4554,0.4729,0.4478,0.3722,0.4693,0.3839,0.0768,0.1467,0.0777,0.0469,0.0193,0.0298,0.0390,0.0294,0.0175,0.0249,0.0141,0.0073,0.0025,0.0101,M 135 | 
0.1083,0.1070,0.0257,0.0837,0.0748,0.1125,0.3322,0.4590,0.5526,0.5966,0.5304,0.2251,0.2402,0.2689,0.6646,0.6632,0.1674,0.0837,0.4331,0.8718,0.7992,0.3712,0.1703,0.1611,0.2086,0.2847,0.2211,0.6134,0.5807,0.6925,0.3825,0.4303,0.7791,0.8703,1.0000,0.9212,0.9386,0.9303,0.7314,0.4791,0.2087,0.2016,0.1669,0.2872,0.4374,0.3097,0.1578,0.0553,0.0334,0.0209,0.0172,0.0180,0.0110,0.0234,0.0276,0.0032,0.0084,0.0122,0.0082,0.0143,M 136 | 0.0094,0.0611,0.1136,0.1203,0.0403,0.1227,0.2495,0.4566,0.6587,0.5079,0.3350,0.0834,0.3004,0.3957,0.3769,0.3828,0.1247,0.1363,0.2678,0.9188,0.9779,0.3236,0.1944,0.1874,0.0885,0.3443,0.2953,0.5908,0.4564,0.7334,0.1969,0.2790,0.6212,0.8681,0.8621,0.9380,0.8327,0.9480,0.6721,0.4436,0.5163,0.3809,0.1557,0.1449,0.2662,0.1806,0.1699,0.2559,0.1129,0.0201,0.0480,0.0234,0.0175,0.0352,0.0158,0.0326,0.0201,0.0168,0.0245,0.0154,M 137 | 0.1088,0.1278,0.0926,0.1234,0.1276,0.1731,0.1948,0.4262,0.6828,0.5761,0.4733,0.2362,0.1023,0.2904,0.4713,0.4659,0.1415,0.0849,0.3257,0.9007,0.9312,0.4856,0.1346,0.1604,0.2737,0.5609,0.3654,0.6139,0.5470,0.8474,0.5638,0.5443,0.5086,0.6253,0.8497,0.8406,0.8420,0.9136,0.7713,0.4882,0.3724,0.4469,0.4586,0.4491,0.5616,0.4305,0.0945,0.0794,0.0274,0.0154,0.0140,0.0455,0.0213,0.0082,0.0124,0.0167,0.0103,0.0205,0.0178,0.0187,M 138 | 0.0430,0.0902,0.0833,0.0813,0.0165,0.0277,0.0569,0.2057,0.3887,0.7106,0.7342,0.5033,0.3000,0.1951,0.2767,0.3737,0.2507,0.2507,0.3292,0.4871,0.6527,0.8454,0.9739,1.0000,0.6665,0.5323,0.4024,0.3444,0.4239,0.4182,0.4393,0.1162,0.4336,0.6553,0.6172,0.4373,0.4118,0.3641,0.4572,0.4367,0.2964,0.4312,0.4155,0.1824,0.1487,0.0138,0.1164,0.2052,0.1069,0.0199,0.0208,0.0176,0.0197,0.0210,0.0141,0.0049,0.0027,0.0162,0.0059,0.0021,M 139 | 0.0731,0.1249,0.1665,0.1496,0.1443,0.2770,0.2555,0.1712,0.0466,0.1114,0.1739,0.3160,0.3249,0.2164,0.2031,0.2580,0.1796,0.2422,0.3609,0.1810,0.2604,0.6572,0.9734,0.9757,0.8079,0.6521,0.4915,0.5363,0.7649,0.5250,0.5101,0.4219,0.4160,0.1906,0.0223,0.4219,0.5496,0.2483,0.2034,0.2729,0.2837,0.4463,0.3178,0.0807,0.1192,0.2134,0.3241,0.2945,0.1474,0.0211,0.0361,0.0444,0.0230,0.0290,0.0141,0.0161,0.0177,0.0194,0.0207,0.0057,M 140 | 0.0164,0.0627,0.0738,0.0608,0.0233,0.1048,0.1338,0.0644,0.1522,0.0780,0.1791,0.2681,0.1788,0.1039,0.1980,0.3234,0.3748,0.2586,0.3680,0.3508,0.5606,0.5231,0.5469,0.6954,0.6352,0.6757,0.8499,0.8025,0.6563,0.8591,0.6655,0.5369,0.3118,0.3763,0.2801,0.0875,0.3319,0.4237,0.1801,0.3743,0.4627,0.1614,0.2494,0.3202,0.2265,0.1146,0.0476,0.0943,0.0824,0.0171,0.0244,0.0258,0.0143,0.0226,0.0187,0.0185,0.0110,0.0094,0.0078,0.0112,M 141 | 0.0412,0.1135,0.0518,0.0232,0.0646,0.1124,0.1787,0.2407,0.2682,0.2058,0.1546,0.2671,0.3141,0.2904,0.3531,0.5079,0.4639,0.1859,0.4474,0.4079,0.5400,0.4786,0.4332,0.6113,0.5091,0.4606,0.7243,0.8987,0.8826,0.9201,0.8005,0.6033,0.2120,0.2866,0.4033,0.2803,0.3087,0.3550,0.2545,0.1432,0.5869,0.6431,0.5826,0.4286,0.4894,0.5777,0.4315,0.2640,0.1794,0.0772,0.0798,0.0376,0.0143,0.0272,0.0127,0.0166,0.0095,0.0225,0.0098,0.0085,M 142 | 0.0707,0.1252,0.1447,0.1644,0.1693,0.0844,0.0715,0.0947,0.1583,0.1247,0.2340,0.1764,0.2284,0.3115,0.4725,0.5543,0.5386,0.3746,0.4583,0.5961,0.7464,0.7644,0.5711,0.6257,0.6695,0.7131,0.7567,0.8077,0.8477,0.9289,0.9513,0.7995,0.4362,0.4048,0.4952,0.1712,0.3652,0.3763,0.2841,0.0427,0.5331,0.6952,0.4288,0.3063,0.5835,0.5692,0.2630,0.1196,0.0983,0.0374,0.0291,0.0156,0.0197,0.0135,0.0127,0.0138,0.0133,0.0131,0.0154,0.0218,M 143 | 
0.0526,0.0563,0.1219,0.1206,0.0246,0.1022,0.0539,0.0439,0.2291,0.1632,0.2544,0.2807,0.3011,0.3361,0.3024,0.2285,0.2910,0.1316,0.1151,0.3404,0.5562,0.6379,0.6553,0.7384,0.6534,0.5423,0.6877,0.7325,0.7726,0.8229,0.8787,0.9108,0.6705,0.6092,0.7505,0.4775,0.1666,0.3749,0.3776,0.2106,0.5886,0.5628,0.2577,0.5245,0.6149,0.5123,0.3385,0.1499,0.0546,0.0270,0.0380,0.0339,0.0149,0.0335,0.0376,0.0174,0.0132,0.0103,0.0364,0.0208,M 144 | 0.0516,0.0944,0.0622,0.0415,0.0995,0.2431,0.1777,0.2018,0.2611,0.1294,0.2646,0.2778,0.4432,0.3672,0.2035,0.2764,0.3252,0.1536,0.2784,0.3508,0.5187,0.7052,0.7143,0.6814,0.5100,0.5308,0.6131,0.8388,0.9031,0.8607,0.9656,0.9168,0.7132,0.6898,0.7310,0.4134,0.1580,0.1819,0.1381,0.2960,0.6935,0.8246,0.5351,0.4403,0.6448,0.6214,0.3016,0.1379,0.0364,0.0355,0.0456,0.0432,0.0274,0.0152,0.0120,0.0129,0.0020,0.0109,0.0074,0.0078,M 145 | 0.0299,0.0688,0.0992,0.1021,0.0800,0.0629,0.0130,0.0813,0.1761,0.0998,0.0523,0.0904,0.2655,0.3099,0.3520,0.3892,0.3962,0.2449,0.2355,0.3045,0.3112,0.4698,0.5534,0.4532,0.4464,0.4670,0.4621,0.6988,0.7626,0.7025,0.7382,0.7446,0.7927,0.5227,0.3967,0.3042,0.1309,0.2408,0.1780,0.1598,0.5657,0.6443,0.4241,0.4567,0.5760,0.5293,0.3287,0.1283,0.0698,0.0334,0.0342,0.0459,0.0277,0.0172,0.0087,0.0046,0.0203,0.0130,0.0115,0.0015,M 146 | 0.0721,0.1574,0.1112,0.1085,0.0666,0.1800,0.1108,0.2794,0.1408,0.0795,0.2534,0.3920,0.3375,0.1610,0.1889,0.3308,0.2282,0.2177,0.1853,0.5167,0.5342,0.6298,0.8437,0.6756,0.5825,0.6141,0.8809,0.8375,0.3869,0.5051,0.5455,0.4241,0.1534,0.4950,0.6983,0.7109,0.5647,0.4870,0.5515,0.4433,0.5250,0.6075,0.5251,0.1359,0.4268,0.4442,0.2193,0.0900,0.1200,0.0628,0.0234,0.0309,0.0127,0.0082,0.0281,0.0117,0.0092,0.0147,0.0157,0.0129,M 147 | 0.1021,0.0830,0.0577,0.0627,0.0635,0.1328,0.0988,0.1787,0.1199,0.1369,0.2509,0.2631,0.2796,0.2977,0.3823,0.3129,0.3956,0.2093,0.3218,0.3345,0.3184,0.2887,0.3610,0.2566,0.4106,0.4591,0.4722,0.7278,0.7591,0.6579,0.7514,0.6666,0.4903,0.5962,0.6552,0.4014,0.1188,0.3245,0.3107,0.1354,0.5109,0.7988,0.7517,0.5508,0.5858,0.7292,0.5522,0.3339,0.1608,0.0475,0.1004,0.0709,0.0317,0.0309,0.0252,0.0087,0.0177,0.0214,0.0227,0.0106,M 148 | 0.0654,0.0649,0.0737,0.1132,0.2482,0.1257,0.1797,0.0989,0.2460,0.3422,0.2128,0.1377,0.4032,0.5684,0.2398,0.4331,0.5954,0.5772,0.8176,0.8835,0.5248,0.6373,0.8375,0.6699,0.7756,0.8750,0.8300,0.6896,0.3372,0.6405,0.7138,0.8202,0.6657,0.5254,0.2960,0.0704,0.0970,0.3941,0.6028,0.3521,0.3924,0.4808,0.4602,0.4164,0.5438,0.5649,0.3195,0.2484,0.1299,0.0825,0.0243,0.0210,0.0361,0.0239,0.0447,0.0394,0.0355,0.0440,0.0243,0.0098,M 149 | 0.0712,0.0901,0.1276,0.1497,0.1284,0.1165,0.1285,0.1684,0.1830,0.2127,0.2891,0.3985,0.4576,0.5821,0.5027,0.1930,0.2579,0.3177,0.2745,0.6186,0.8958,0.7442,0.5188,0.2811,0.1773,0.6607,0.7576,0.5122,0.4701,0.5479,0.4347,0.1276,0.0846,0.0927,0.0313,0.0998,0.1781,0.1586,0.3001,0.2208,0.1455,0.2895,0.3203,0.1414,0.0629,0.0734,0.0805,0.0608,0.0565,0.0286,0.0154,0.0154,0.0156,0.0054,0.0030,0.0048,0.0087,0.0101,0.0095,0.0068,M 150 | 0.0207,0.0535,0.0334,0.0818,0.0740,0.0324,0.0918,0.1070,0.1553,0.1234,0.1796,0.1787,0.1247,0.2577,0.3370,0.3990,0.1647,0.2266,0.3219,0.5356,0.8159,1.0000,0.8701,0.6889,0.6299,0.5738,0.5707,0.5976,0.4301,0.2058,0.1000,0.2247,0.2308,0.3977,0.3317,0.1726,0.1429,0.2168,0.1967,0.2140,0.3674,0.2023,0.0778,0.0925,0.2388,0.3400,0.2594,0.1102,0.0911,0.0462,0.0171,0.0033,0.0050,0.0190,0.0103,0.0121,0.0042,0.0090,0.0070,0.0099,M 151 | 
0.0209,0.0278,0.0115,0.0445,0.0427,0.0766,0.1458,0.1430,0.1894,0.1853,0.1748,0.1556,0.1476,0.1378,0.2584,0.3827,0.4784,0.5360,0.6192,0.7912,0.9264,1.0000,0.9080,0.7435,0.5557,0.3172,0.1295,0.0598,0.2722,0.3616,0.3293,0.4855,0.3936,0.1845,0.0342,0.2489,0.3837,0.3514,0.2654,0.1760,0.1599,0.0866,0.0590,0.0813,0.0492,0.0417,0.0495,0.0367,0.0115,0.0118,0.0133,0.0096,0.0014,0.0049,0.0039,0.0029,0.0078,0.0047,0.0021,0.0011,M 152 | 0.0231,0.0315,0.0170,0.0226,0.0410,0.0116,0.0223,0.0805,0.2365,0.2461,0.2245,0.1520,0.1732,0.3099,0.4380,0.5595,0.6820,0.6164,0.6803,0.8435,0.9921,1.0000,0.7983,0.5426,0.3952,0.5179,0.5650,0.3042,0.1881,0.3960,0.2286,0.3544,0.4187,0.2398,0.1847,0.3760,0.4331,0.3626,0.2519,0.1870,0.1046,0.2339,0.1991,0.1100,0.0684,0.0303,0.0674,0.0785,0.0455,0.0246,0.0151,0.0125,0.0036,0.0123,0.0043,0.0114,0.0052,0.0091,0.0008,0.0092,M 153 | 0.0131,0.0201,0.0045,0.0217,0.0230,0.0481,0.0742,0.0333,0.1369,0.2079,0.2295,0.1990,0.1184,0.1891,0.2949,0.5343,0.6850,0.7923,0.8220,0.7290,0.7352,0.7918,0.8057,0.4898,0.1934,0.2924,0.6255,0.8546,0.8966,0.7821,0.5168,0.4840,0.4038,0.3411,0.2849,0.2353,0.2699,0.4442,0.4323,0.3314,0.1195,0.1669,0.3702,0.3072,0.0945,0.1545,0.1394,0.0772,0.0615,0.0230,0.0111,0.0168,0.0086,0.0045,0.0062,0.0065,0.0030,0.0066,0.0029,0.0053,M 154 | 0.0233,0.0394,0.0416,0.0547,0.0993,0.1515,0.1674,0.1513,0.1723,0.2078,0.1239,0.0236,0.1771,0.3115,0.4990,0.6707,0.7655,0.8485,0.9805,1.0000,1.0000,0.9992,0.9067,0.6803,0.5103,0.4716,0.4980,0.6196,0.7171,0.6316,0.3554,0.2897,0.4316,0.3791,0.2421,0.0944,0.0351,0.0844,0.0436,0.1130,0.2045,0.1937,0.0834,0.1502,0.1675,0.1058,0.1111,0.0849,0.0596,0.0201,0.0071,0.0104,0.0062,0.0026,0.0025,0.0061,0.0038,0.0101,0.0078,0.0006,M 155 | 0.0117,0.0069,0.0279,0.0583,0.0915,0.1267,0.1577,0.1927,0.2361,0.2169,0.1180,0.0754,0.2782,0.3758,0.5093,0.6592,0.7071,0.7532,0.8357,0.8593,0.9615,0.9838,0.8705,0.6403,0.5067,0.5395,0.6934,0.8487,0.8213,0.5962,0.2950,0.2758,0.2885,0.1893,0.1446,0.0955,0.0888,0.0836,0.0894,0.1547,0.2318,0.2225,0.1035,0.1721,0.2017,0.1787,0.1112,0.0398,0.0305,0.0084,0.0039,0.0053,0.0029,0.0020,0.0013,0.0029,0.0020,0.0062,0.0026,0.0052,M 156 | 0.0211,0.0128,0.0015,0.0450,0.0711,0.1563,0.1518,0.1206,0.1666,0.1345,0.0785,0.0367,0.1227,0.2614,0.4280,0.6122,0.7435,0.8130,0.9006,0.9603,0.9162,0.9140,0.7851,0.5134,0.3439,0.3290,0.2571,0.3685,0.5765,0.6190,0.4613,0.3615,0.4434,0.3864,0.3093,0.2138,0.1112,0.1386,0.1523,0.0996,0.1644,0.1902,0.1313,0.1776,0.2000,0.0765,0.0727,0.0749,0.0449,0.0134,0.0174,0.0117,0.0023,0.0047,0.0049,0.0031,0.0024,0.0039,0.0051,0.0015,M 157 | 0.0047,0.0059,0.0080,0.0554,0.0883,0.1278,0.1674,0.1373,0.2922,0.3469,0.3265,0.3263,0.2301,0.1253,0.2102,0.2401,0.1928,0.1673,0.1228,0.0902,0.1557,0.3291,0.5268,0.6740,0.7906,0.8938,0.9395,0.9493,0.9040,0.9151,0.8828,0.8086,0.7180,0.6720,0.6447,0.6879,0.6241,0.4936,0.4144,0.4240,0.4546,0.4392,0.4323,0.4921,0.4710,0.3196,0.2241,0.1806,0.0990,0.0251,0.0129,0.0095,0.0126,0.0069,0.0039,0.0068,0.0060,0.0045,0.0002,0.0029,M 158 | 0.0201,0.0178,0.0274,0.0232,0.0724,0.0833,0.1232,0.1298,0.2085,0.2720,0.2188,0.3037,0.2959,0.2059,0.0906,0.1610,0.1800,0.2180,0.2026,0.1506,0.0521,0.2143,0.4333,0.5943,0.6926,0.7576,0.8787,0.9060,0.8528,0.9087,0.9657,0.9306,0.7774,0.6643,0.6604,0.6884,0.6938,0.5932,0.5774,0.6223,0.5841,0.4527,0.4911,0.5762,0.5013,0.4042,0.3123,0.2232,0.1085,0.0414,0.0253,0.0131,0.0049,0.0104,0.0102,0.0092,0.0083,0.0020,0.0048,0.0036,M 159 | 
0.0107,0.0453,0.0289,0.0713,0.1075,0.1019,0.1606,0.2119,0.3061,0.2936,0.3104,0.3431,0.2456,0.1887,0.1184,0.2080,0.2736,0.3274,0.2344,0.1260,0.0576,0.1241,0.3239,0.4357,0.5734,0.7825,0.9252,0.9349,0.9348,1.0000,0.9308,0.8478,0.7605,0.7040,0.7539,0.7990,0.7673,0.5955,0.4731,0.4840,0.4340,0.3954,0.4837,0.5379,0.4485,0.2674,0.1541,0.1359,0.0941,0.0261,0.0079,0.0164,0.0120,0.0113,0.0021,0.0097,0.0072,0.0060,0.0017,0.0036,M
160 | 0.0235,0.0220,0.0167,0.0516,0.0746,0.1121,0.1258,0.1717,0.3074,0.3199,0.2946,0.2484,0.2510,0.1806,0.1413,0.3019,0.3635,0.3887,0.2980,0.2219,0.1624,0.1343,0.2046,0.3791,0.5771,0.7545,0.8406,0.8547,0.9036,1.0000,0.9646,0.7912,0.6412,0.5986,0.6835,0.7771,0.8084,0.7426,0.6295,0.5708,0.4433,0.3361,0.3795,0.4950,0.4373,0.2404,0.1128,0.1654,0.0933,0.0225,0.0214,0.0221,0.0152,0.0083,0.0058,0.0023,0.0057,0.0052,0.0027,0.0021,M
161 | 0.0258,0.0433,0.0547,0.0681,0.0784,0.1250,0.1296,0.1729,0.2794,0.2954,0.2506,0.2601,0.2249,0.2115,0.1270,0.1193,0.1794,0.2185,0.1646,0.0740,0.0625,0.2381,0.4824,0.6372,0.7531,0.8959,0.9941,0.9957,0.9328,0.9344,0.8854,0.7690,0.6865,0.6390,0.6378,0.6629,0.5983,0.4565,0.3129,0.4158,0.4325,0.4031,0.4201,0.4557,0.3955,0.2966,0.2095,0.1558,0.0884,0.0265,0.0121,0.0091,0.0062,0.0019,0.0045,0.0079,0.0031,0.0063,0.0048,0.0050,M
162 | 0.0305,0.0363,0.0214,0.0227,0.0456,0.0665,0.0939,0.0972,0.2535,0.3127,0.2192,0.2621,0.2419,0.2179,0.1159,0.1237,0.0886,0.1755,0.1758,0.1540,0.0512,0.1805,0.4039,0.5697,0.6577,0.7474,0.8543,0.9085,0.8668,0.8892,0.9065,0.8522,0.7204,0.6200,0.6253,0.6848,0.7337,0.6281,0.5725,0.6119,0.5597,0.4965,0.5027,0.5772,0.5907,0.4803,0.3877,0.2779,0.1427,0.0424,0.0271,0.0200,0.0070,0.0070,0.0086,0.0089,0.0074,0.0042,0.0055,0.0021,M
163 | 0.0217,0.0152,0.0346,0.0346,0.0484,0.0526,0.0773,0.0862,0.1451,0.2110,0.2343,0.2087,0.1645,0.1689,0.1650,0.1967,0.2934,0.3709,0.4309,0.4161,0.5116,0.6501,0.7717,0.8491,0.9104,0.8912,0.8189,0.6779,0.5368,0.5207,0.5651,0.5749,0.5250,0.4255,0.3330,0.2331,0.1451,0.1648,0.2694,0.3730,0.4467,0.4133,0.3743,0.3021,0.2069,0.1790,0.1689,0.1341,0.0769,0.0222,0.0205,0.0123,0.0067,0.0011,0.0026,0.0049,0.0029,0.0022,0.0022,0.0032,M
164 | 0.0072,0.0027,0.0089,0.0061,0.0420,0.0865,0.1182,0.0999,0.1976,0.2318,0.2472,0.2880,0.2126,0.0708,0.1194,0.2808,0.4221,0.5279,0.5857,0.6153,0.6753,0.7873,0.8974,0.9828,1.0000,0.8460,0.6055,0.3036,0.0144,0.2526,0.4335,0.4918,0.5409,0.5961,0.5248,0.3777,0.2369,0.1720,0.1878,0.3250,0.2575,0.2423,0.2706,0.2323,0.1724,0.1457,0.1175,0.0868,0.0392,0.0131,0.0092,0.0078,0.0071,0.0081,0.0034,0.0064,0.0037,0.0036,0.0012,0.0037,M
165 | 0.0163,0.0198,0.0202,0.0386,0.0752,0.1444,0.1487,0.1484,0.2442,0.2822,0.3691,0.3750,0.3927,0.3308,0.1085,0.1139,0.3446,0.5441,0.6470,0.7276,0.7894,0.8264,0.8697,0.7836,0.7140,0.5698,0.2908,0.4636,0.6409,0.7405,0.8069,0.8420,1.0000,0.9536,0.6755,0.3905,0.1249,0.3629,0.6356,0.8116,0.7664,0.5417,0.2614,0.1723,0.2814,0.2764,0.1985,0.1502,0.1219,0.0493,0.0027,0.0077,0.0026,0.0031,0.0083,0.0020,0.0084,0.0108,0.0083,0.0033,M
166 | 0.0221,0.0065,0.0164,0.0487,0.0519,0.0849,0.0812,0.1833,0.2228,0.1810,0.2549,0.2984,0.2624,0.1893,0.0668,0.2666,0.4274,0.6291,0.7782,0.7686,0.8099,0.8493,0.9440,0.9450,0.9655,0.8045,0.4969,0.3960,0.3856,0.5574,0.7309,0.8549,0.9425,0.8726,0.6673,0.4694,0.1546,0.1748,0.3607,0.5208,0.5177,0.3702,0.2240,0.0816,0.0395,0.0785,0.1052,0.1034,0.0764,0.0216,0.0167,0.0089,0.0051,0.0015,0.0075,0.0058,0.0016,0.0070,0.0074,0.0038,M
167 | 0.0411,0.0277,0.0604,0.0525,0.0489,0.0385,0.0611,0.1117,0.1237,0.2300,0.1370,0.1335,0.2137,0.1526,0.0775,0.1196,0.0903,0.0689,0.2071,0.2975,0.2836,0.3353,0.3622,0.3202,0.3452,0.3562,0.3892,0.6622,0.9254,1.0000,0.8528,0.6297,0.5250,0.4012,0.2901,0.2007,0.3356,0.4799,0.6147,0.6246,0.4973,0.3492,0.2662,0.3137,0.4282,0.4262,0.3511,0.2458,0.1259,0.0327,0.0181,0.0217,0.0038,0.0019,0.0065,0.0132,0.0108,0.0050,0.0085,0.0044,M
168 | 0.0137,0.0297,0.0116,0.0082,0.0241,0.0253,0.0279,0.0130,0.0489,0.0874,0.1100,0.1084,0.1094,0.1023,0.0601,0.0906,0.1313,0.2758,0.3660,0.5269,0.5810,0.6181,0.5875,0.4639,0.5424,0.7367,0.9089,1.0000,0.8247,0.5441,0.3349,0.0877,0.1600,0.4169,0.6576,0.7390,0.7963,0.7493,0.6795,0.4713,0.2355,0.1704,0.2728,0.4016,0.4125,0.3470,0.2739,0.1790,0.0922,0.0276,0.0169,0.0081,0.0040,0.0025,0.0036,0.0058,0.0067,0.0035,0.0043,0.0033,M
169 | 0.0015,0.0186,0.0289,0.0195,0.0515,0.0817,0.1005,0.0124,0.1168,0.1476,0.2118,0.2575,0.2354,0.1334,0.0092,0.1951,0.3685,0.4646,0.5418,0.6260,0.7420,0.8257,0.8609,0.8400,0.8949,0.9945,1.0000,0.9649,0.8747,0.6257,0.2184,0.2945,0.3645,0.5012,0.7843,0.9361,0.8195,0.6207,0.4513,0.3004,0.2674,0.2241,0.3141,0.3693,0.2986,0.2226,0.0849,0.0359,0.0289,0.0122,0.0045,0.0108,0.0075,0.0089,0.0036,0.0029,0.0013,0.0010,0.0032,0.0047,M
170 | 0.0130,0.0120,0.0436,0.0624,0.0428,0.0349,0.0384,0.0446,0.1318,0.1375,0.2026,0.2389,0.2112,0.1444,0.0742,0.1533,0.3052,0.4116,0.5466,0.5933,0.6663,0.7333,0.7136,0.7014,0.7758,0.9137,0.9964,1.0000,0.8881,0.6585,0.2707,0.1746,0.2709,0.4853,0.7184,0.8209,0.7536,0.6496,0.4708,0.3482,0.3508,0.3181,0.3524,0.3659,0.2846,0.1714,0.0694,0.0303,0.0292,0.0116,0.0024,0.0084,0.0100,0.0018,0.0035,0.0058,0.0011,0.0009,0.0033,0.0026,M
171 | 0.0134,0.0172,0.0178,0.0363,0.0444,0.0744,0.0800,0.0456,0.0368,0.1250,0.2405,0.2325,0.2523,0.1472,0.0669,0.1100,0.2353,0.3282,0.4416,0.5167,0.6508,0.7793,0.7978,0.7786,0.8587,0.9321,0.9454,0.8645,0.7220,0.4850,0.1357,0.2951,0.4715,0.6036,0.8083,0.9870,0.8800,0.6411,0.4276,0.2702,0.2642,0.3342,0.4335,0.4542,0.3960,0.2525,0.1084,0.0372,0.0286,0.0099,0.0046,0.0094,0.0048,0.0047,0.0016,0.0008,0.0042,0.0024,0.0027,0.0041,M
172 | 0.0179,0.0136,0.0408,0.0633,0.0596,0.0808,0.2090,0.3465,0.5276,0.5965,0.6254,0.4507,0.3693,0.2864,0.1635,0.0422,0.1785,0.4394,0.6950,0.8097,0.8550,0.8717,0.8601,0.9201,0.8729,0.8084,0.8694,0.8411,0.5793,0.3754,0.3485,0.4639,0.6495,0.6901,0.5666,0.5188,0.5060,0.3885,0.3762,0.3738,0.2605,0.1591,0.1875,0.2267,0.1577,0.1211,0.0883,0.0850,0.0355,0.0219,0.0086,0.0123,0.0060,0.0187,0.0111,0.0126,0.0081,0.0155,0.0160,0.0085,M
173 | 0.0180,0.0444,0.0476,0.0698,0.1615,0.0887,0.0596,0.1071,0.3175,0.2918,0.3273,0.3035,0.3033,0.2587,0.1682,0.1308,0.2803,0.4519,0.6641,0.7683,0.6960,0.4393,0.2432,0.2886,0.4974,0.8172,1.0000,0.9238,0.8519,0.7722,0.5772,0.5190,0.6824,0.6220,0.5054,0.3578,0.3809,0.3813,0.3359,0.2771,0.3648,0.3834,0.3453,0.2096,0.1031,0.0798,0.0701,0.0526,0.0241,0.0117,0.0122,0.0122,0.0114,0.0098,0.0027,0.0025,0.0026,0.0050,0.0073,0.0022,M
174 | 0.0329,0.0216,0.0386,0.0627,0.1158,0.1482,0.2054,0.1605,0.2532,0.2672,0.3056,0.3161,0.2314,0.2067,0.1804,0.2808,0.4423,0.5947,0.6601,0.5844,0.4539,0.4789,0.5646,0.5281,0.7115,1.0000,0.9564,0.6090,0.5112,0.4000,0.0482,0.1852,0.2186,0.1436,0.1757,0.1428,0.1644,0.3089,0.3648,0.4441,0.3859,0.2813,0.1238,0.0953,0.1201,0.0825,0.0618,0.0141,0.0108,0.0124,0.0104,0.0095,0.0151,0.0059,0.0015,0.0053,0.0016,0.0042,0.0053,0.0074,M
175 | 0.0191,0.0173,0.0291,0.0301,0.0463,0.0690,0.0576,0.1103,0.2423,0.3134,0.4786,0.5239,0.4393,0.3440,0.2869,0.3889,0.4420,0.3892,0.4088,0.5006,0.7271,0.9385,1.0000,0.9831,0.9932,0.9161,0.8237,0.6957,0.4536,0.3281,0.2522,0.3964,0.4154,0.3308,0.1445,0.1923,0.3208,0.3367,0.5683,0.5505,0.3231,0.0448,0.3131,0.3387,0.4130,0.3639,0.2069,0.0859,0.0600,0.0267,0.0125,0.0040,0.0136,0.0137,0.0172,0.0132,0.0110,0.0122,0.0114,0.0068,M
176 | 0.0294,0.0123,0.0117,0.0113,0.0497,0.0998,0.1326,0.1117,0.2984,0.3473,0.4231,0.5044,0.5237,0.4398,0.3236,0.2956,0.3286,0.3231,0.4528,0.6339,0.7044,0.8314,0.8449,0.8512,0.9138,0.9985,1.0000,0.7544,0.4661,0.3924,0.3849,0.4674,0.4245,0.3095,0.0752,0.2885,0.4072,0.3170,0.2863,0.2634,0.0541,0.1874,0.3459,0.4646,0.4366,0.2581,0.1319,0.0505,0.0112,0.0059,0.0041,0.0056,0.0104,0.0079,0.0014,0.0054,0.0015,0.0006,0.0081,0.0043,M
177 | 0.0635,0.0709,0.0453,0.0333,0.0185,0.1260,0.1015,0.1918,0.3362,0.3900,0.4674,0.5632,0.5506,0.4343,0.3052,0.3492,0.3975,0.3875,0.5280,0.7198,0.7702,0.8562,0.8688,0.9236,1.0000,0.9662,0.9822,0.7360,0.4158,0.2918,0.3280,0.3690,0.3450,0.2863,0.0864,0.3724,0.4649,0.3488,0.1817,0.1142,0.1220,0.2621,0.4461,0.4726,0.3263,0.1423,0.0390,0.0406,0.0311,0.0086,0.0154,0.0048,0.0025,0.0087,0.0072,0.0095,0.0086,0.0085,0.0040,0.0051,M
178 | 0.0201,0.0165,0.0344,0.0330,0.0397,0.0443,0.0684,0.0903,0.1739,0.2571,0.2931,0.3108,0.3603,0.3002,0.2718,0.2007,0.1801,0.2234,0.3568,0.5492,0.7209,0.8318,0.8864,0.9520,0.9637,1.0000,0.9673,0.8664,0.7896,0.6345,0.5351,0.4056,0.2563,0.2894,0.3588,0.4296,0.4773,0.4516,0.3765,0.3051,0.1921,0.1184,0.1984,0.1570,0.0660,0.1294,0.0797,0.0052,0.0233,0.0152,0.0125,0.0054,0.0057,0.0137,0.0109,0.0035,0.0056,0.0105,0.0082,0.0036,M
179 | 0.0197,0.0394,0.0384,0.0076,0.0251,0.0629,0.0747,0.0578,0.1357,0.1695,0.1734,0.2470,0.3141,0.3297,0.2759,0.2056,0.1162,0.1884,0.3390,0.3926,0.4282,0.5418,0.6448,0.7223,0.7853,0.7984,0.8847,0.9582,0.8990,0.6831,0.6108,0.5480,0.5058,0.4476,0.2401,0.1405,0.1772,0.1742,0.3326,0.4021,0.3009,0.2075,0.1206,0.0255,0.0298,0.0691,0.0781,0.0777,0.0369,0.0057,0.0091,0.0134,0.0097,0.0042,0.0058,0.0072,0.0041,0.0045,0.0047,0.0054,M
180 | 0.0394,0.0420,0.0446,0.0551,0.0597,0.1416,0.0956,0.0802,0.1618,0.2558,0.3078,0.3404,0.3400,0.3951,0.3352,0.2252,0.2086,0.2248,0.3382,0.4578,0.6474,0.6708,0.7007,0.7619,0.7745,0.6767,0.7373,0.7834,0.9619,1.0000,0.8086,0.5558,0.5409,0.4988,0.3108,0.2897,0.2244,0.0960,0.2287,0.3228,0.3454,0.3882,0.3240,0.0926,0.1173,0.0566,0.0766,0.0969,0.0588,0.0050,0.0118,0.0146,0.0040,0.0114,0.0032,0.0062,0.0101,0.0068,0.0053,0.0087,M
181 | 0.0310,0.0221,0.0433,0.0191,0.0964,0.1827,0.1106,0.1702,0.2804,0.4432,0.5222,0.5611,0.5379,0.4048,0.2245,0.1784,0.2297,0.2720,0.5209,0.6898,0.8202,0.8780,0.7600,0.7616,0.7152,0.7288,0.8686,0.9509,0.8348,0.5730,0.4363,0.4289,0.4240,0.3156,0.1287,0.1477,0.2062,0.2400,0.5173,0.5168,0.1491,0.2407,0.3415,0.4494,0.4624,0.2001,0.0775,0.1232,0.0783,0.0089,0.0249,0.0204,0.0059,0.0053,0.0079,0.0037,0.0015,0.0056,0.0067,0.0054,M
182 | 0.0423,0.0321,0.0709,0.0108,0.1070,0.0973,0.0961,0.1323,0.2462,0.2696,0.3412,0.4292,0.3682,0.3940,0.2965,0.3172,0.2825,0.3050,0.2408,0.5420,0.6802,0.6320,0.5824,0.6805,0.5984,0.8412,0.9911,0.9187,0.8005,0.6713,0.5632,0.7332,0.6038,0.2575,0.0349,0.1799,0.3039,0.4760,0.5756,0.4254,0.5046,0.7179,0.6163,0.5663,0.5749,0.3593,0.2526,0.2299,0.1271,0.0356,0.0367,0.0176,0.0035,0.0093,0.0121,0.0075,0.0056,0.0021,0.0043,0.0017,M
183 | 0.0095,0.0308,0.0539,0.0411,0.0613,0.1039,0.1016,0.1394,0.2592,0.3745,0.4229,0.4499,0.5404,0.4303,0.3333,0.3496,0.3426,0.2851,0.4062,0.6833,0.7650,0.6670,0.5703,0.5995,0.6484,0.8614,0.9819,0.9380,0.8435,0.6074,0.5403,0.6890,0.5977,0.3244,0.0516,0.3157,0.3590,0.3881,0.5716,0.4314,0.3051,0.4393,0.4302,0.4831,0.5084,0.1952,0.1539,0.2037,0.1054,0.0251,0.0357,0.0181,0.0019,0.0102,0.0133,0.0040,0.0042,0.0030,0.0031,0.0033,M
184 | 0.0096,0.0404,0.0682,0.0688,0.0887,0.0932,0.0955,0.2140,0.2546,0.2952,0.4025,0.5148,0.4901,0.4127,0.3575,0.3447,0.3068,0.2945,0.4351,0.7264,0.8147,0.8103,0.6665,0.6958,0.7748,0.8688,1.0000,0.9941,0.8793,0.6482,0.5876,0.6408,0.4972,0.2755,0.0300,0.3356,0.3167,0.4133,0.6281,0.4977,0.2613,0.4697,0.4806,0.4921,0.5294,0.2216,0.1401,0.1888,0.0947,0.0134,0.0310,0.0237,0.0078,0.0144,0.0170,0.0012,0.0109,0.0036,0.0043,0.0018,M
185 | 0.0269,0.0383,0.0505,0.0707,0.1313,0.2103,0.2263,0.2524,0.3595,0.5915,0.6675,0.5679,0.5175,0.3334,0.2002,0.2856,0.2937,0.3424,0.5949,0.7526,0.8959,0.8147,0.7109,0.7378,0.7201,0.8254,0.8917,0.9820,0.8179,0.4848,0.3203,0.2775,0.2382,0.2911,0.1675,0.3156,0.1869,0.3391,0.5993,0.4124,0.1181,0.3651,0.4655,0.4777,0.3517,0.0920,0.1227,0.1785,0.1085,0.0300,0.0346,0.0167,0.0199,0.0145,0.0081,0.0045,0.0043,0.0027,0.0055,0.0057,M
186 | 0.0340,0.0625,0.0381,0.0257,0.0441,0.1027,0.1287,0.1850,0.2647,0.4117,0.5245,0.5341,0.5554,0.3915,0.2950,0.3075,0.3021,0.2719,0.5443,0.7932,0.8751,0.8667,0.7107,0.6911,0.7287,0.8792,1.0000,0.9816,0.8984,0.6048,0.4934,0.5371,0.4586,0.2908,0.0774,0.2249,0.1602,0.3958,0.6117,0.5196,0.2321,0.4370,0.3797,0.4322,0.4892,0.1901,0.0940,0.1364,0.0906,0.0144,0.0329,0.0141,0.0019,0.0067,0.0099,0.0042,0.0057,0.0051,0.0033,0.0058,M
187 | 0.0209,0.0191,0.0411,0.0321,0.0698,0.1579,0.1438,0.1402,0.3048,0.3914,0.3504,0.3669,0.3943,0.3311,0.3331,0.3002,0.2324,0.1381,0.3450,0.4428,0.4890,0.3677,0.4379,0.4864,0.6207,0.7256,0.6624,0.7689,0.7981,0.8577,0.9273,0.7009,0.4851,0.3409,0.1406,0.1147,0.1433,0.1820,0.3605,0.5529,0.5988,0.5077,0.5512,0.5027,0.7034,0.5904,0.4069,0.2761,0.1584,0.0510,0.0054,0.0078,0.0201,0.0104,0.0039,0.0031,0.0062,0.0087,0.0070,0.0042,M
188 | 0.0368,0.0279,0.0103,0.0566,0.0759,0.0679,0.0970,0.1473,0.2164,0.2544,0.2936,0.2935,0.2657,0.3187,0.2794,0.2534,0.1980,0.1929,0.2826,0.3245,0.3504,0.3324,0.4217,0.4774,0.4808,0.6325,0.8334,0.9458,1.0000,0.8425,0.5524,0.4795,0.5200,0.3968,0.1940,0.1519,0.2010,0.1736,0.1029,0.2244,0.3717,0.4449,0.3939,0.2030,0.2010,0.2187,0.1840,0.1477,0.0971,0.0224,0.0151,0.0105,0.0024,0.0018,0.0057,0.0092,0.0009,0.0086,0.0110,0.0052,M
189 | 0.0089,0.0274,0.0248,0.0237,0.0224,0.0845,0.1488,0.1224,0.1569,0.2119,0.3003,0.3094,0.2743,0.2547,0.1870,0.1452,0.1457,0.2429,0.3259,0.3679,0.3355,0.3100,0.3914,0.5280,0.6409,0.7707,0.8754,1.0000,0.9806,0.6969,0.4973,0.5020,0.5359,0.3842,0.1848,0.1149,0.1570,0.1311,0.1583,0.2631,0.3103,0.4512,0.3785,0.1269,0.1459,0.1092,0.1485,0.1385,0.0716,0.0176,0.0199,0.0096,0.0103,0.0093,0.0025,0.0044,0.0021,0.0069,0.0060,0.0018,M
190 | 0.0158,0.0239,0.0150,0.0494,0.0988,0.1425,0.1463,0.1219,0.1697,0.1923,0.2361,0.2719,0.3049,0.2986,0.2226,0.1745,0.2459,0.3100,0.3572,0.4283,0.4268,0.3735,0.4585,0.6094,0.7221,0.7595,0.8706,1.0000,0.9815,0.7187,0.5848,0.4192,0.3756,0.3263,0.1944,0.1394,0.1670,0.1275,0.1666,0.2574,0.2258,0.2777,0.1613,0.1335,0.1976,0.1234,0.1554,0.1057,0.0490,0.0097,0.0223,0.0121,0.0108,0.0057,0.0028,0.0079,0.0034,0.0046,0.0022,0.0021,M
191 | 0.0156,0.0210,0.0282,0.0596,0.0462,0.0779,0.1365,0.0780,0.1038,0.1567,0.2476,0.2783,0.2896,0.2956,0.3189,0.1892,0.1730,0.2226,0.2427,0.3149,0.4102,0.3808,0.4896,0.6292,0.7519,0.7985,0.8830,0.9915,0.9223,0.6981,0.6167,0.5069,0.3921,0.3524,0.2183,0.1245,0.1592,0.1626,0.2356,0.2483,0.2437,0.2715,0.1184,0.1157,0.1449,0.1883,0.1954,0.1492,0.0511,0.0155,0.0189,0.0150,0.0060,0.0082,0.0091,0.0038,0.0056,0.0056,0.0048,0.0024,M
192 | 0.0315,0.0252,0.0167,0.0479,0.0902,0.1057,0.1024,0.1209,0.1241,0.1533,0.2128,0.2536,0.2686,0.2803,0.1886,0.1485,0.2160,0.2417,0.2989,0.3341,0.3786,0.3956,0.5232,0.6913,0.7868,0.8337,0.9199,1.0000,0.8990,0.6456,0.5967,0.4355,0.2997,0.2294,0.1866,0.0922,0.1829,0.1743,0.2452,0.2407,0.2518,0.3184,0.1685,0.0675,0.1186,0.1833,0.1878,0.1114,0.0310,0.0143,0.0138,0.0108,0.0062,0.0044,0.0072,0.0007,0.0054,0.0035,0.0001,0.0055,M
193 | 0.0056,0.0267,0.0221,0.0561,0.0936,0.1146,0.0706,0.0996,0.1673,0.1859,0.2481,0.2712,0.2934,0.2637,0.1880,0.1405,0.2028,0.2613,0.2778,0.3346,0.3830,0.4003,0.5114,0.6860,0.7490,0.7843,0.9021,1.0000,0.8888,0.6511,0.6083,0.4463,0.2948,0.1729,0.1488,0.0801,0.1770,0.1382,0.2404,0.2046,0.1970,0.2778,0.1377,0.0685,0.0664,0.1665,0.1807,0.1245,0.0516,0.0044,0.0185,0.0072,0.0055,0.0074,0.0068,0.0084,0.0037,0.0024,0.0034,0.0007,M
194 | 0.0203,0.0121,0.0380,0.0128,0.0537,0.0874,0.1021,0.0852,0.1136,0.1747,0.2198,0.2721,0.2105,0.1727,0.2040,0.1786,0.1318,0.2260,0.2358,0.3107,0.3906,0.3631,0.4809,0.6531,0.7812,0.8395,0.9180,0.9769,0.8937,0.7022,0.6500,0.5069,0.3903,0.3009,0.1565,0.0985,0.2200,0.2243,0.2736,0.2152,0.2438,0.3154,0.2112,0.0991,0.0594,0.1940,0.1937,0.1082,0.0336,0.0177,0.0209,0.0134,0.0094,0.0047,0.0045,0.0042,0.0028,0.0036,0.0013,0.0016,M
195 | 0.0392,0.0108,0.0267,0.0257,0.0410,0.0491,0.1053,0.1690,0.2105,0.2471,0.2680,0.3049,0.2863,0.2294,0.1165,0.2127,0.2062,0.2222,0.3241,0.4330,0.5071,0.5944,0.7078,0.7641,0.8878,0.9711,0.9880,0.9812,0.9464,0.8542,0.6457,0.3397,0.3828,0.3204,0.1331,0.0440,0.1234,0.2030,0.1652,0.1043,0.1066,0.2110,0.2417,0.1631,0.0769,0.0723,0.0912,0.0812,0.0496,0.0101,0.0089,0.0083,0.0080,0.0026,0.0079,0.0042,0.0071,0.0044,0.0022,0.0014,M
196 | 0.0129,0.0141,0.0309,0.0375,0.0767,0.0787,0.0662,0.1108,0.1777,0.2245,0.2431,0.3134,0.3206,0.2917,0.2249,0.2347,0.2143,0.2939,0.4898,0.6127,0.7531,0.7718,0.7432,0.8673,0.9308,0.9836,1.0000,0.9595,0.8722,0.6862,0.4901,0.3280,0.3115,0.1969,0.1019,0.0317,0.0756,0.0907,0.1066,0.1380,0.0665,0.1475,0.2470,0.2788,0.2709,0.2283,0.1818,0.1185,0.0546,0.0219,0.0204,0.0124,0.0093,0.0072,0.0019,0.0027,0.0054,0.0017,0.0024,0.0029,M
197 | 0.0050,0.0017,0.0270,0.0450,0.0958,0.0830,0.0879,0.1220,0.1977,0.2282,0.2521,0.3484,0.3309,0.2614,0.1782,0.2055,0.2298,0.3545,0.6218,0.7265,0.8346,0.8268,0.8366,0.9408,0.9510,0.9801,0.9974,1.0000,0.9036,0.6409,0.3857,0.2908,0.2040,0.1653,0.1769,0.1140,0.0740,0.0941,0.0621,0.0426,0.0572,0.1068,0.1909,0.2229,0.2203,0.2265,0.1766,0.1097,0.0558,0.0142,0.0281,0.0165,0.0056,0.0010,0.0027,0.0062,0.0024,0.0063,0.0017,0.0028,M
198 | 0.0366,0.0421,0.0504,0.0250,0.0596,0.0252,0.0958,0.0991,0.1419,0.1847,0.2222,0.2648,0.2508,0.2291,0.1555,0.1863,0.2387,0.3345,0.5233,0.6684,0.7766,0.7928,0.7940,0.9129,0.9498,0.9835,1.0000,0.9471,0.8237,0.6252,0.4181,0.3209,0.2658,0.2196,0.1588,0.0561,0.0948,0.1700,0.1215,0.1282,0.0386,0.1329,0.2331,0.2468,0.1960,0.1985,0.1570,0.0921,0.0549,0.0194,0.0166,0.0132,0.0027,0.0022,0.0059,0.0016,0.0025,0.0017,0.0027,0.0027,M
199 | 0.0238,0.0318,0.0422,0.0399,0.0788,0.0766,0.0881,0.1143,0.1594,0.2048,0.2652,0.3100,0.2381,0.1918,0.1430,0.1735,0.1781,0.2852,0.5036,0.6166,0.7616,0.8125,0.7793,0.8788,0.8813,0.9470,1.0000,0.9739,0.8446,0.6151,0.4302,0.3165,0.2869,0.2017,0.1206,0.0271,0.0580,0.1262,0.1072,0.1082,0.0360,0.1197,0.2061,0.2054,0.1878,0.2047,0.1716,0.1069,0.0477,0.0170,0.0186,0.0096,0.0071,0.0084,0.0038,0.0026,0.0028,0.0013,0.0035,0.0060,M
200 | 0.0116,0.0744,0.0367,0.0225,0.0076,0.0545,0.1110,0.1069,0.1708,0.2271,0.3171,0.2882,0.2657,0.2307,0.1889,0.1791,0.2298,0.3715,0.6223,0.7260,0.7934,0.8045,0.8067,0.9173,0.9327,0.9562,1.0000,0.9818,0.8684,0.6381,0.3997,0.3242,0.2835,0.2413,0.2321,0.1260,0.0693,0.0701,0.1439,0.1475,0.0438,0.0469,0.1476,0.1742,0.1555,0.1651,0.1181,0.0720,0.0321,0.0056,0.0202,0.0141,0.0103,0.0100,0.0034,0.0026,0.0037,0.0044,0.0057,0.0035,M
201 | 0.0131,0.0387,0.0329,0.0078,0.0721,0.1341,0.1626,0.1902,0.2610,0.3193,0.3468,0.3738,0.3055,0.1926,0.1385,0.2122,0.2758,0.4576,0.6487,0.7154,0.8010,0.7924,0.8793,1.0000,0.9865,0.9474,0.9474,0.9315,0.8326,0.6213,0.3772,0.2822,0.2042,0.2190,0.2223,0.1327,0.0521,0.0618,0.1416,0.1460,0.0846,0.1055,0.1639,0.1916,0.2085,0.2335,0.1964,0.1300,0.0633,0.0183,0.0137,0.0150,0.0076,0.0032,0.0037,0.0071,0.0040,0.0009,0.0015,0.0085,M
202 | 0.0335,0.0258,0.0398,0.0570,0.0529,0.1091,0.1709,0.1684,0.1865,0.2660,0.3188,0.3553,0.3116,0.1965,0.1780,0.2794,0.2870,0.3969,0.5599,0.6936,0.7969,0.7452,0.8203,0.9261,0.8810,0.8814,0.9301,0.9955,0.8576,0.6069,0.3934,0.2464,0.1645,0.1140,0.0956,0.0080,0.0702,0.0936,0.0894,0.1127,0.0873,0.1020,0.1964,0.2256,0.1814,0.2012,0.1688,0.1037,0.0501,0.0136,0.0130,0.0120,0.0039,0.0053,0.0062,0.0046,0.0045,0.0022,0.0005,0.0031,M
203 | 0.0272,0.0378,0.0488,0.0848,0.1127,0.1103,0.1349,0.2337,0.3113,0.3997,0.3941,0.3309,0.2926,0.1760,0.1739,0.2043,0.2088,0.2678,0.2434,0.1839,0.2802,0.6172,0.8015,0.8313,0.8440,0.8494,0.9168,1.0000,0.7896,0.5371,0.6472,0.6505,0.4959,0.2175,0.0990,0.0434,0.1708,0.1979,0.1880,0.1108,0.1702,0.0585,0.0638,0.1391,0.0638,0.0581,0.0641,0.1044,0.0732,0.0275,0.0146,0.0091,0.0045,0.0043,0.0043,0.0098,0.0054,0.0051,0.0065,0.0103,M
204 | 0.0187,0.0346,0.0168,0.0177,0.0393,0.1630,0.2028,0.1694,0.2328,0.2684,0.3108,0.2933,0.2275,0.0994,0.1801,0.2200,0.2732,0.2862,0.2034,0.1740,0.4130,0.6879,0.8120,0.8453,0.8919,0.9300,0.9987,1.0000,0.8104,0.6199,0.6041,0.5547,0.4160,0.1472,0.0849,0.0608,0.0969,0.1411,0.1676,0.1200,0.1201,0.1036,0.1977,0.1339,0.0902,0.1085,0.1521,0.1363,0.0858,0.0290,0.0203,0.0116,0.0098,0.0199,0.0033,0.0101,0.0065,0.0115,0.0193,0.0157,M
205 | 0.0323,0.0101,0.0298,0.0564,0.0760,0.0958,0.0990,0.1018,0.1030,0.2154,0.3085,0.3425,0.2990,0.1402,0.1235,0.1534,0.1901,0.2429,0.2120,0.2395,0.3272,0.5949,0.8302,0.9045,0.9888,0.9912,0.9448,1.0000,0.9092,0.7412,0.7691,0.7117,0.5304,0.2131,0.0928,0.1297,0.1159,0.1226,0.1768,0.0345,0.1562,0.0824,0.1149,0.1694,0.0954,0.0080,0.0790,0.1255,0.0647,0.0179,0.0051,0.0061,0.0093,0.0135,0.0063,0.0063,0.0034,0.0032,0.0062,0.0067,M
206 | 0.0522,0.0437,0.0180,0.0292,0.0351,0.1171,0.1257,0.1178,0.1258,0.2529,0.2716,0.2374,0.1878,0.0983,0.0683,0.1503,0.1723,0.2339,0.1962,0.1395,0.3164,0.5888,0.7631,0.8473,0.9424,0.9986,0.9699,1.0000,0.8630,0.6979,0.7717,0.7305,0.5197,0.1786,0.1098,0.1446,0.1066,0.1440,0.1929,0.0325,0.1490,0.0328,0.0537,0.1309,0.0910,0.0757,0.1059,0.1005,0.0535,0.0235,0.0155,0.0160,0.0029,0.0051,0.0062,0.0089,0.0140,0.0138,0.0077,0.0031,M
207 | 0.0303,0.0353,0.0490,0.0608,0.0167,0.1354,0.1465,0.1123,0.1945,0.2354,0.2898,0.2812,0.1578,0.0273,0.0673,0.1444,0.2070,0.2645,0.2828,0.4293,0.5685,0.6990,0.7246,0.7622,0.9242,1.0000,0.9979,0.8297,0.7032,0.7141,0.6893,0.4961,0.2584,0.0969,0.0776,0.0364,0.1572,0.1823,0.1349,0.0849,0.0492,0.1367,0.1552,0.1548,0.1319,0.0985,0.1258,0.0954,0.0489,0.0241,0.0042,0.0086,0.0046,0.0126,0.0036,0.0035,0.0034,0.0079,0.0036,0.0048,M
208 | 0.0260,0.0363,0.0136,0.0272,0.0214,0.0338,0.0655,0.1400,0.1843,0.2354,0.2720,0.2442,0.1665,0.0336,0.1302,0.1708,0.2177,0.3175,0.3714,0.4552,0.5700,0.7397,0.8062,0.8837,0.9432,1.0000,0.9375,0.7603,0.7123,0.8358,0.7622,0.4567,0.1715,0.1549,0.1641,0.1869,0.2655,0.1713,0.0959,0.0768,0.0847,0.2076,0.2505,0.1862,0.1439,0.1470,0.0991,0.0041,0.0154,0.0116,0.0181,0.0146,0.0129,0.0047,0.0039,0.0061,0.0040,0.0036,0.0061,0.0115,M
209 | 
--------------------------------------------------------------------------------
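The rows above are the tail of the raw sonar-returns CSV: each line holds 60 energy readings in the range 0.0 to 1.0, one per frequency band, followed by a class label (in the UCI sonar data, M marks a metal cylinder, i.e. a mine, and R a rock). A minimal loading sketch follows, assuming the file is saved locally as sonar.all-data with no header row; the path and the M=1/R=0 encoding are choices made for illustration, not part of the data file itself:

import pandas as pd

# Read the headerless CSV: 60 float feature columns plus one label column.
df = pd.read_csv("sonar.all-data", header=None)  # assumed local path

# Split into a feature matrix and an integer-encoded target vector.
X = df.iloc[:, :60].values                        # 60 sonar energy readings per row
y = (df.iloc[:, 60] == "M").astype(int).values    # M (mine) -> 1, R (rock) -> 0

print(X.shape, y.shape)                           # e.g. (208, 60) (208,)

With the label encoded this way, X and y can feed straight into a scikit-learn train/test split or a Keras classifier like the ones used elsewhere in this repository.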