├── utils.py
├── .gitignore
├── pca.py
├── README.md
├── kmeans.py
├── quality_metrics.py
├── svm_multi_class.py
├── regression.py
├── mldata
│   ├── wine.data
│   └── housing.data
├── LICENSE
├── NumericTables_example.ipynb
└── SVM_example.ipynb

--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | from daal.data_management import HomogenNumericTable, BlockDescriptor_Float64, readOnly
2 | 
3 | def getArrayFromNT(table, nrows=0):
4 |     bd = BlockDescriptor_Float64()
5 |     if nrows == 0:
6 |         nrows = table.getNumberOfRows()
7 |     table.getBlockOfRows(0, nrows, readOnly, bd)
8 |     npa = bd.getArray()
9 |     table.releaseBlockOfRows(bd)
10 |     return npa
11 | 
12 | def printNT(table, nrows = 0, message=''):
13 |     npa = getArrayFromNT(table, nrows)
14 |     print(message, '\n', npa)
15 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | 
27 | # PyInstaller
28 | #  Usually these files are written by a python script from a template
29 | #  before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 | 
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 | 
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 | 
48 | # Translations
49 | *.mo
50 | *.pot
51 | 
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 | 
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 | 
60 | # Scrapy stuff:
61 | .scrapy
62 | 
63 | # Sphinx documentation
64 | docs/_build/
65 | 
66 | # PyBuilder
67 | target/
68 | 
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 | 
72 | # pyenv
73 | .python-version
74 | 
75 | # celery beat schedule file
76 | celerybeat-schedule
77 | 
78 | # dotenv
79 | .env
80 | 
81 | # virtualenv
82 | .venv/
83 | venv/
84 | ENV/
85 | 
86 | # Spyder project settings
87 | .spyderproject
88 | 
89 | # Rope project settings
90 | .ropeproject
--------------------------------------------------------------------------------
/pca.py:
--------------------------------------------------------------------------------
1 | 
2 | """ A class for PCA using pyDAAL """
3 | 
4 | __author__ = 'Zhang Zhang'
5 | __email__ = 'zhang.zhang@intel.com'
6 | 
7 | import daal.algorithms.pca as pca
8 | from daal.data_management import HomogenNumericTable
9 | 
10 | import numpy as np
11 | import warnings
12 | 
13 | class PCA:
14 | 
15 |     def __init__(self, method = 'correlation'):
16 |         """Initialize class parameters
17 | 
18 |         Args:
19 |             method: The default method is based on the correlation matrix. It
20 |             can also be the SVD method ('svd')
21 |         """
22 | 
23 |         if method != 'correlation' and method != 'svd':
24 |             warnings.warn(method +
25 |                 ' method is not supported. Default method is used',
26 |                 UserWarning)
27 | 
28 |         self.method_ = method
29 |         self.eigenvalues_ = None
30 |         self.eigenvectors_ = None
31 | 
32 | 
33 |     def compute(self, data):
34 |         """Compute PCA on the input data
35 | 
36 |         Args:
37 |             data: Input data
38 |         """
39 | 
40 |         # Create an algorithm object for PCA
41 |         if self.method_ == 'svd':
42 |             pca_alg = pca.Batch_Float64SvdDense()
43 |         else:
44 |             pca_alg = pca.Batch_Float64CorrelationDense()
45 | 
46 |         # Set input
47 |         pca_alg.input.setDataset(pca.data, data)
48 |         # Compute
49 |         result = pca_alg.compute()
50 |         self.eigenvalues_ = result.get(pca.eigenvalues)
51 |         self.eigenvectors_ = result.get(pca.eigenvectors)
52 | 
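53 | # Example usage (a minimal sketch; it assumes pyDAAL is installed and that
54 | # `x` is a C-contiguous NumPy array):
55 | #
56 | #     import numpy as np
57 | #     from daal.data_management import HomogenNumericTable
58 | #
59 | #     x = np.random.randn(100, 5)    # 100 samples, 5 features
60 | #     pr = PCA(method='correlation')
61 | #     pr.compute(HomogenNumericTable(x))
62 | #     # pr.eigenvalues_ and pr.eigenvectors_ now hold the results
63 | #     # as NumericTables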
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pyDAAL Tutorials
2 | This is a set of tutorials for using pyDAAL, i.e. the Python API of [Intel Data Analytics Acceleration Library](https://software.intel.com/en-us/intel-daal).
3 | It is designed to provide a quick introduction to pyDAAL features and the API
4 | for Python developers who are already familiar with basic concepts and techniques in
5 | machine learning.
6 | 
7 | The tutorials are spread across a collection of [Jupyter notebooks](http://jupyter.org/).
8 | The recommended way of using these notebooks is to install [Intel Distribution for
9 | Python](https://software.intel.com/en-us/intel-distribution-for-python) on your
10 | computer, which
11 | consists of a large set of commonly used mathematical and statistical Python
12 | packages that are optimized for Intel architectures.
13 | 
14 | ### Install Intel Distribution for Python through [conda](https://www.continuum.io/downloads)
15 | 1. Install the latest version of [Anaconda](https://www.continuum.io/downloads).
16 |    - Choose the Python 3.5 version
17 | 2. From the shell prompt (on Windows, use **Anaconda Prompt**), execute these
18 | commands:
19 | 
20 | ```bash
21 | conda config --add channels intel
22 | conda create --name idp intelpython3_full python=3
23 | source activate idp (on Linux and OS X)
24 | activate idp (on Windows)
25 | ```
26 | More detailed instructions can be found in [this online article](https://software.intel.com/en-us/articles/using-intel-distribution-for-python-with-anaconda).
27 | 
28 | ### Notebooks
29 | * [Data management in pyDAAL](./NumericTables_example.ipynb)
30 | * [K-Means and PCA](./kmeans_example.ipynb)
31 | * [Linear regression](./LR_example.ipynb)
32 | * [SVM and multi-class classifier](./SVM_example.ipynb)
33 | 
34 | Data files used in the tutorials are in the `./mldata` folder. These data files
35 | are downloaded from the [UCI Machine Learning Repository](https://archive.ics.uci.edu/ml/datasets).
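36 | 
37 | ### Quick start
38 | The helper classes in this repo can also be used outside the notebooks. Below
39 | is a minimal sketch (assuming pyDAAL is installed and the script is run from
40 | the repository root); see the docstrings in each module for the full APIs:
41 | 
42 | ```python
43 | import numpy as np
44 | from daal.data_management import HomogenNumericTable
45 | from kmeans import KMeans
46 | from utils import printNT
47 | 
48 | # Load the wine data, skipping the class label in column 0
49 | data = np.genfromtxt('./mldata/wine.data', dtype=np.double,
50 |                      delimiter=',', usecols=list(range(1, 14)))
51 | clustering = KMeans(nclusters=3)
52 | clustering.compute(HomogenNumericTable(data))
53 | printNT(clustering.niterations_, message='Iterations used:')
54 | ```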
55 | 
56 | 
--------------------------------------------------------------------------------
/kmeans.py:
--------------------------------------------------------------------------------
1 | 
2 | """ A class for K-Means clustering """
3 | 
4 | __author__ = 'Zhang Zhang'
5 | __email__ = 'zhang.zhang@intel.com'
6 | 
7 | import daal.algorithms.kmeans as kmeans
8 | from daal.algorithms.kmeans import init
9 | from daal.data_management import HomogenNumericTable
10 | 
11 | import numpy as np
12 | 
13 | 
14 | class KMeans:
15 | 
16 |     def __init__(self, nclusters, randomseed = None):
17 |         """Initialize class parameters
18 | 
19 |         Args:
20 |             nclusters: Number of clusters
21 |             randomseed: An integer used to seed the random number generator
22 |         """
23 | 
24 |         self.nclusters_ = nclusters
25 |         self.seed_ = 1234 if randomseed is None else randomseed
26 |         self.centroids_ = None
27 |         self.assignments_ = None
28 |         self.goalfunction_ = None
29 |         self.niterations_ = None
30 | 
31 | 
32 |     def compute(self, data, centroids = None, maxiters = 100):
33 |         """Compute K-Means clustering for the input data
34 | 
35 |         Args:
36 |             data: Input data to be clustered
37 |             centroids: User defined input centroids. If None then initial
38 |             centroids will be randomly chosen
39 |             maxiters: The maximum number of iterations
40 |         """
41 | 
42 |         if centroids is None:
43 |             # Create an algorithm object for centroids initialization
44 |             init_alg = init.Batch_Float64RandomDense(self.nclusters_)
45 |             # Set input
46 |             init_alg.input.set(init.data, data)
47 |             # Set parameters
48 |             init_alg.parameter.seed = self.seed_
49 |             # Compute initial centroids
50 |             self.centroids_ = init_alg.compute().get(init.centroids)
51 |         else:
52 |             # Use the user-supplied centroids
53 |             self.centroids_ = centroids
54 | 
55 |         # Create an algorithm object for clustering
56 |         clustering_alg = kmeans.Batch_Float64LloydDense(
57 |                 self.nclusters_,
58 |                 maxiters)
59 |         # Set input
60 |         clustering_alg.input.set(kmeans.data, data)
61 |         clustering_alg.input.set(kmeans.inputCentroids, self.centroids_)
62 |         # Compute
63 |         result = clustering_alg.compute()
64 |         self.centroids_ = result.get(kmeans.centroids)
65 |         self.assignments_ = result.get(kmeans.assignments)
66 |         self.goalfunction_ = result.get(kmeans.goalFunction)
67 |         self.niterations_ = result.get(kmeans.nIterations)
68 | 
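69 | # Example usage (a minimal sketch; assumes pyDAAL is installed and the wine
70 | # data file from this repo is available):
71 | #
72 | #     import numpy as np
73 | #     from daal.data_management import HomogenNumericTable
74 | #
75 | #     x = np.genfromtxt('./mldata/wine.data', dtype=np.double,
76 | #                       delimiter=',', usecols=list(range(1, 14)))
77 | #     km = KMeans(nclusters=3, randomseed=7777)
78 | #     km.compute(HomogenNumericTable(x), maxiters=50)
79 | #     # km.centroids_, km.assignments_, km.goalfunction_ and km.niterations_
80 | #     # now hold the clustering results as NumericTables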
--------------------------------------------------------------------------------
/quality_metrics.py:
--------------------------------------------------------------------------------
1 | 
2 | """ A class for classifier quality metrics."""
3 | 
4 | __author__ = 'Zhang Zhang'
5 | __email__ = 'zhang.zhang@intel.com'
6 | 
7 | from daal.algorithms.multi_class_classifier import quality_metric_set as multiclass_quality
8 | from daal.algorithms.classifier.quality_metric import multiclass_confusion_matrix
9 | from daal.algorithms.svm import quality_metric_set as twoclass_quality
10 | from daal.algorithms.classifier.quality_metric import binary_confusion_matrix
11 | from daal.data_management import BlockDescriptor_Float64, readOnly
12 | 
13 | from collections import namedtuple
14 | 
15 | 
16 | # Two-class quality metrics type
17 | TwoClassMetrics = namedtuple('TwoClassMetrics',
18 |         ['accuracy', 'precision', 'recall', 'fscore', 'specificity', 'auc'])
19 | 
20 | # Multi-class quality metrics type
21 | MultiClassMetrics = namedtuple('MultiClassMetrics',
22 |         ['accuracy', 'error_rate', 'micro_precision', 'micro_recall',
23 |          'micro_fscore', 'macro_precision', 'macro_recall', 'macro_fscore'])
24 | 
25 | 
26 | class ClassifierQualityMetrics:
27 | 
28 | 
29 |     def __init__(self, truth, predictions, nclasses = 2):
30 |         """Initialize class parameters
31 | 
32 |         Args:
33 |             truth: ground truth
34 |             predictions: predicted labels
35 |             nclasses: number of classes
36 |         """
37 | 
38 |         self._truth = truth
39 |         self._predictions = predictions
40 |         if nclasses == 2:
41 |             self._computeTwoclassQualityMetrics()
42 |         elif nclasses > 2:
43 |             self._computeMulticlassQualityMetrics(nclasses)
44 |         else:
45 |             raise ValueError('nclasses must be at least 2')
46 | 
47 | 
48 | 
49 |     def get(self, metric):
50 |         """Get a metric from the quality metrics collection
51 | 
52 |         Args:
53 |             metric: name of the metric to return
54 | 
55 |         Returns:
56 |             A numeric value for the given metric
57 |         """
58 | 
59 |         return getattr(self._metrics, metric)
60 | 
61 | 
62 | 
63 |     def _computeTwoclassQualityMetrics(self):
64 |         # Alg object for quality metrics computation
65 |         quality_alg = twoclass_quality.Batch()
66 |         # Get access to the input parameter
67 |         input = quality_alg.getInputDataCollection().getInput(
68 |                 twoclass_quality.confusionMatrix)
69 |         # Pass ground truth and predictions as input
70 |         input.set(binary_confusion_matrix.groundTruthLabels, self._truth)
71 |         input.set(binary_confusion_matrix.predictedLabels, self._predictions)
72 |         # Compute confusion matrix
73 |         confusion = quality_alg.compute().getResult(twoclass_quality.confusionMatrix)
74 |         # Retrieve quality metrics from the confusion matrix
75 |         metrics = confusion.get(binary_confusion_matrix.binaryMetrics)
76 |         # Convert the metrics into a Python namedtuple and store it
77 |         block = BlockDescriptor_Float64()
78 |         metrics.getBlockOfRows(0, 1, readOnly, block)
79 |         x = block.getArray().flatten()
80 |         self._metrics = TwoClassMetrics(*x)
81 |         metrics.releaseBlockOfRows(block)
82 | 
83 | 
84 | 
85 |     def _computeMulticlassQualityMetrics(self, nclasses):
86 |         # Alg object for quality metrics computation
87 |         quality_alg = multiclass_quality.Batch(nclasses)
88 |         # Get access to the input parameter
89 |         input = quality_alg.getInputDataCollection().getInput(
90 |                 multiclass_quality.confusionMatrix)
91 |         # Pass ground truth and predictions as input
92 |         input.set(multiclass_confusion_matrix.groundTruthLabels, self._truth)
93 |         input.set(multiclass_confusion_matrix.predictedLabels, self._predictions)
94 |         # Compute confusion matrix
95 |         confusion = quality_alg.compute().getResult(multiclass_quality.confusionMatrix)
96 |         # Retrieve quality metrics from the confusion matrix
97 |         metrics = confusion.get(multiclass_confusion_matrix.multiClassMetrics)
98 |         # Convert the metrics into a Python namedtuple and store it
99 |         block = BlockDescriptor_Float64()
100 |         metrics.getBlockOfRows(0, 1, readOnly, block)
101 |         x = block.getArray().flatten()
102 |         self._metrics = MultiClassMetrics(*x)
103 |         metrics.releaseBlockOfRows(block)
104 | 
105 | 
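106 | # Example usage (a minimal sketch; `truth` and `predictions` are assumed to
107 | # be one-column NumericTables of class labels, e.g. from an SVM classifier):
108 | #
109 | #     qm = ClassifierQualityMetrics(truth, predictions, nclasses=3)
110 | #     print('Accuracy: {:.4f}'.format(qm.get('accuracy')))
111 | #     print('Macro F1: {:.4f}'.format(qm.get('macro_fscore')))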
--------------------------------------------------------------------------------
/svm_multi_class.py:
--------------------------------------------------------------------------------
1 | 
2 | """ A class for multi-class classifier based on SVM algorithm"""
3 | 
4 | __author__ = 'Zhang Zhang'
5 | __email__ = 'zhang.zhang@intel.com'
6 | 
7 | from daal.algorithms.svm import training as svm_training
8 | from daal.algorithms.svm import prediction as svm_prediction
9 | from daal.algorithms.kernel_function import linear, rbf
10 | from daal.algorithms.multi_class_classifier import training as multiclass_training
11 | from daal.algorithms.multi_class_classifier import prediction as multiclass_prediction
12 | from daal.algorithms.classifier import training as training_params
13 | from daal.algorithms.classifier import prediction as prediction_params
14 | 
15 | class MulticlassSVM:
16 | 
17 | 
18 |     def __init__(self, nclasses):
19 |         """Initialize class parameters
20 | 
21 |         Args:
22 |             nclasses: number of classes
23 |         """
24 | 
25 |         self._nclasses = nclasses
26 |         # Create an SVM two-class classifier object for training
27 |         self._svm_training_alg = svm_training.Batch_Float64DefaultDense()
28 |         # Create an SVM two-class classifier object for prediction
29 |         self._svm_prediction_alg = svm_prediction.Batch_Float64DefaultDense()
30 | 
31 | 
32 |     def setSVMParams(self,
33 |                      cachesize = 1000000000,
34 |                      C = 1.0,
35 |                      sigma = 1.0,
36 |                      kernel = linear.Batch_Float64DefaultDense(),
37 |                      shrinking = False):
38 |         """Tweak SVM training and prediction algorithm parameters
39 | 
40 |         Args:
41 |             cachesize: size of the cache, in bytes, for storing the kernel matrix
42 |             C: penalty parameter of the error term
43 |             sigma: coefficient of the rbf kernel
44 |             kernel: SVM kernel, can be either linear or rbf
45 |             shrinking: whether to do the shrinking optimization or not
46 |         """
47 | 
48 |         self._svm_training_alg.parameter.cacheSize = cachesize
49 |         self._svm_training_alg.parameter.C = C
50 |         if hasattr(kernel.parameter, 'sigma'):
51 |             kernel.parameter.sigma = sigma
52 |         self._svm_training_alg.parameter.kernel = kernel
53 |         self._svm_prediction_alg.parameter.kernel = kernel
54 |         self._svm_training_alg.parameter.doShrinking = shrinking
55 | 
56 | 
57 | 
58 |     def train(self, data, labels):
59 |         """Train a multi-class SVM model.
60 | 
61 |         Args:
62 |             data: training data
63 |             labels: ground truth known for training data
64 | 
65 |         Returns:
66 |             A multi-class classifier model object
67 |         """
68 | 
69 |         # Create a multiclass classifier object based on the
70 |         # SVM two-class classifier
71 |         multiclass_training_alg = multiclass_training.Batch_Float64OneAgainstOne()
72 |         multiclass_training_alg.parameter.nClasses = self._nclasses
73 |         multiclass_training_alg.parameter.training = self._svm_training_alg
74 |         multiclass_training_alg.parameter.prediction = self._svm_prediction_alg
75 | 
76 |         # Pass training data and labels
77 |         multiclass_training_alg.input.set(training_params.data, data)
78 |         multiclass_training_alg.input.set(training_params.labels, labels)
79 | 
80 |         # Build the model and return it
81 |         return multiclass_training_alg.compute().get(training_params.model)
82 | 
83 | 
84 |     def predict(self, model, testdata):
85 |         """Make predictions for unseen data using a learned model.
86 | 
87 |         Args:
88 |             model: a learned SVM model
89 |             testdata: new data
90 | 
91 |         Returns:
92 |             A NumericTable containing predicted labels
93 |         """
94 | 
95 |         # Create a multiclass classifier object based on the
96 |         # SVM two-class classifier
97 |         multiclass_prediction_alg = multiclass_prediction.Batch_Float64DefaultDenseOneAgainstOne()
98 |         multiclass_prediction_alg.parameter.nClasses = self._nclasses
99 |         multiclass_prediction_alg.parameter.training = self._svm_training_alg
100 |         multiclass_prediction_alg.parameter.prediction = self._svm_prediction_alg
101 | 
102 |         # Pass a model and input data
103 |         multiclass_prediction_alg.input.setModel(prediction_params.model, model)
104 |         multiclass_prediction_alg.input.setTable(prediction_params.data, testdata)
105 | 
106 |         # Return prediction results
107 |         results = multiclass_prediction_alg.compute()
108 |         return results.get(prediction_params.prediction)
109 | 
110 | 
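111 | # Example usage (a minimal sketch; `train_data`, `train_labels` and
112 | # `test_data` are assumed to be NumericTables prepared by the caller):
113 | #
114 | #     classifier = MulticlassSVM(nclasses=3)
115 | #     classifier.setSVMParams(C=2.0, kernel=rbf.Batch_Float64DefaultDense(),
116 | #                             sigma=0.5)
117 | #     model = classifier.train(train_data, train_labels)
118 | #     predictions = classifier.predict(model, test_data)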
--------------------------------------------------------------------------------
/regression.py:
--------------------------------------------------------------------------------
1 | 
2 | """ Classes for Linear Regression and Ridge Regression """
3 | 
4 | __author__ = 'Zhang Zhang'
5 | __email__ = 'zhang.zhang@intel.com'
6 | 
7 | from daal.algorithms.linear_regression import training as lr_training
8 | from daal.algorithms.linear_regression import prediction as lr_prediction
9 | from daal.algorithms.ridge_regression import training as ridge_training
10 | from daal.algorithms.ridge_regression import prediction as ridge_prediction
11 | from daal.data_management import HomogenNumericTable
12 | 
13 | from utils import *
14 | 
15 | import numpy as np
16 | import warnings
17 | 
18 | def getBetas(linear_model):
19 |     """Return regression coefficients for a given linear model
20 | 
21 |     Args:
22 |         linear_model: A trained model
23 | 
24 |     Returns:
25 |         An n-by-(k+1) NumericTable containing betas, where n is the number of dependent
26 |         variables and k is the number of features (independent variables)
27 |     """
28 | 
29 |     return linear_model.getBeta()
30 | 
31 | 
32 | 
33 | 
34 | def mse(values, fitted_values):
35 |     """Return Mean Squared Errors for fitted values w.r.t. true values
36 | 
37 |     Args:
38 |         values: True values. NumericTable, nsamples-by-noutputs
39 |         fitted_values: Fitted values. NumericTable, nsamples-by-noutputs
40 | 
41 |     Returns:
42 |         A tuple containing the MSE for each output
43 |     """
44 | 
45 |     y_t = getArrayFromNT(values)
46 |     y_p = getArrayFromNT(fitted_values)
47 |     rss = ((y_t - y_p) ** 2).sum(axis = 0)
48 |     mse = rss / y_t.shape[0]
49 |     return tuple(mse)
50 | 
51 | 
52 | 
53 | 
54 | def score(y_true, y_pred):
55 |     """Compute R-squared for predicted values w.r.t. true values
56 | 
57 |     Args:
58 |         y_true: True values. NumericTable, shape = (nsamples, noutputs)
59 |         y_pred: Predicted values. NumericTable, shape = (nsamples, noutputs)
60 | 
61 |     Returns:
62 |         R2: A tuple with noutputs values
63 |     """
64 | 
65 |     y_t = getArrayFromNT(y_true)
66 |     y_p = getArrayFromNT(y_pred)
67 |     rss = ((y_t - y_p) ** 2).sum(axis = 0)
68 |     tss = ((y_t - y_t.mean(axis = 0)) ** 2).sum(axis = 0)
69 |     return tuple(1 - rss/tss)
70 | 
71 | 
72 | 
73 | class LinearRegression:
74 | 
75 | 
76 |     def __init__(self, method = 'normEq'):
77 |         """Initialize class parameters
78 | 
79 |         Args:
80 |             method: The default method is based on the Normal Equation ('normEq').
81 |             It can also be the QR method ('qr')
82 |         """
83 | 
84 |         if method != 'normEq' and method != 'qr':
85 |             warnings.warn(method +
86 |                 ' method is not supported. Default method is used',
87 |                 UserWarning)
88 | 
89 |         self.method_ = method
90 | 
91 | 
92 | 
93 |     def train(self, data, responses):
94 |         """Train a Linear Regression model.
95 | 
96 |         Args:
97 |             data: Training data
98 |             responses: Known responses to the training data
99 | 
100 |         Returns:
101 |             A Linear Regression model object
102 |         """
103 | 
104 |         # Create a training algorithm object
105 |         if self.method_ == 'qr':
106 |             lr_training_alg = lr_training.Batch_Float64QrDense()
107 |         else:
108 |             lr_training_alg = lr_training.Batch_Float64NormEqDense()
109 |         # Set input
110 |         lr_training_alg.input.set(lr_training.data, data)
111 |         lr_training_alg.input.set(lr_training.dependentVariables, responses)
112 |         # Compute
113 |         results = lr_training_alg.compute()
114 |         # Return the trained model
115 |         return results.get(lr_training.model)
116 | 
117 | 
118 | 
119 |     def predict(self, model, testdata, intercept = True):
120 |         """Make prediction for unseen data using a trained model
121 | 
122 |         Args:
123 |             model: A trained model
124 |             testdata: New data
125 |             intercept: A boolean to indicate if the intercept needs to be computed
126 | 
127 |         Returns:
128 |             A NumericTable containing predicted responses
129 |         """
130 | 
131 |         # Create a prediction algorithm object
132 |         lr_prediction_alg = lr_prediction.Batch_Float64DefaultDense()
133 |         # Set input
134 |         lr_prediction_alg.input.setModel(lr_prediction.model, model)
135 |         lr_prediction_alg.input.setTable(lr_prediction.data, testdata)
136 |         # Set parameters
137 |         lr_prediction_alg.parameter.interceptFlag = intercept
138 |         # Compute
139 |         results = lr_prediction_alg.compute()
140 |         return results.get(lr_prediction.prediction)
141 | 
142 | 
143 | 
144 | class Ridge:
145 | 
146 |     def __init__(self):
147 |         pass
148 | 
149 | 
150 | 
151 |     def train(self, data, responses, alpha = 1.0):
152 |         """Train a Ridge Regression model.
153 | 
154 |         Args:
155 |             data: Training data
156 |             responses: Known responses to the training data
157 |             alpha: Regularization parameter, a small positive value with default
158 |             1.0
159 | 
160 |         Returns:
161 |             A Ridge Regression model object
162 |         """
163 | 
164 |         # Create a training algorithm object
165 |         ridge_training_alg = ridge_training.Batch_Float64DefaultDense()
166 |         # Set input
167 |         ridge_training_alg.input.set(ridge_training.data, data)
168 |         ridge_training_alg.input.set(ridge_training.dependentVariables, responses)
169 |         # Set parameter
170 |         alpha_nt = HomogenNumericTable(np.array([alpha], ndmin=2))
171 |         ridge_training_alg.parameter.ridgeParameters = alpha_nt
172 |         # Compute
173 |         results = ridge_training_alg.compute()
174 |         # Return the trained model
175 |         return results.get(ridge_training.model)
176 | 
177 | 
178 | 
179 |     def predict(self, model, testdata, intercept = True):
180 |         """Make prediction for unseen data using a trained model
181 | 
182 |         Args:
183 |             model: A trained model
184 |             testdata: New data
185 |             intercept: A boolean to indicate if the intercept needs to be computed
186 | 
187 |         Returns:
188 |             A NumericTable containing predicted responses
189 |         """
190 | 
191 |         # Create a prediction algorithm object
192 |         ridge_prediction_alg = ridge_prediction.Batch_Float64DefaultDense()
193 |         # Set input
194 |         ridge_prediction_alg.input.setModelInput(ridge_prediction.model, model)
195 |         ridge_prediction_alg.input.setNumericTableInput(ridge_prediction.data, testdata)
196 |         # Set parameters
197 |         ridge_prediction_alg.parameter.interceptFlag = intercept
198 |         # Compute
199 |         results = ridge_prediction_alg.compute()
200 |         return results.get(ridge_prediction.prediction)
201 | 
202 | 
203 | 
204 | 
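205 | # Example usage (a minimal sketch; assumes pyDAAL is installed and the
206 | # housing data file from this repo is available):
207 | #
208 | #     table = np.genfromtxt('./mldata/housing.data', dtype=np.double)
209 | #     x = HomogenNumericTable(np.ascontiguousarray(table[:, :-1]))
210 | #     y = HomogenNumericTable(np.ascontiguousarray(table[:, -1:]))
211 | #     lr = LinearRegression()
212 | #     model = lr.train(x, y)
213 | #     fitted = lr.predict(model, x)
214 | #     print('MSE:', mse(y, fitted), 'R2:', score(y, fitted))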
--------------------------------------------------------------------------------
/mldata/wine.data:
--------------------------------------------------------------------------------
1 | 1,14.23,1.71,2.43,15.6,127,2.8,3.06,.28,2.29,5.64,1.04,3.92,1065
2 | 1,13.2,1.78,2.14,11.2,100,2.65,2.76,.26,1.28,4.38,1.05,3.4,1050
3 | 1,13.16,2.36,2.67,18.6,101,2.8,3.24,.3,2.81,5.68,1.03,3.17,1185
4 | 1,14.37,1.95,2.5,16.8,113,3.85,3.49,.24,2.18,7.8,.86,3.45,1480
5 | 1,13.24,2.59,2.87,21,118,2.8,2.69,.39,1.82,4.32,1.04,2.93,735
6 | 1,14.2,1.76,2.45,15.2,112,3.27,3.39,.34,1.97,6.75,1.05,2.85,1450
7 | 1,14.39,1.87,2.45,14.6,96,2.5,2.52,.3,1.98,5.25,1.02,3.58,1290
8 | 1,14.06,2.15,2.61,17.6,121,2.6,2.51,.31,1.25,5.05,1.06,3.58,1295
9 | 1,14.83,1.64,2.17,14,97,2.8,2.98,.29,1.98,5.2,1.08,2.85,1045
10 | 1,13.86,1.35,2.27,16,98,2.98,3.15,.22,1.85,7.22,1.01,3.55,1045
11 | 1,14.1,2.16,2.3,18,105,2.95,3.32,.22,2.38,5.75,1.25,3.17,1510
12 | 1,14.12,1.48,2.32,16.8,95,2.2,2.43,.26,1.57,5,1.17,2.82,1280
13 | 1,13.75,1.73,2.41,16,89,2.6,2.76,.29,1.81,5.6,1.15,2.9,1320
14 | 1,14.75,1.73,2.39,11.4,91,3.1,3.69,.43,2.81,5.4,1.25,2.73,1150
15 | 1,14.38,1.87,2.38,12,102,3.3,3.64,.29,2.96,7.5,1.2,3,1547
16 | 1,13.63,1.81,2.7,17.2,112,2.85,2.91,.3,1.46,7.3,1.28,2.88,1310
17 | 1,14.3,1.92,2.72,20,120,2.8,3.14,.33,1.97,6.2,1.07,2.65,1280
18 | 1,13.83,1.57,2.62,20,115,2.95,3.4,.4,1.72,6.6,1.13,2.57,1130
19 | 1,14.19,1.59,2.48,16.5,108,3.3,3.93,.32,1.86,8.7,1.23,2.82,1680
20 | 1,13.64,3.1,2.56,15.2,116,2.7,3.03,.17,1.66,5.1,.96,3.36,845
21 | 1,14.06,1.63,2.28,16,126,3,3.17,.24,2.1,5.65,1.09,3.71,780
22 | 1,12.93,3.8,2.65,18.6,102,2.41,2.41,.25,1.98,4.5,1.03,3.52,770
23 | 1,13.71,1.86,2.36,16.6,101,2.61,2.88,.27,1.69,3.8,1.11,4,1035
24 | 1,12.85,1.6,2.52,17.8,95,2.48,2.37,.26,1.46,3.93,1.09,3.63,1015
25 | 
1,13.5,1.81,2.61,20,96,2.53,2.61,.28,1.66,3.52,1.12,3.82,845 26 | 1,13.05,2.05,3.22,25,124,2.63,2.68,.47,1.92,3.58,1.13,3.2,830 27 | 1,13.39,1.77,2.62,16.1,93,2.85,2.94,.34,1.45,4.8,.92,3.22,1195 28 | 1,13.3,1.72,2.14,17,94,2.4,2.19,.27,1.35,3.95,1.02,2.77,1285 29 | 1,13.87,1.9,2.8,19.4,107,2.95,2.97,.37,1.76,4.5,1.25,3.4,915 30 | 1,14.02,1.68,2.21,16,96,2.65,2.33,.26,1.98,4.7,1.04,3.59,1035 31 | 1,13.73,1.5,2.7,22.5,101,3,3.25,.29,2.38,5.7,1.19,2.71,1285 32 | 1,13.58,1.66,2.36,19.1,106,2.86,3.19,.22,1.95,6.9,1.09,2.88,1515 33 | 1,13.68,1.83,2.36,17.2,104,2.42,2.69,.42,1.97,3.84,1.23,2.87,990 34 | 1,13.76,1.53,2.7,19.5,132,2.95,2.74,.5,1.35,5.4,1.25,3,1235 35 | 1,13.51,1.8,2.65,19,110,2.35,2.53,.29,1.54,4.2,1.1,2.87,1095 36 | 1,13.48,1.81,2.41,20.5,100,2.7,2.98,.26,1.86,5.1,1.04,3.47,920 37 | 1,13.28,1.64,2.84,15.5,110,2.6,2.68,.34,1.36,4.6,1.09,2.78,880 38 | 1,13.05,1.65,2.55,18,98,2.45,2.43,.29,1.44,4.25,1.12,2.51,1105 39 | 1,13.07,1.5,2.1,15.5,98,2.4,2.64,.28,1.37,3.7,1.18,2.69,1020 40 | 1,14.22,3.99,2.51,13.2,128,3,3.04,.2,2.08,5.1,.89,3.53,760 41 | 1,13.56,1.71,2.31,16.2,117,3.15,3.29,.34,2.34,6.13,.95,3.38,795 42 | 1,13.41,3.84,2.12,18.8,90,2.45,2.68,.27,1.48,4.28,.91,3,1035 43 | 1,13.88,1.89,2.59,15,101,3.25,3.56,.17,1.7,5.43,.88,3.56,1095 44 | 1,13.24,3.98,2.29,17.5,103,2.64,2.63,.32,1.66,4.36,.82,3,680 45 | 1,13.05,1.77,2.1,17,107,3,3,.28,2.03,5.04,.88,3.35,885 46 | 1,14.21,4.04,2.44,18.9,111,2.85,2.65,.3,1.25,5.24,.87,3.33,1080 47 | 1,14.38,3.59,2.28,16,102,3.25,3.17,.27,2.19,4.9,1.04,3.44,1065 48 | 1,13.9,1.68,2.12,16,101,3.1,3.39,.21,2.14,6.1,.91,3.33,985 49 | 1,14.1,2.02,2.4,18.8,103,2.75,2.92,.32,2.38,6.2,1.07,2.75,1060 50 | 1,13.94,1.73,2.27,17.4,108,2.88,3.54,.32,2.08,8.90,1.12,3.1,1260 51 | 1,13.05,1.73,2.04,12.4,92,2.72,3.27,.17,2.91,7.2,1.12,2.91,1150 52 | 1,13.83,1.65,2.6,17.2,94,2.45,2.99,.22,2.29,5.6,1.24,3.37,1265 53 | 1,13.82,1.75,2.42,14,111,3.88,3.74,.32,1.87,7.05,1.01,3.26,1190 54 | 1,13.77,1.9,2.68,17.1,115,3,2.79,.39,1.68,6.3,1.13,2.93,1375 55 | 1,13.74,1.67,2.25,16.4,118,2.6,2.9,.21,1.62,5.85,.92,3.2,1060 56 | 1,13.56,1.73,2.46,20.5,116,2.96,2.78,.2,2.45,6.25,.98,3.03,1120 57 | 1,14.22,1.7,2.3,16.3,118,3.2,3,.26,2.03,6.38,.94,3.31,970 58 | 1,13.29,1.97,2.68,16.8,102,3,3.23,.31,1.66,6,1.07,2.84,1270 59 | 1,13.72,1.43,2.5,16.7,108,3.4,3.67,.19,2.04,6.8,.89,2.87,1285 60 | 2,12.37,.94,1.36,10.6,88,1.98,.57,.28,.42,1.95,1.05,1.82,520 61 | 2,12.33,1.1,2.28,16,101,2.05,1.09,.63,.41,3.27,1.25,1.67,680 62 | 2,12.64,1.36,2.02,16.8,100,2.02,1.41,.53,.62,5.75,.98,1.59,450 63 | 2,13.67,1.25,1.92,18,94,2.1,1.79,.32,.73,3.8,1.23,2.46,630 64 | 2,12.37,1.13,2.16,19,87,3.5,3.1,.19,1.87,4.45,1.22,2.87,420 65 | 2,12.17,1.45,2.53,19,104,1.89,1.75,.45,1.03,2.95,1.45,2.23,355 66 | 2,12.37,1.21,2.56,18.1,98,2.42,2.65,.37,2.08,4.6,1.19,2.3,678 67 | 2,13.11,1.01,1.7,15,78,2.98,3.18,.26,2.28,5.3,1.12,3.18,502 68 | 2,12.37,1.17,1.92,19.6,78,2.11,2,.27,1.04,4.68,1.12,3.48,510 69 | 2,13.34,.94,2.36,17,110,2.53,1.3,.55,.42,3.17,1.02,1.93,750 70 | 2,12.21,1.19,1.75,16.8,151,1.85,1.28,.14,2.5,2.85,1.28,3.07,718 71 | 2,12.29,1.61,2.21,20.4,103,1.1,1.02,.37,1.46,3.05,.906,1.82,870 72 | 2,13.86,1.51,2.67,25,86,2.95,2.86,.21,1.87,3.38,1.36,3.16,410 73 | 2,13.49,1.66,2.24,24,87,1.88,1.84,.27,1.03,3.74,.98,2.78,472 74 | 2,12.99,1.67,2.6,30,139,3.3,2.89,.21,1.96,3.35,1.31,3.5,985 75 | 2,11.96,1.09,2.3,21,101,3.38,2.14,.13,1.65,3.21,.99,3.13,886 76 | 2,11.66,1.88,1.92,16,97,1.61,1.57,.34,1.15,3.8,1.23,2.14,428 77 | 2,13.03,.9,1.71,16,86,1.95,2.03,.24,1.46,4.6,1.19,2.48,392 78 | 
2,11.84,2.89,2.23,18,112,1.72,1.32,.43,.95,2.65,.96,2.52,500 79 | 2,12.33,.99,1.95,14.8,136,1.9,1.85,.35,2.76,3.4,1.06,2.31,750 80 | 2,12.7,3.87,2.4,23,101,2.83,2.55,.43,1.95,2.57,1.19,3.13,463 81 | 2,12,.92,2,19,86,2.42,2.26,.3,1.43,2.5,1.38,3.12,278 82 | 2,12.72,1.81,2.2,18.8,86,2.2,2.53,.26,1.77,3.9,1.16,3.14,714 83 | 2,12.08,1.13,2.51,24,78,2,1.58,.4,1.4,2.2,1.31,2.72,630 84 | 2,13.05,3.86,2.32,22.5,85,1.65,1.59,.61,1.62,4.8,.84,2.01,515 85 | 2,11.84,.89,2.58,18,94,2.2,2.21,.22,2.35,3.05,.79,3.08,520 86 | 2,12.67,.98,2.24,18,99,2.2,1.94,.3,1.46,2.62,1.23,3.16,450 87 | 2,12.16,1.61,2.31,22.8,90,1.78,1.69,.43,1.56,2.45,1.33,2.26,495 88 | 2,11.65,1.67,2.62,26,88,1.92,1.61,.4,1.34,2.6,1.36,3.21,562 89 | 2,11.64,2.06,2.46,21.6,84,1.95,1.69,.48,1.35,2.8,1,2.75,680 90 | 2,12.08,1.33,2.3,23.6,70,2.2,1.59,.42,1.38,1.74,1.07,3.21,625 91 | 2,12.08,1.83,2.32,18.5,81,1.6,1.5,.52,1.64,2.4,1.08,2.27,480 92 | 2,12,1.51,2.42,22,86,1.45,1.25,.5,1.63,3.6,1.05,2.65,450 93 | 2,12.69,1.53,2.26,20.7,80,1.38,1.46,.58,1.62,3.05,.96,2.06,495 94 | 2,12.29,2.83,2.22,18,88,2.45,2.25,.25,1.99,2.15,1.15,3.3,290 95 | 2,11.62,1.99,2.28,18,98,3.02,2.26,.17,1.35,3.25,1.16,2.96,345 96 | 2,12.47,1.52,2.2,19,162,2.5,2.27,.32,3.28,2.6,1.16,2.63,937 97 | 2,11.81,2.12,2.74,21.5,134,1.6,.99,.14,1.56,2.5,.95,2.26,625 98 | 2,12.29,1.41,1.98,16,85,2.55,2.5,.29,1.77,2.9,1.23,2.74,428 99 | 2,12.37,1.07,2.1,18.5,88,3.52,3.75,.24,1.95,4.5,1.04,2.77,660 100 | 2,12.29,3.17,2.21,18,88,2.85,2.99,.45,2.81,2.3,1.42,2.83,406 101 | 2,12.08,2.08,1.7,17.5,97,2.23,2.17,.26,1.4,3.3,1.27,2.96,710 102 | 2,12.6,1.34,1.9,18.5,88,1.45,1.36,.29,1.35,2.45,1.04,2.77,562 103 | 2,12.34,2.45,2.46,21,98,2.56,2.11,.34,1.31,2.8,.8,3.38,438 104 | 2,11.82,1.72,1.88,19.5,86,2.5,1.64,.37,1.42,2.06,.94,2.44,415 105 | 2,12.51,1.73,1.98,20.5,85,2.2,1.92,.32,1.48,2.94,1.04,3.57,672 106 | 2,12.42,2.55,2.27,22,90,1.68,1.84,.66,1.42,2.7,.86,3.3,315 107 | 2,12.25,1.73,2.12,19,80,1.65,2.03,.37,1.63,3.4,1,3.17,510 108 | 2,12.72,1.75,2.28,22.5,84,1.38,1.76,.48,1.63,3.3,.88,2.42,488 109 | 2,12.22,1.29,1.94,19,92,2.36,2.04,.39,2.08,2.7,.86,3.02,312 110 | 2,11.61,1.35,2.7,20,94,2.74,2.92,.29,2.49,2.65,.96,3.26,680 111 | 2,11.46,3.74,1.82,19.5,107,3.18,2.58,.24,3.58,2.9,.75,2.81,562 112 | 2,12.52,2.43,2.17,21,88,2.55,2.27,.26,1.22,2,.9,2.78,325 113 | 2,11.76,2.68,2.92,20,103,1.75,2.03,.6,1.05,3.8,1.23,2.5,607 114 | 2,11.41,.74,2.5,21,88,2.48,2.01,.42,1.44,3.08,1.1,2.31,434 115 | 2,12.08,1.39,2.5,22.5,84,2.56,2.29,.43,1.04,2.9,.93,3.19,385 116 | 2,11.03,1.51,2.2,21.5,85,2.46,2.17,.52,2.01,1.9,1.71,2.87,407 117 | 2,11.82,1.47,1.99,20.8,86,1.98,1.6,.3,1.53,1.95,.95,3.33,495 118 | 2,12.42,1.61,2.19,22.5,108,2,2.09,.34,1.61,2.06,1.06,2.96,345 119 | 2,12.77,3.43,1.98,16,80,1.63,1.25,.43,.83,3.4,.7,2.12,372 120 | 2,12,3.43,2,19,87,2,1.64,.37,1.87,1.28,.93,3.05,564 121 | 2,11.45,2.4,2.42,20,96,2.9,2.79,.32,1.83,3.25,.8,3.39,625 122 | 2,11.56,2.05,3.23,28.5,119,3.18,5.08,.47,1.87,6,.93,3.69,465 123 | 2,12.42,4.43,2.73,26.5,102,2.2,2.13,.43,1.71,2.08,.92,3.12,365 124 | 2,13.05,5.8,2.13,21.5,86,2.62,2.65,.3,2.01,2.6,.73,3.1,380 125 | 2,11.87,4.31,2.39,21,82,2.86,3.03,.21,2.91,2.8,.75,3.64,380 126 | 2,12.07,2.16,2.17,21,85,2.6,2.65,.37,1.35,2.76,.86,3.28,378 127 | 2,12.43,1.53,2.29,21.5,86,2.74,3.15,.39,1.77,3.94,.69,2.84,352 128 | 2,11.79,2.13,2.78,28.5,92,2.13,2.24,.58,1.76,3,.97,2.44,466 129 | 2,12.37,1.63,2.3,24.5,88,2.22,2.45,.4,1.9,2.12,.89,2.78,342 130 | 2,12.04,4.3,2.38,22,80,2.1,1.75,.42,1.35,2.6,.79,2.57,580 131 | 3,12.86,1.35,2.32,18,122,1.51,1.25,.21,.94,4.1,.76,1.29,630 132 | 
3,12.88,2.99,2.4,20,104,1.3,1.22,.24,.83,5.4,.74,1.42,530 133 | 3,12.81,2.31,2.4,24,98,1.15,1.09,.27,.83,5.7,.66,1.36,560 134 | 3,12.7,3.55,2.36,21.5,106,1.7,1.2,.17,.84,5,.78,1.29,600 135 | 3,12.51,1.24,2.25,17.5,85,2,.58,.6,1.25,5.45,.75,1.51,650 136 | 3,12.6,2.46,2.2,18.5,94,1.62,.66,.63,.94,7.1,.73,1.58,695 137 | 3,12.25,4.72,2.54,21,89,1.38,.47,.53,.8,3.85,.75,1.27,720 138 | 3,12.53,5.51,2.64,25,96,1.79,.6,.63,1.1,5,.82,1.69,515 139 | 3,13.49,3.59,2.19,19.5,88,1.62,.48,.58,.88,5.7,.81,1.82,580 140 | 3,12.84,2.96,2.61,24,101,2.32,.6,.53,.81,4.92,.89,2.15,590 141 | 3,12.93,2.81,2.7,21,96,1.54,.5,.53,.75,4.6,.77,2.31,600 142 | 3,13.36,2.56,2.35,20,89,1.4,.5,.37,.64,5.6,.7,2.47,780 143 | 3,13.52,3.17,2.72,23.5,97,1.55,.52,.5,.55,4.35,.89,2.06,520 144 | 3,13.62,4.95,2.35,20,92,2,.8,.47,1.02,4.4,.91,2.05,550 145 | 3,12.25,3.88,2.2,18.5,112,1.38,.78,.29,1.14,8.21,.65,2,855 146 | 3,13.16,3.57,2.15,21,102,1.5,.55,.43,1.3,4,.6,1.68,830 147 | 3,13.88,5.04,2.23,20,80,.98,.34,.4,.68,4.9,.58,1.33,415 148 | 3,12.87,4.61,2.48,21.5,86,1.7,.65,.47,.86,7.65,.54,1.86,625 149 | 3,13.32,3.24,2.38,21.5,92,1.93,.76,.45,1.25,8.42,.55,1.62,650 150 | 3,13.08,3.9,2.36,21.5,113,1.41,1.39,.34,1.14,9.40,.57,1.33,550 151 | 3,13.5,3.12,2.62,24,123,1.4,1.57,.22,1.25,8.60,.59,1.3,500 152 | 3,12.79,2.67,2.48,22,112,1.48,1.36,.24,1.26,10.8,.48,1.47,480 153 | 3,13.11,1.9,2.75,25.5,116,2.2,1.28,.26,1.56,7.1,.61,1.33,425 154 | 3,13.23,3.3,2.28,18.5,98,1.8,.83,.61,1.87,10.52,.56,1.51,675 155 | 3,12.58,1.29,2.1,20,103,1.48,.58,.53,1.4,7.6,.58,1.55,640 156 | 3,13.17,5.19,2.32,22,93,1.74,.63,.61,1.55,7.9,.6,1.48,725 157 | 3,13.84,4.12,2.38,19.5,89,1.8,.83,.48,1.56,9.01,.57,1.64,480 158 | 3,12.45,3.03,2.64,27,97,1.9,.58,.63,1.14,7.5,.67,1.73,880 159 | 3,14.34,1.68,2.7,25,98,2.8,1.31,.53,2.7,13,.57,1.96,660 160 | 3,13.48,1.67,2.64,22.5,89,2.6,1.1,.52,2.29,11.75,.57,1.78,620 161 | 3,12.36,3.83,2.38,21,88,2.3,.92,.5,1.04,7.65,.56,1.58,520 162 | 3,13.69,3.26,2.54,20,107,1.83,.56,.5,.8,5.88,.96,1.82,680 163 | 3,12.85,3.27,2.58,22,106,1.65,.6,.6,.96,5.58,.87,2.11,570 164 | 3,12.96,3.45,2.35,18.5,106,1.39,.7,.4,.94,5.28,.68,1.75,675 165 | 3,13.78,2.76,2.3,22,90,1.35,.68,.41,1.03,9.58,.7,1.68,615 166 | 3,13.73,4.36,2.26,22.5,88,1.28,.47,.52,1.15,6.62,.78,1.75,520 167 | 3,13.45,3.7,2.6,23,111,1.7,.92,.43,1.46,10.68,.85,1.56,695 168 | 3,12.82,3.37,2.3,19.5,88,1.48,.66,.4,.97,10.26,.72,1.75,685 169 | 3,13.58,2.58,2.69,24.5,105,1.55,.84,.39,1.54,8.66,.74,1.8,750 170 | 3,13.4,4.6,2.86,25,112,1.98,.96,.27,1.11,8.5,.67,1.92,630 171 | 3,12.2,3.03,2.32,19,96,1.25,.49,.4,.73,5.5,.66,1.83,510 172 | 3,12.77,2.39,2.28,19.5,86,1.39,.51,.48,.64,9.899999,.57,1.63,470 173 | 3,14.16,2.51,2.48,20,91,1.68,.7,.44,1.24,9.7,.62,1.71,660 174 | 3,13.71,5.65,2.45,20.5,95,1.68,.61,.52,1.06,7.7,.64,1.74,740 175 | 3,13.4,3.91,2.48,23,102,1.8,.75,.43,1.41,7.3,.7,1.56,750 176 | 3,13.27,4.28,2.26,20,120,1.59,.69,.43,1.35,10.2,.59,1.56,835 177 | 3,13.17,2.59,2.37,20,120,1.65,.68,.53,1.46,9.3,.6,1.62,840 178 | 3,14.13,4.1,2.74,24.5,96,2.05,.76,.56,1.35,9.2,.61,1.6,560 179 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at
194 | 
195 |     http://www.apache.org/licenses/LICENSE-2.0
196 | 
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 | 
--------------------------------------------------------------------------------
/NumericTables_example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "code",
5 |    "execution_count": 1,
6 |    "metadata": {
7 |     "collapsed": false
8 |    },
9 |    "outputs": [],
10 |    "source": [
11 |     "# Boilerplate\n",
12 |     "%matplotlib inline\n",
13 |     "\n",
14 |     "# Intel DAAL related imports\n",
15 |     "from daal.data_management import HomogenNumericTable\n",
16 |     "\n",
17 |     "# Helpers getArrayFromNT and printNT. See utils.py\n",
18 |     "from utils import *\n",
19 |     "\n",
20 |     "# Import numpy, matplotlib, seaborn\n",
21 |     "import numpy as np\n",
22 |     "import matplotlib\n",
23 |     "import matplotlib.pyplot as plt\n",
24 |     "\n",
25 |     "# Plotting configurations\n",
26 |     "%config InlineBackend.figure_format = 'retina'\n",
27 |     "plt.rcParams[\"figure.figsize\"] = (12, 9)"
28 |    ]
29 |   },
30 |   {
31 |    "cell_type": "markdown",
32 |    "metadata": {},
33 |    "source": [
34 |     "# Data Management in pyDAAL\n",
35 |     "\n",
36 |     "### Tutorial brief\n",
37 |     "As a high performance data analytics library for Python, pyDAAL has a set of data structures specifically designed to be performance oriented, while still versatile enough to accommodate data of different memory layouts. These data structures are centered around `NumericTable`, a generic data type for representing data in memory. In this section, we first learn the general concept of `NumericTable`. We then focus on the two most important variants of `NumericTables`: `HomogenNumericTable` for homogeneous dense data, and `CSRNumericTable` for sparse data.\n",
38 |     "\n",
39 |     "It is critical for pyDAAL to be able to work seamlessly with other mathematical and statistical Python packages, such as NumPy, SciPy, Pandas, scikit-learn, etc. These packages are widely used in the mainstream Python data analytics community, and the goal of pyDAAL is to provide high performance alternatives to some of the algorithms that these popular packages offer. In this section we illustrate, using several simple examples, how pyDAAL can work with the data types in these packages.\n",
40 |     "\n",
41 |     "### Learning objectives\n",
42 |     "* To learn `NumericTable`, the central concept and main data type for data management in pyDAAL.\n",
43 |     "* To get familiar with the `HomogenNumericTable` and the `CSRNumericTable` API.\n",
44 |     "* To see how `NumericTables` interact with data types in NumPy, SciPy, Pandas, etc.\n",
45 |     "\n",
46 |     "### NumericTables\n",
47 |     "A conceptual model of data in data analytics is a 2-dimensional structure with each row being an _observation_ (_sample_), and each column being a _feature_ (_variable_). \n",
48 |     "\n",
49 |     "![](https://software.intel.com/sites/products/documentation/doclib/daal/daal-user-and-reference-guides/daal_prog_guide/GUID-65FAD60A-A92A-460F-B43D-4F8C2C39F662-low.png \"Dataset\")\n",
50 |     "\n",
51 |     "`NumericTables` in DAAL are modeled after this concept.
Every algorithm in DAAL takes `NumericTables` as input and produces `NumericTables` as output. There are several kinds of `NumericTables`, for example,\n",
52 |     "* **`HomogenNumericTable`** - This is a type for storing dense data where all features are of the same type. Supported types include `int`, `float32`, and `float64`. A `HomogenNumericTable` has the C-contiguous memory layout, that is, rows are laid out contiguously in memory. It is essentially the same as a 2D matrix.\n",
53 |     "\n",
54 |     "* **`CSRNumericTable`** - This is a type for storing sparse data where all features are of the same type. It is equivalent to a CSR sparse matrix. The CSR format is the most widely used memory storage format for sparse matrices. `CSRNumericTable` in pyDAAL is compatible with `scipy.sparse.csr_matrix`. \n",
55 |     "![](https://software.intel.com/sites/products/documentation/doclib/daal/daal-user-and-reference-guides/daal_prog_guide/GUID-B89DE139-3E29-41DA-AB45-BB0B655716C3-low.png \"CSR 0-based indexing\")\n",
56 |     "\n",
57 |     "![](https://software.intel.com/sites/products/documentation/doclib/daal/daal-user-and-reference-guides/daal_prog_guide/GUID-F488A72A-68BB-4E64-9D46-9C5FFAD0D431-low.png \"CSR 1-based indexing\")\n",
58 |     "\n",
59 |     "* **`AOSNumericTable`** - This table represents heterogeneous data, that is, features (columns) in the table can be of different data types. This table uses the row-major memory layout: rows are stored in contiguous memory blocks.\n",
60 |     "![](https://software.intel.com/sites/products/documentation/doclib/daal/daal-user-and-reference-guides/daal_prog_guide/GUID-F0B9F856-5C57-4AE0-972E-8E0B70F3BDA4-low.png \"AOSNumericTable\")\n",
61 |     "\n",
62 |     "* **`SOANumericTable`** - Another type of table for representing heterogeneous data, but this one uses the column-major memory layout.\n",
63 |     "![](https://software.intel.com/sites/products/documentation/doclib/daal/daal-user-and-reference-guides/daal_prog_guide/GUID-02052873-BCB8-44CD-A506-7270567D79F7-low.png \"SOANumericTable\")\n",
64 |     "\n",
65 |     "Having discussed the concepts, we are now ready to put `NumericTables` into action. In particular, we are interested in learning how to interact with the data types of other Python numeric packages. The following examples use `HomogenNumericTable` or `CSRNumericTable`, but the principles carry over to other types of `NumericTable`. \n",
66 |     "\n",
67 |     "### Interoperability with NumPy ndarrays\n",
68 |     "NumPy ndarray is the common denominator in many numeric packages. SciPy, Pandas, scikit-learn, and plotting tools such as matplotlib can either work directly with ndarrays, or have data types built on top of ndarrays. The code below shows how to easily convert an ndarray to a `HomogenNumericTable`. It is worth stressing that\n",
69 |     "\n",
70 |     "**this works only if the ndarray is C-contiguous.**"
" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 2, 76 | "metadata": { 77 | "collapsed": true 78 | }, 79 | "outputs": [ 80 | { 81 | "name": "stdout", 82 | "output_type": "stream", 83 | "text": [ 84 | "1 6\n", 85 | "6 1\n", 86 | "2 3\n", 87 | "False\n" 88 | ] 89 | } 90 | ], 91 | "source": [ 92 | "import numpy as np\n", 93 | "from daal.data_management import HomogenNumericTable\n", 94 | "\n", 95 | "# The reshape is necessary because HomogenNumericTable constructor only takes array with fully defined dimensions. \n", 96 | "x = np.array([1., 2., 3., 4., 5., 6.]).reshape(1, 6)\n", 97 | "x_nt = HomogenNumericTable(x)\n", 98 | "print(x_nt.getNumberOfRows(), x_nt.getNumberOfColumns())\n", 99 | "\n", 100 | "y_nt = HomogenNumericTable(x.reshape(6, 1))\n", 101 | "print(y_nt.getNumberOfRows(), y_nt.getNumberOfColumns())\n", 102 | "\n", 103 | "z_nt = HomogenNumericTable(x.reshape(2, 3))\n", 104 | "print(z_nt.getNumberOfRows(), z_nt.getNumberOfColumns())\n", 105 | "\n", 106 | "s = x.reshape(2, 3)\n", 107 | "s_slice = s[:, :-1]\n", 108 | "print(s_slice.flags['C'])\n", 109 | "\n", 110 | "# DON'T DO THIS. s_slice is not C-contiguous!\n", 111 | "# bad_nt = HomogenNumericTable(s_slice)" 112 | ] 113 | }, 114 | { 115 | "cell_type": "markdown", 116 | "metadata": {}, 117 | "source": [ 118 | "Going from a HomogenNumericTable to an ndarray is also possible, see below. The operation is so common that we've defined a function `getArrayFromNT` in file [utils.py](./utils.py) based on the same logic. You can use this function for the rest of the lab." 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": 3, 124 | "metadata": { 125 | "collapsed": true 126 | }, 127 | "outputs": [ 128 | { 129 | "name": "stdout", 130 | "output_type": "stream", 131 | "text": [ 132 | "[[ 1. 2. 3.]\n", 133 | " [ 4. 5. 6.]]\n" 134 | ] 135 | } 136 | ], 137 | "source": [ 138 | "from daal.data_management import BlockDescriptor_Float64, readOnly\n", 139 | "\n", 140 | "bd = BlockDescriptor_Float64()\n", 141 | "z_nt.getBlockOfRows(0, z_nt.getNumberOfRows(), readOnly, bd)\n", 142 | "z = bd.getArray()\n", 143 | "z_nt.releaseBlockOfRows(bd)\n", 144 | "print(z)" 145 | ] 146 | }, 147 | { 148 | "cell_type": "markdown", 149 | "metadata": {}, 150 | "source": [ 151 | "### Example: Load data from a file\n", 152 | "We often need to get data from a file, typically a file of the CSV format. It's noteworthy that pyDAAL provides data source connectors that can read data from a CSV file. However, more than often than not, NumPy's `genfromtxt` function just works like a charm. \n", 153 | "\n", 154 | "Example below reads the first 5 rows from a data file, and excludes the first column (column index 0)." 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 4, 160 | "metadata": { 161 | "collapsed": true 162 | }, 163 | "outputs": [ 164 | { 165 | "name": "stdout", 166 | "output_type": "stream", 167 | "text": [ 168 | "True\n", 169 | "5 13\n" 170 | ] 171 | } 172 | ], 173 | "source": [ 174 | "data = np.genfromtxt('./mldata/wine.data', dtype=np.double, delimiter=',', usecols=list(range(1, 14)), max_rows=5)\n", 175 | "print(data.flags['C'])\n", 176 | "data_nt = HomogenNumericTable(data)\n", 177 | "print(data_nt.getNumberOfRows(), data_nt.getNumberOfColumns())" 178 | ] 179 | }, 180 | { 181 | "cell_type": "markdown", 182 | "metadata": {}, 183 | "source": [ 184 | "### Example: Pandas DataFrames\n", 185 | "Pandas DataFrames can be converted to ndarrays, and then to `NumericTables`. 
We can also go in the other direction through ndarrays; see the example below. The `getArrayFromNT` function is imported from [utils.py](./utils.py)."
186 |    ]
187 |   },
188 |   {
189 |    "cell_type": "code",
190 |    "execution_count": 5,
191 |    "metadata": {
192 |     "collapsed": true
193 |    },
194 |    "outputs": [
195 |     {
196 |      "name": "stdout",
197 |      "output_type": "stream",
198 |      "text": [
199 |       "True\n",
200 |       "(10, 5)\n",
201 |       "10 5\n",
202 |       "          a         b         c         d         e\n",
203 |       "0  0.724781  0.761530 -0.644497  0.391697 -0.988857\n",
204 |       "1  0.603173 -0.484664  0.913673  3.054833  0.687377\n",
205 |       "2 -0.548924  0.057363  0.292965 -0.499336 -1.093813\n",
206 |       "3 -0.501176  0.650918 -0.614840  0.323459  0.304761\n",
207 |       "4  0.415223 -0.764296 -1.146337  1.892307 -0.998742\n",
208 |       "5 -0.473059 -0.812457 -0.611840  2.189755 -2.248139\n",
209 |       "6 -1.135596  1.485732  0.370990  0.343459 -1.903275\n",
210 |       "7 -1.789672  0.878171 -1.190615 -0.037734  0.707674\n",
211 |       "8 -0.105340 -1.285028  0.324998 -0.202555  1.126162\n",
212 |       "9 -1.017113  0.768547  1.148915  1.763926  1.198747\n"
213 |      ]
214 |     }
215 |    ],
216 |    "source": [
217 |     "import pandas as pd\n",
218 |     "from utils import *\n",
219 |     "\n",
220 |     "df = pd.DataFrame(np.random.randn(10, 5), columns = ['a', 'b', 'c', 'd', 'e'])\n",
221 |     "array = df.values\n",
222 |     "print(array.flags['C'])\n",
223 |     "print(array.shape)\n",
224 |     "\n",
225 |     "array_nt = HomogenNumericTable(array)\n",
226 |     "print(array_nt.getNumberOfRows(), array_nt.getNumberOfColumns())\n",
227 |     "\n",
228 |     "d = getArrayFromNT(array_nt)\n",
229 |     "df2 = pd.DataFrame(d, columns = ['a', 'b', 'c', 'd', 'e'])\n",
230 |     "print(df2)"
231 |    ]
232 |   },
233 |   {
234 |    "cell_type": "markdown",
235 |    "metadata": {},
236 |    "source": [
237 |     "### Example: scikit-learn datasets\n",
238 |     "Scikit-learn has functions for loading popular datasets. These datasets are available through [sklearn.datasets](http://scikit-learn.org/stable/datasets). For example, the [load_digits](http://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_digits.html#sklearn.datasets.load_digits) method loads and returns the digits dataset. Because the dataset internally uses NumPy ndarrays to store information, we can convert it to DAAL `NumericTables`, and pass them to DAAL algorithms. \n",
239 |     "\n",
240 |     "Extreme caution must be taken, however, because sometimes the data loaded is not C-contiguous. We need to fix the memory layout before constructing a `NumericTable` from the data. The code below shows how it works."
241 |    ]
242 |   },
243 |   {
244 |    "cell_type": "code",
245 |    "execution_count": 6,
246 |    "metadata": {
247 |     "collapsed": true
248 |    },
249 |    "outputs": [
250 |     {
251 |      "name": "stdout",
252 |      "output_type": "stream",
253 |      "text": [
254 |       "False\n",
255 |       "100 64\n"
256 |      ]
257 |     }
258 |    ],
259 |    "source": [
260 |     "from sklearn.datasets import load_digits\n",
261 |     "\n",
262 |     "digits = load_digits()\n",
263 |     "print(digits.data.flags['C'])\n",
264 |     "# digits.data is NOT C-contiguous. We need to make it into the C-contiguous memory layout.\n",
265 |     "data = np.ascontiguousarray(digits.data, dtype = np.double)\n",
266 |     "data_nt = HomogenNumericTable(data[-100:])\n",
267 |     "print(data_nt.getNumberOfRows(), data_nt.getNumberOfColumns())"
268 |    ]
269 |   },
270 |   {
271 |    "cell_type": "markdown",
272 |    "metadata": {},
273 |    "source": [
274 |     "### Example: SciPy sparse matrix\n",
275 |     "The last example illustrates `CSRNumericTable`, which is essentially a sparse matrix in the CSR storage format.
The CSR format uses three 1D arrays to represent a sparse matrix:\n", 276 | "* `values` - All non-zero values are lumped into a dense array.\n", 277 | "* `col_ind` - An array of column indices for non-zero values.\n", 278 | "* `row_offset` - An array whose $i$-th element is the index in the `values` array for the value corresponding to the first non-zero element of the $i$-th row of the matrix. The last element of this array equals _nnz_, the number of non-zeros.\n", 279 | "\n", 280 | "`CSRNumericTable` is compatible with [`scipy.sparse.csr_matrix`](http://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.csr_matrix.html#scipy.sparse.csr_matrix). The code below shows how to convert from a SciPy sparse matrix to a `CSRNumericTable`. One peculiar thing to note when constructing a `CSRNumericTable` is that the index arrays (`col_ind` and `row_offset`) must be 64-bit integers. " 281 | ] 282 | }, 283 | { 284 | "cell_type": "code", 285 | "execution_count": 7, 286 | "metadata": { 287 | "collapsed": true 288 | }, 289 | "outputs": [ 290 | { 291 | "name": "stdout", 292 | "output_type": "stream", 293 | "text": [ 294 | "[[ 2. 0. 6.4 0. 0. 1.7 0. ]\n", 295 | " [ 0. 0. 0. 3.1 0. 0. 0. ]\n", 296 | " [ 0. 0. 0. 0. 0. 0. 0. ]\n", 297 | " [ 0. 2.2 0. 0. 2.1 0. 0. ]\n", 298 | " [ 0. 0. 0. 0. 0. 3.8 5.5]]\n", 299 | "5 7\n", 300 | "values = [ 2. 6.4 1.7 3.1 2.2 2.1 3.8 5.5]\n", 301 | "col_ind = [0 2 5 3 1 4 5 6]\n", 302 | "row_offset = [0 3 4 4 6 8]\n" 303 | ] 304 | } 305 | ], 306 | "source": [ 307 | "from scipy.sparse import csr_matrix\n", 308 | "from daal.data_management import CSRNumericTable\n", 309 | "\n", 310 | "# First, create a sparse matrix\n", 311 | "values = np.array([2.0, 6.4, 1.7, 3.1, 2.2, 2.1, 3.8, 5.5])\n", 312 | "col_ind = np.array([0, 2, 5, 3, 1, 4, 5, 6])\n", 313 | "row_offset = np.array([0, 3, 4, 4, 6, 8])\n", 314 | "sp = csr_matrix((values, col_ind, row_offset), dtype=np.double, shape=(5, 7))\n", 315 | "print(sp.toarray())\n", 316 | "\n", 317 | "# Then, create a CSRNumericTable based on the sparse matrix\n", 318 | "sp_nt = CSRNumericTable(sp.data, sp.indices.astype(np.uint64), sp.indptr.astype(np.uint64), 7, 5)\n", 319 | "print(sp_nt.getNumberOfRows(), sp_nt.getNumberOfColumns())\n", 320 | "(values, col_ind, row_offset) = sp_nt.getArrays()\n", 321 | "print(\"values = \", values)\n", 322 | "print(\"col_ind = \", col_ind)\n", 323 | "print(\"row_offset = \", row_offset)" 324 | ] 325 | }, 326 | { 327 | "cell_type": "markdown", 328 | "metadata": {}, 329 | "source": [ 330 | "### Summary\n", 331 | "We learned the central concept of data management in pyDAAL: `NumericTables`. We got a glimpse of the 4 types of `NumericTables` supported in DAAL. We practiced basic operations on `HomogenNumericTable` and `CSRNumericTable`, and their interoperability with NumPy, SciPy, Pandas, and scikit-learn." 
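One small addendum to the sparse-matrix example above: forgetting the 64-bit cast on the index arrays is an easy mistake, so it can be worth wrapping the conversion. A minimal sketch that mirrors the constructor call used above; the helper name `scipy_csr_to_nt` is our own, not part of pyDAAL:

```python
import numpy as np
from scipy.sparse import csr_matrix
from daal.data_management import CSRNumericTable

def scipy_csr_to_nt(sp):
    # Mirror the call above: (values, col_ind, row_offset, nColumns, nRows),
    # with double-precision values and 64-bit unsigned index arrays.
    nrows, ncols = sp.shape
    return CSRNumericTable(sp.data.astype(np.double),
                           sp.indices.astype(np.uint64),
                           sp.indptr.astype(np.uint64),
                           ncols, nrows)

nt = scipy_csr_to_nt(csr_matrix(np.eye(3)))
print(nt.getNumberOfRows(), nt.getNumberOfColumns())  # expect: 3 3
```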
332 | ] 333 | } 334 | ], 335 | "metadata": { 336 | "kernelspec": { 337 | "display_name": "Python 3", 338 | "language": "python", 339 | "name": "python3" 340 | }, 341 | "language_info": { 342 | "codemirror_mode": { 343 | "name": "ipython", 344 | "version": 3 345 | }, 346 | "file_extension": ".py", 347 | "mimetype": "text/x-python", 348 | "name": "python", 349 | "nbconvert_exporter": "python", 350 | "pygments_lexer": "ipython3", 351 | "version": "3.5.2" 352 | } 353 | }, 354 | "nbformat": 4, 355 | "nbformat_minor": 0 356 | } 357 | -------------------------------------------------------------------------------- /mldata/housing.data: -------------------------------------------------------------------------------- 1 | 0.00632 18.00 2.310 0 0.5380 6.5750 65.20 4.0900 1 296.0 15.30 396.90 4.98 24.00 2 | 0.02731 0.00 7.070 0 0.4690 6.4210 78.90 4.9671 2 242.0 17.80 396.90 9.14 21.60 3 | 0.02729 0.00 7.070 0 0.4690 7.1850 61.10 4.9671 2 242.0 17.80 392.83 4.03 34.70 4 | 0.03237 0.00 2.180 0 0.4580 6.9980 45.80 6.0622 3 222.0 18.70 394.63 2.94 33.40 5 | 0.06905 0.00 2.180 0 0.4580 7.1470 54.20 6.0622 3 222.0 18.70 396.90 5.33 36.20 6 | 0.02985 0.00 2.180 0 0.4580 6.4300 58.70 6.0622 3 222.0 18.70 394.12 5.21 28.70 7 | 0.08829 12.50 7.870 0 0.5240 6.0120 66.60 5.5605 5 311.0 15.20 395.60 12.43 22.90 8 | 0.14455 12.50 7.870 0 0.5240 6.1720 96.10 5.9505 5 311.0 15.20 396.90 19.15 27.10 9 | 0.21124 12.50 7.870 0 0.5240 5.6310 100.00 6.0821 5 311.0 15.20 386.63 29.93 16.50 10 | 0.17004 12.50 7.870 0 0.5240 6.0040 85.90 6.5921 5 311.0 15.20 386.71 17.10 18.90 11 | 0.22489 12.50 7.870 0 0.5240 6.3770 94.30 6.3467 5 311.0 15.20 392.52 20.45 15.00 12 | 0.11747 12.50 7.870 0 0.5240 6.0090 82.90 6.2267 5 311.0 15.20 396.90 13.27 18.90 13 | 0.09378 12.50 7.870 0 0.5240 5.8890 39.00 5.4509 5 311.0 15.20 390.50 15.71 21.70 14 | 0.62976 0.00 8.140 0 0.5380 5.9490 61.80 4.7075 4 307.0 21.00 396.90 8.26 20.40 15 | 0.63796 0.00 8.140 0 0.5380 6.0960 84.50 4.4619 4 307.0 21.00 380.02 10.26 18.20 16 | 0.62739 0.00 8.140 0 0.5380 5.8340 56.50 4.4986 4 307.0 21.00 395.62 8.47 19.90 17 | 1.05393 0.00 8.140 0 0.5380 5.9350 29.30 4.4986 4 307.0 21.00 386.85 6.58 23.10 18 | 0.78420 0.00 8.140 0 0.5380 5.9900 81.70 4.2579 4 307.0 21.00 386.75 14.67 17.50 19 | 0.80271 0.00 8.140 0 0.5380 5.4560 36.60 3.7965 4 307.0 21.00 288.99 11.69 20.20 20 | 0.72580 0.00 8.140 0 0.5380 5.7270 69.50 3.7965 4 307.0 21.00 390.95 11.28 18.20 21 | 1.25179 0.00 8.140 0 0.5380 5.5700 98.10 3.7979 4 307.0 21.00 376.57 21.02 13.60 22 | 0.85204 0.00 8.140 0 0.5380 5.9650 89.20 4.0123 4 307.0 21.00 392.53 13.83 19.60 23 | 1.23247 0.00 8.140 0 0.5380 6.1420 91.70 3.9769 4 307.0 21.00 396.90 18.72 15.20 24 | 0.98843 0.00 8.140 0 0.5380 5.8130 100.00 4.0952 4 307.0 21.00 394.54 19.88 14.50 25 | 0.75026 0.00 8.140 0 0.5380 5.9240 94.10 4.3996 4 307.0 21.00 394.33 16.30 15.60 26 | 0.84054 0.00 8.140 0 0.5380 5.5990 85.70 4.4546 4 307.0 21.00 303.42 16.51 13.90 27 | 0.67191 0.00 8.140 0 0.5380 5.8130 90.30 4.6820 4 307.0 21.00 376.88 14.81 16.60 28 | 0.95577 0.00 8.140 0 0.5380 6.0470 88.80 4.4534 4 307.0 21.00 306.38 17.28 14.80 29 | 0.77299 0.00 8.140 0 0.5380 6.4950 94.40 4.4547 4 307.0 21.00 387.94 12.80 18.40 30 | 1.00245 0.00 8.140 0 0.5380 6.6740 87.30 4.2390 4 307.0 21.00 380.23 11.98 21.00 31 | 1.13081 0.00 8.140 0 0.5380 5.7130 94.10 4.2330 4 307.0 21.00 360.17 22.60 12.70 32 | 1.35472 0.00 8.140 0 0.5380 6.0720 100.00 4.1750 4 307.0 21.00 376.73 13.04 14.50 33 | 1.38799 0.00 8.140 0 0.5380 5.9500 82.00 3.9900 4 307.0 21.00 232.60 27.71 13.20 34 | 
1.15172 0.00 8.140 0 0.5380 5.7010 95.00 3.7872 4 307.0 21.00 358.77 18.35 13.10 35 | 1.61282 0.00 8.140 0 0.5380 6.0960 96.90 3.7598 4 307.0 21.00 248.31 20.34 13.50 36 | 0.06417 0.00 5.960 0 0.4990 5.9330 68.20 3.3603 5 279.0 19.20 396.90 9.68 18.90 37 | 0.09744 0.00 5.960 0 0.4990 5.8410 61.40 3.3779 5 279.0 19.20 377.56 11.41 20.00 38 | 0.08014 0.00 5.960 0 0.4990 5.8500 41.50 3.9342 5 279.0 19.20 396.90 8.77 21.00 39 | 0.17505 0.00 5.960 0 0.4990 5.9660 30.20 3.8473 5 279.0 19.20 393.43 10.13 24.70 40 | 0.02763 75.00 2.950 0 0.4280 6.5950 21.80 5.4011 3 252.0 18.30 395.63 4.32 30.80 41 | 0.03359 75.00 2.950 0 0.4280 7.0240 15.80 5.4011 3 252.0 18.30 395.62 1.98 34.90 42 | 0.12744 0.00 6.910 0 0.4480 6.7700 2.90 5.7209 3 233.0 17.90 385.41 4.84 26.60 43 | 0.14150 0.00 6.910 0 0.4480 6.1690 6.60 5.7209 3 233.0 17.90 383.37 5.81 25.30 44 | 0.15936 0.00 6.910 0 0.4480 6.2110 6.50 5.7209 3 233.0 17.90 394.46 7.44 24.70 45 | 0.12269 0.00 6.910 0 0.4480 6.0690 40.00 5.7209 3 233.0 17.90 389.39 9.55 21.20 46 | 0.17142 0.00 6.910 0 0.4480 5.6820 33.80 5.1004 3 233.0 17.90 396.90 10.21 19.30 47 | 0.18836 0.00 6.910 0 0.4480 5.7860 33.30 5.1004 3 233.0 17.90 396.90 14.15 20.00 48 | 0.22927 0.00 6.910 0 0.4480 6.0300 85.50 5.6894 3 233.0 17.90 392.74 18.80 16.60 49 | 0.25387 0.00 6.910 0 0.4480 5.3990 95.30 5.8700 3 233.0 17.90 396.90 30.81 14.40 50 | 0.21977 0.00 6.910 0 0.4480 5.6020 62.00 6.0877 3 233.0 17.90 396.90 16.20 19.40 51 | 0.08873 21.00 5.640 0 0.4390 5.9630 45.70 6.8147 4 243.0 16.80 395.56 13.45 19.70 52 | 0.04337 21.00 5.640 0 0.4390 6.1150 63.00 6.8147 4 243.0 16.80 393.97 9.43 20.50 53 | 0.05360 21.00 5.640 0 0.4390 6.5110 21.10 6.8147 4 243.0 16.80 396.90 5.28 25.00 54 | 0.04981 21.00 5.640 0 0.4390 5.9980 21.40 6.8147 4 243.0 16.80 396.90 8.43 23.40 55 | 0.01360 75.00 4.000 0 0.4100 5.8880 47.60 7.3197 3 469.0 21.10 396.90 14.80 18.90 56 | 0.01311 90.00 1.220 0 0.4030 7.2490 21.90 8.6966 5 226.0 17.90 395.93 4.81 35.40 57 | 0.02055 85.00 0.740 0 0.4100 6.3830 35.70 9.1876 2 313.0 17.30 396.90 5.77 24.70 58 | 0.01432 100.00 1.320 0 0.4110 6.8160 40.50 8.3248 5 256.0 15.10 392.90 3.95 31.60 59 | 0.15445 25.00 5.130 0 0.4530 6.1450 29.20 7.8148 8 284.0 19.70 390.68 6.86 23.30 60 | 0.10328 25.00 5.130 0 0.4530 5.9270 47.20 6.9320 8 284.0 19.70 396.90 9.22 19.60 61 | 0.14932 25.00 5.130 0 0.4530 5.7410 66.20 7.2254 8 284.0 19.70 395.11 13.15 18.70 62 | 0.17171 25.00 5.130 0 0.4530 5.9660 93.40 6.8185 8 284.0 19.70 378.08 14.44 16.00 63 | 0.11027 25.00 5.130 0 0.4530 6.4560 67.80 7.2255 8 284.0 19.70 396.90 6.73 22.20 64 | 0.12650 25.00 5.130 0 0.4530 6.7620 43.40 7.9809 8 284.0 19.70 395.58 9.50 25.00 65 | 0.01951 17.50 1.380 0 0.4161 7.1040 59.50 9.2229 3 216.0 18.60 393.24 8.05 33.00 66 | 0.03584 80.00 3.370 0 0.3980 6.2900 17.80 6.6115 4 337.0 16.10 396.90 4.67 23.50 67 | 0.04379 80.00 3.370 0 0.3980 5.7870 31.10 6.6115 4 337.0 16.10 396.90 10.24 19.40 68 | 0.05789 12.50 6.070 0 0.4090 5.8780 21.40 6.4980 4 345.0 18.90 396.21 8.10 22.00 69 | 0.13554 12.50 6.070 0 0.4090 5.5940 36.80 6.4980 4 345.0 18.90 396.90 13.09 17.40 70 | 0.12816 12.50 6.070 0 0.4090 5.8850 33.00 6.4980 4 345.0 18.90 396.90 8.79 20.90 71 | 0.08826 0.00 10.810 0 0.4130 6.4170 6.60 5.2873 4 305.0 19.20 383.73 6.72 24.20 72 | 0.15876 0.00 10.810 0 0.4130 5.9610 17.50 5.2873 4 305.0 19.20 376.94 9.88 21.70 73 | 0.09164 0.00 10.810 0 0.4130 6.0650 7.80 5.2873 4 305.0 19.20 390.91 5.52 22.80 74 | 0.19539 0.00 10.810 0 0.4130 6.2450 6.20 5.2873 4 305.0 19.20 377.17 7.54 23.40 75 | 0.07896 0.00 12.830 0 0.4370 
6.2730 6.00 4.2515 5 398.0 18.70 394.92 6.78 24.10 76 | 0.09512 0.00 12.830 0 0.4370 6.2860 45.00 4.5026 5 398.0 18.70 383.23 8.94 21.40 77 | 0.10153 0.00 12.830 0 0.4370 6.2790 74.50 4.0522 5 398.0 18.70 373.66 11.97 20.00 78 | 0.08707 0.00 12.830 0 0.4370 6.1400 45.80 4.0905 5 398.0 18.70 386.96 10.27 20.80 79 | 0.05646 0.00 12.830 0 0.4370 6.2320 53.70 5.0141 5 398.0 18.70 386.40 12.34 21.20 80 | 0.08387 0.00 12.830 0 0.4370 5.8740 36.60 4.5026 5 398.0 18.70 396.06 9.10 20.30 81 | 0.04113 25.00 4.860 0 0.4260 6.7270 33.50 5.4007 4 281.0 19.00 396.90 5.29 28.00 82 | 0.04462 25.00 4.860 0 0.4260 6.6190 70.40 5.4007 4 281.0 19.00 395.63 7.22 23.90 83 | 0.03659 25.00 4.860 0 0.4260 6.3020 32.20 5.4007 4 281.0 19.00 396.90 6.72 24.80 84 | 0.03551 25.00 4.860 0 0.4260 6.1670 46.70 5.4007 4 281.0 19.00 390.64 7.51 22.90 85 | 0.05059 0.00 4.490 0 0.4490 6.3890 48.00 4.7794 3 247.0 18.50 396.90 9.62 23.90 86 | 0.05735 0.00 4.490 0 0.4490 6.6300 56.10 4.4377 3 247.0 18.50 392.30 6.53 26.60 87 | 0.05188 0.00 4.490 0 0.4490 6.0150 45.10 4.4272 3 247.0 18.50 395.99 12.86 22.50 88 | 0.07151 0.00 4.490 0 0.4490 6.1210 56.80 3.7476 3 247.0 18.50 395.15 8.44 22.20 89 | 0.05660 0.00 3.410 0 0.4890 7.0070 86.30 3.4217 2 270.0 17.80 396.90 5.50 23.60 90 | 0.05302 0.00 3.410 0 0.4890 7.0790 63.10 3.4145 2 270.0 17.80 396.06 5.70 28.70 91 | 0.04684 0.00 3.410 0 0.4890 6.4170 66.10 3.0923 2 270.0 17.80 392.18 8.81 22.60 92 | 0.03932 0.00 3.410 0 0.4890 6.4050 73.90 3.0921 2 270.0 17.80 393.55 8.20 22.00 93 | 0.04203 28.00 15.040 0 0.4640 6.4420 53.60 3.6659 4 270.0 18.20 395.01 8.16 22.90 94 | 0.02875 28.00 15.040 0 0.4640 6.2110 28.90 3.6659 4 270.0 18.20 396.33 6.21 25.00 95 | 0.04294 28.00 15.040 0 0.4640 6.2490 77.30 3.6150 4 270.0 18.20 396.90 10.59 20.60 96 | 0.12204 0.00 2.890 0 0.4450 6.6250 57.80 3.4952 2 276.0 18.00 357.98 6.65 28.40 97 | 0.11504 0.00 2.890 0 0.4450 6.1630 69.60 3.4952 2 276.0 18.00 391.83 11.34 21.40 98 | 0.12083 0.00 2.890 0 0.4450 8.0690 76.00 3.4952 2 276.0 18.00 396.90 4.21 38.70 99 | 0.08187 0.00 2.890 0 0.4450 7.8200 36.90 3.4952 2 276.0 18.00 393.53 3.57 43.80 100 | 0.06860 0.00 2.890 0 0.4450 7.4160 62.50 3.4952 2 276.0 18.00 396.90 6.19 33.20 101 | 0.14866 0.00 8.560 0 0.5200 6.7270 79.90 2.7778 5 384.0 20.90 394.76 9.42 27.50 102 | 0.11432 0.00 8.560 0 0.5200 6.7810 71.30 2.8561 5 384.0 20.90 395.58 7.67 26.50 103 | 0.22876 0.00 8.560 0 0.5200 6.4050 85.40 2.7147 5 384.0 20.90 70.80 10.63 18.60 104 | 0.21161 0.00 8.560 0 0.5200 6.1370 87.40 2.7147 5 384.0 20.90 394.47 13.44 19.30 105 | 0.13960 0.00 8.560 0 0.5200 6.1670 90.00 2.4210 5 384.0 20.90 392.69 12.33 20.10 106 | 0.13262 0.00 8.560 0 0.5200 5.8510 96.70 2.1069 5 384.0 20.90 394.05 16.47 19.50 107 | 0.17120 0.00 8.560 0 0.5200 5.8360 91.90 2.2110 5 384.0 20.90 395.67 18.66 19.50 108 | 0.13117 0.00 8.560 0 0.5200 6.1270 85.20 2.1224 5 384.0 20.90 387.69 14.09 20.40 109 | 0.12802 0.00 8.560 0 0.5200 6.4740 97.10 2.4329 5 384.0 20.90 395.24 12.27 19.80 110 | 0.26363 0.00 8.560 0 0.5200 6.2290 91.20 2.5451 5 384.0 20.90 391.23 15.55 19.40 111 | 0.10793 0.00 8.560 0 0.5200 6.1950 54.40 2.7778 5 384.0 20.90 393.49 13.00 21.70 112 | 0.10084 0.00 10.010 0 0.5470 6.7150 81.60 2.6775 6 432.0 17.80 395.59 10.16 22.80 113 | 0.12329 0.00 10.010 0 0.5470 5.9130 92.90 2.3534 6 432.0 17.80 394.95 16.21 18.80 114 | 0.22212 0.00 10.010 0 0.5470 6.0920 95.40 2.5480 6 432.0 17.80 396.90 17.09 18.70 115 | 0.14231 0.00 10.010 0 0.5470 6.2540 84.20 2.2565 6 432.0 17.80 388.74 10.45 18.50 116 | 0.17134 0.00 10.010 0 0.5470 5.9280 88.20 
2.4631 6 432.0 17.80 344.91 15.76 18.30 117 | 0.13158 0.00 10.010 0 0.5470 6.1760 72.50 2.7301 6 432.0 17.80 393.30 12.04 21.20 118 | 0.15098 0.00 10.010 0 0.5470 6.0210 82.60 2.7474 6 432.0 17.80 394.51 10.30 19.20 119 | 0.13058 0.00 10.010 0 0.5470 5.8720 73.10 2.4775 6 432.0 17.80 338.63 15.37 20.40 120 | 0.14476 0.00 10.010 0 0.5470 5.7310 65.20 2.7592 6 432.0 17.80 391.50 13.61 19.30 121 | 0.06899 0.00 25.650 0 0.5810 5.8700 69.70 2.2577 2 188.0 19.10 389.15 14.37 22.00 122 | 0.07165 0.00 25.650 0 0.5810 6.0040 84.10 2.1974 2 188.0 19.10 377.67 14.27 20.30 123 | 0.09299 0.00 25.650 0 0.5810 5.9610 92.90 2.0869 2 188.0 19.10 378.09 17.93 20.50 124 | 0.15038 0.00 25.650 0 0.5810 5.8560 97.00 1.9444 2 188.0 19.10 370.31 25.41 17.30 125 | 0.09849 0.00 25.650 0 0.5810 5.8790 95.80 2.0063 2 188.0 19.10 379.38 17.58 18.80 126 | 0.16902 0.00 25.650 0 0.5810 5.9860 88.40 1.9929 2 188.0 19.10 385.02 14.81 21.40 127 | 0.38735 0.00 25.650 0 0.5810 5.6130 95.60 1.7572 2 188.0 19.10 359.29 27.26 15.70 128 | 0.25915 0.00 21.890 0 0.6240 5.6930 96.00 1.7883 4 437.0 21.20 392.11 17.19 16.20 129 | 0.32543 0.00 21.890 0 0.6240 6.4310 98.80 1.8125 4 437.0 21.20 396.90 15.39 18.00 130 | 0.88125 0.00 21.890 0 0.6240 5.6370 94.70 1.9799 4 437.0 21.20 396.90 18.34 14.30 131 | 0.34006 0.00 21.890 0 0.6240 6.4580 98.90 2.1185 4 437.0 21.20 395.04 12.60 19.20 132 | 1.19294 0.00 21.890 0 0.6240 6.3260 97.70 2.2710 4 437.0 21.20 396.90 12.26 19.60 133 | 0.59005 0.00 21.890 0 0.6240 6.3720 97.90 2.3274 4 437.0 21.20 385.76 11.12 23.00 134 | 0.32982 0.00 21.890 0 0.6240 5.8220 95.40 2.4699 4 437.0 21.20 388.69 15.03 18.40 135 | 0.97617 0.00 21.890 0 0.6240 5.7570 98.40 2.3460 4 437.0 21.20 262.76 17.31 15.60 136 | 0.55778 0.00 21.890 0 0.6240 6.3350 98.20 2.1107 4 437.0 21.20 394.67 16.96 18.10 137 | 0.32264 0.00 21.890 0 0.6240 5.9420 93.50 1.9669 4 437.0 21.20 378.25 16.90 17.40 138 | 0.35233 0.00 21.890 0 0.6240 6.4540 98.40 1.8498 4 437.0 21.20 394.08 14.59 17.10 139 | 0.24980 0.00 21.890 0 0.6240 5.8570 98.20 1.6686 4 437.0 21.20 392.04 21.32 13.30 140 | 0.54452 0.00 21.890 0 0.6240 6.1510 97.90 1.6687 4 437.0 21.20 396.90 18.46 17.80 141 | 0.29090 0.00 21.890 0 0.6240 6.1740 93.60 1.6119 4 437.0 21.20 388.08 24.16 14.00 142 | 1.62864 0.00 21.890 0 0.6240 5.0190 100.00 1.4394 4 437.0 21.20 396.90 34.41 14.40 143 | 3.32105 0.00 19.580 1 0.8710 5.4030 100.00 1.3216 5 403.0 14.70 396.90 26.82 13.40 144 | 4.09740 0.00 19.580 0 0.8710 5.4680 100.00 1.4118 5 403.0 14.70 396.90 26.42 15.60 145 | 2.77974 0.00 19.580 0 0.8710 4.9030 97.80 1.3459 5 403.0 14.70 396.90 29.29 11.80 146 | 2.37934 0.00 19.580 0 0.8710 6.1300 100.00 1.4191 5 403.0 14.70 172.91 27.80 13.80 147 | 2.15505 0.00 19.580 0 0.8710 5.6280 100.00 1.5166 5 403.0 14.70 169.27 16.65 15.60 148 | 2.36862 0.00 19.580 0 0.8710 4.9260 95.70 1.4608 5 403.0 14.70 391.71 29.53 14.60 149 | 2.33099 0.00 19.580 0 0.8710 5.1860 93.80 1.5296 5 403.0 14.70 356.99 28.32 17.80 150 | 2.73397 0.00 19.580 0 0.8710 5.5970 94.90 1.5257 5 403.0 14.70 351.85 21.45 15.40 151 | 1.65660 0.00 19.580 0 0.8710 6.1220 97.30 1.6180 5 403.0 14.70 372.80 14.10 21.50 152 | 1.49632 0.00 19.580 0 0.8710 5.4040 100.00 1.5916 5 403.0 14.70 341.60 13.28 19.60 153 | 1.12658 0.00 19.580 1 0.8710 5.0120 88.00 1.6102 5 403.0 14.70 343.28 12.12 15.30 154 | 2.14918 0.00 19.580 0 0.8710 5.7090 98.50 1.6232 5 403.0 14.70 261.95 15.79 19.40 155 | 1.41385 0.00 19.580 1 0.8710 6.1290 96.00 1.7494 5 403.0 14.70 321.02 15.12 17.00 156 | 3.53501 0.00 19.580 1 0.8710 6.1520 82.60 1.7455 5 403.0 14.70 88.01 
15.02 15.60 157 | 2.44668 0.00 19.580 0 0.8710 5.2720 94.00 1.7364 5 403.0 14.70 88.63 16.14 13.10 158 | 1.22358 0.00 19.580 0 0.6050 6.9430 97.40 1.8773 5 403.0 14.70 363.43 4.59 41.30 159 | 1.34284 0.00 19.580 0 0.6050 6.0660 100.00 1.7573 5 403.0 14.70 353.89 6.43 24.30 160 | 1.42502 0.00 19.580 0 0.8710 6.5100 100.00 1.7659 5 403.0 14.70 364.31 7.39 23.30 161 | 1.27346 0.00 19.580 1 0.6050 6.2500 92.60 1.7984 5 403.0 14.70 338.92 5.50 27.00 162 | 1.46336 0.00 19.580 0 0.6050 7.4890 90.80 1.9709 5 403.0 14.70 374.43 1.73 50.00 163 | 1.83377 0.00 19.580 1 0.6050 7.8020 98.20 2.0407 5 403.0 14.70 389.61 1.92 50.00 164 | 1.51902 0.00 19.580 1 0.6050 8.3750 93.90 2.1620 5 403.0 14.70 388.45 3.32 50.00 165 | 2.24236 0.00 19.580 0 0.6050 5.8540 91.80 2.4220 5 403.0 14.70 395.11 11.64 22.70 166 | 2.92400 0.00 19.580 0 0.6050 6.1010 93.00 2.2834 5 403.0 14.70 240.16 9.81 25.00 167 | 2.01019 0.00 19.580 0 0.6050 7.9290 96.20 2.0459 5 403.0 14.70 369.30 3.70 50.00 168 | 1.80028 0.00 19.580 0 0.6050 5.8770 79.20 2.4259 5 403.0 14.70 227.61 12.14 23.80 169 | 2.30040 0.00 19.580 0 0.6050 6.3190 96.10 2.1000 5 403.0 14.70 297.09 11.10 23.80 170 | 2.44953 0.00 19.580 0 0.6050 6.4020 95.20 2.2625 5 403.0 14.70 330.04 11.32 22.30 171 | 1.20742 0.00 19.580 0 0.6050 5.8750 94.60 2.4259 5 403.0 14.70 292.29 14.43 17.40 172 | 2.31390 0.00 19.580 0 0.6050 5.8800 97.30 2.3887 5 403.0 14.70 348.13 12.03 19.10 173 | 0.13914 0.00 4.050 0 0.5100 5.5720 88.50 2.5961 5 296.0 16.60 396.90 14.69 23.10 174 | 0.09178 0.00 4.050 0 0.5100 6.4160 84.10 2.6463 5 296.0 16.60 395.50 9.04 23.60 175 | 0.08447 0.00 4.050 0 0.5100 5.8590 68.70 2.7019 5 296.0 16.60 393.23 9.64 22.60 176 | 0.06664 0.00 4.050 0 0.5100 6.5460 33.10 3.1323 5 296.0 16.60 390.96 5.33 29.40 177 | 0.07022 0.00 4.050 0 0.5100 6.0200 47.20 3.5549 5 296.0 16.60 393.23 10.11 23.20 178 | 0.05425 0.00 4.050 0 0.5100 6.3150 73.40 3.3175 5 296.0 16.60 395.60 6.29 24.60 179 | 0.06642 0.00 4.050 0 0.5100 6.8600 74.40 2.9153 5 296.0 16.60 391.27 6.92 29.90 180 | 0.05780 0.00 2.460 0 0.4880 6.9800 58.40 2.8290 3 193.0 17.80 396.90 5.04 37.20 181 | 0.06588 0.00 2.460 0 0.4880 7.7650 83.30 2.7410 3 193.0 17.80 395.56 7.56 39.80 182 | 0.06888 0.00 2.460 0 0.4880 6.1440 62.20 2.5979 3 193.0 17.80 396.90 9.45 36.20 183 | 0.09103 0.00 2.460 0 0.4880 7.1550 92.20 2.7006 3 193.0 17.80 394.12 4.82 37.90 184 | 0.10008 0.00 2.460 0 0.4880 6.5630 95.60 2.8470 3 193.0 17.80 396.90 5.68 32.50 185 | 0.08308 0.00 2.460 0 0.4880 5.6040 89.80 2.9879 3 193.0 17.80 391.00 13.98 26.40 186 | 0.06047 0.00 2.460 0 0.4880 6.1530 68.80 3.2797 3 193.0 17.80 387.11 13.15 29.60 187 | 0.05602 0.00 2.460 0 0.4880 7.8310 53.60 3.1992 3 193.0 17.80 392.63 4.45 50.00 188 | 0.07875 45.00 3.440 0 0.4370 6.7820 41.10 3.7886 5 398.0 15.20 393.87 6.68 32.00 189 | 0.12579 45.00 3.440 0 0.4370 6.5560 29.10 4.5667 5 398.0 15.20 382.84 4.56 29.80 190 | 0.08370 45.00 3.440 0 0.4370 7.1850 38.90 4.5667 5 398.0 15.20 396.90 5.39 34.90 191 | 0.09068 45.00 3.440 0 0.4370 6.9510 21.50 6.4798 5 398.0 15.20 377.68 5.10 37.00 192 | 0.06911 45.00 3.440 0 0.4370 6.7390 30.80 6.4798 5 398.0 15.20 389.71 4.69 30.50 193 | 0.08664 45.00 3.440 0 0.4370 7.1780 26.30 6.4798 5 398.0 15.20 390.49 2.87 36.40 194 | 0.02187 60.00 2.930 0 0.4010 6.8000 9.90 6.2196 1 265.0 15.60 393.37 5.03 31.10 195 | 0.01439 60.00 2.930 0 0.4010 6.6040 18.80 6.2196 1 265.0 15.60 376.70 4.38 29.10 196 | 0.01381 80.00 0.460 0 0.4220 7.8750 32.00 5.6484 4 255.0 14.40 394.23 2.97 50.00 197 | 0.04011 80.00 1.520 0 0.4040 7.2870 34.10 7.3090 2 329.0 
12.60 396.90 4.08 33.30 198 | 0.04666 80.00 1.520 0 0.4040 7.1070 36.60 7.3090 2 329.0 12.60 354.31 8.61 30.30 199 | 0.03768 80.00 1.520 0 0.4040 7.2740 38.30 7.3090 2 329.0 12.60 392.20 6.62 34.60 200 | 0.03150 95.00 1.470 0 0.4030 6.9750 15.30 7.6534 3 402.0 17.00 396.90 4.56 34.90 201 | 0.01778 95.00 1.470 0 0.4030 7.1350 13.90 7.6534 3 402.0 17.00 384.30 4.45 32.90 202 | 0.03445 82.50 2.030 0 0.4150 6.1620 38.40 6.2700 2 348.0 14.70 393.77 7.43 24.10 203 | 0.02177 82.50 2.030 0 0.4150 7.6100 15.70 6.2700 2 348.0 14.70 395.38 3.11 42.30 204 | 0.03510 95.00 2.680 0 0.4161 7.8530 33.20 5.1180 4 224.0 14.70 392.78 3.81 48.50 205 | 0.02009 95.00 2.680 0 0.4161 8.0340 31.90 5.1180 4 224.0 14.70 390.55 2.88 50.00 206 | 0.13642 0.00 10.590 0 0.4890 5.8910 22.30 3.9454 4 277.0 18.60 396.90 10.87 22.60 207 | 0.22969 0.00 10.590 0 0.4890 6.3260 52.50 4.3549 4 277.0 18.60 394.87 10.97 24.40 208 | 0.25199 0.00 10.590 0 0.4890 5.7830 72.70 4.3549 4 277.0 18.60 389.43 18.06 22.50 209 | 0.13587 0.00 10.590 1 0.4890 6.0640 59.10 4.2392 4 277.0 18.60 381.32 14.66 24.40 210 | 0.43571 0.00 10.590 1 0.4890 5.3440 100.00 3.8750 4 277.0 18.60 396.90 23.09 20.00 211 | 0.17446 0.00 10.590 1 0.4890 5.9600 92.10 3.8771 4 277.0 18.60 393.25 17.27 21.70 212 | 0.37578 0.00 10.590 1 0.4890 5.4040 88.60 3.6650 4 277.0 18.60 395.24 23.98 19.30 213 | 0.21719 0.00 10.590 1 0.4890 5.8070 53.80 3.6526 4 277.0 18.60 390.94 16.03 22.40 214 | 0.14052 0.00 10.590 0 0.4890 6.3750 32.30 3.9454 4 277.0 18.60 385.81 9.38 28.10 215 | 0.28955 0.00 10.590 0 0.4890 5.4120 9.80 3.5875 4 277.0 18.60 348.93 29.55 23.70 216 | 0.19802 0.00 10.590 0 0.4890 6.1820 42.40 3.9454 4 277.0 18.60 393.63 9.47 25.00 217 | 0.04560 0.00 13.890 1 0.5500 5.8880 56.00 3.1121 5 276.0 16.40 392.80 13.51 23.30 218 | 0.07013 0.00 13.890 0 0.5500 6.6420 85.10 3.4211 5 276.0 16.40 392.78 9.69 28.70 219 | 0.11069 0.00 13.890 1 0.5500 5.9510 93.80 2.8893 5 276.0 16.40 396.90 17.92 21.50 220 | 0.11425 0.00 13.890 1 0.5500 6.3730 92.40 3.3633 5 276.0 16.40 393.74 10.50 23.00 221 | 0.35809 0.00 6.200 1 0.5070 6.9510 88.50 2.8617 8 307.0 17.40 391.70 9.71 26.70 222 | 0.40771 0.00 6.200 1 0.5070 6.1640 91.30 3.0480 8 307.0 17.40 395.24 21.46 21.70 223 | 0.62356 0.00 6.200 1 0.5070 6.8790 77.70 3.2721 8 307.0 17.40 390.39 9.93 27.50 224 | 0.61470 0.00 6.200 0 0.5070 6.6180 80.80 3.2721 8 307.0 17.40 396.90 7.60 30.10 225 | 0.31533 0.00 6.200 0 0.5040 8.2660 78.30 2.8944 8 307.0 17.40 385.05 4.14 44.80 226 | 0.52693 0.00 6.200 0 0.5040 8.7250 83.00 2.8944 8 307.0 17.40 382.00 4.63 50.00 227 | 0.38214 0.00 6.200 0 0.5040 8.0400 86.50 3.2157 8 307.0 17.40 387.38 3.13 37.60 228 | 0.41238 0.00 6.200 0 0.5040 7.1630 79.90 3.2157 8 307.0 17.40 372.08 6.36 31.60 229 | 0.29819 0.00 6.200 0 0.5040 7.6860 17.00 3.3751 8 307.0 17.40 377.51 3.92 46.70 230 | 0.44178 0.00 6.200 0 0.5040 6.5520 21.40 3.3751 8 307.0 17.40 380.34 3.76 31.50 231 | 0.53700 0.00 6.200 0 0.5040 5.9810 68.10 3.6715 8 307.0 17.40 378.35 11.65 24.30 232 | 0.46296 0.00 6.200 0 0.5040 7.4120 76.90 3.6715 8 307.0 17.40 376.14 5.25 31.70 233 | 0.57529 0.00 6.200 0 0.5070 8.3370 73.30 3.8384 8 307.0 17.40 385.91 2.47 41.70 234 | 0.33147 0.00 6.200 0 0.5070 8.2470 70.40 3.6519 8 307.0 17.40 378.95 3.95 48.30 235 | 0.44791 0.00 6.200 1 0.5070 6.7260 66.50 3.6519 8 307.0 17.40 360.20 8.05 29.00 236 | 0.33045 0.00 6.200 0 0.5070 6.0860 61.50 3.6519 8 307.0 17.40 376.75 10.88 24.00 237 | 0.52058 0.00 6.200 1 0.5070 6.6310 76.50 4.1480 8 307.0 17.40 388.45 9.54 25.10 238 | 0.51183 0.00 6.200 0 0.5070 7.3580 71.60 
4.1480 8 307.0 17.40 390.07 4.73 31.50 239 | 0.08244 30.00 4.930 0 0.4280 6.4810 18.50 6.1899 6 300.0 16.60 379.41 6.36 23.70 240 | 0.09252 30.00 4.930 0 0.4280 6.6060 42.20 6.1899 6 300.0 16.60 383.78 7.37 23.30 241 | 0.11329 30.00 4.930 0 0.4280 6.8970 54.30 6.3361 6 300.0 16.60 391.25 11.38 22.00 242 | 0.10612 30.00 4.930 0 0.4280 6.0950 65.10 6.3361 6 300.0 16.60 394.62 12.40 20.10 243 | 0.10290 30.00 4.930 0 0.4280 6.3580 52.90 7.0355 6 300.0 16.60 372.75 11.22 22.20 244 | 0.12757 30.00 4.930 0 0.4280 6.3930 7.80 7.0355 6 300.0 16.60 374.71 5.19 23.70 245 | 0.20608 22.00 5.860 0 0.4310 5.5930 76.50 7.9549 7 330.0 19.10 372.49 12.50 17.60 246 | 0.19133 22.00 5.860 0 0.4310 5.6050 70.20 7.9549 7 330.0 19.10 389.13 18.46 18.50 247 | 0.33983 22.00 5.860 0 0.4310 6.1080 34.90 8.0555 7 330.0 19.10 390.18 9.16 24.30 248 | 0.19657 22.00 5.860 0 0.4310 6.2260 79.20 8.0555 7 330.0 19.10 376.14 10.15 20.50 249 | 0.16439 22.00 5.860 0 0.4310 6.4330 49.10 7.8265 7 330.0 19.10 374.71 9.52 24.50 250 | 0.19073 22.00 5.860 0 0.4310 6.7180 17.50 7.8265 7 330.0 19.10 393.74 6.56 26.20 251 | 0.14030 22.00 5.860 0 0.4310 6.4870 13.00 7.3967 7 330.0 19.10 396.28 5.90 24.40 252 | 0.21409 22.00 5.860 0 0.4310 6.4380 8.90 7.3967 7 330.0 19.10 377.07 3.59 24.80 253 | 0.08221 22.00 5.860 0 0.4310 6.9570 6.80 8.9067 7 330.0 19.10 386.09 3.53 29.60 254 | 0.36894 22.00 5.860 0 0.4310 8.2590 8.40 8.9067 7 330.0 19.10 396.90 3.54 42.80 255 | 0.04819 80.00 3.640 0 0.3920 6.1080 32.00 9.2203 1 315.0 16.40 392.89 6.57 21.90 256 | 0.03548 80.00 3.640 0 0.3920 5.8760 19.10 9.2203 1 315.0 16.40 395.18 9.25 20.90 257 | 0.01538 90.00 3.750 0 0.3940 7.4540 34.20 6.3361 3 244.0 15.90 386.34 3.11 44.00 258 | 0.61154 20.00 3.970 0 0.6470 8.7040 86.90 1.8010 5 264.0 13.00 389.70 5.12 50.00 259 | 0.66351 20.00 3.970 0 0.6470 7.3330 100.00 1.8946 5 264.0 13.00 383.29 7.79 36.00 260 | 0.65665 20.00 3.970 0 0.6470 6.8420 100.00 2.0107 5 264.0 13.00 391.93 6.90 30.10 261 | 0.54011 20.00 3.970 0 0.6470 7.2030 81.80 2.1121 5 264.0 13.00 392.80 9.59 33.80 262 | 0.53412 20.00 3.970 0 0.6470 7.5200 89.40 2.1398 5 264.0 13.00 388.37 7.26 43.10 263 | 0.52014 20.00 3.970 0 0.6470 8.3980 91.50 2.2885 5 264.0 13.00 386.86 5.91 48.80 264 | 0.82526 20.00 3.970 0 0.6470 7.3270 94.50 2.0788 5 264.0 13.00 393.42 11.25 31.00 265 | 0.55007 20.00 3.970 0 0.6470 7.2060 91.60 1.9301 5 264.0 13.00 387.89 8.10 36.50 266 | 0.76162 20.00 3.970 0 0.6470 5.5600 62.80 1.9865 5 264.0 13.00 392.40 10.45 22.80 267 | 0.78570 20.00 3.970 0 0.6470 7.0140 84.60 2.1329 5 264.0 13.00 384.07 14.79 30.70 268 | 0.57834 20.00 3.970 0 0.5750 8.2970 67.00 2.4216 5 264.0 13.00 384.54 7.44 50.00 269 | 0.54050 20.00 3.970 0 0.5750 7.4700 52.60 2.8720 5 264.0 13.00 390.30 3.16 43.50 270 | 0.09065 20.00 6.960 1 0.4640 5.9200 61.50 3.9175 3 223.0 18.60 391.34 13.65 20.70 271 | 0.29916 20.00 6.960 0 0.4640 5.8560 42.10 4.4290 3 223.0 18.60 388.65 13.00 21.10 272 | 0.16211 20.00 6.960 0 0.4640 6.2400 16.30 4.4290 3 223.0 18.60 396.90 6.59 25.20 273 | 0.11460 20.00 6.960 0 0.4640 6.5380 58.70 3.9175 3 223.0 18.60 394.96 7.73 24.40 274 | 0.22188 20.00 6.960 1 0.4640 7.6910 51.80 4.3665 3 223.0 18.60 390.77 6.58 35.20 275 | 0.05644 40.00 6.410 1 0.4470 6.7580 32.90 4.0776 4 254.0 17.60 396.90 3.53 32.40 276 | 0.09604 40.00 6.410 0 0.4470 6.8540 42.80 4.2673 4 254.0 17.60 396.90 2.98 32.00 277 | 0.10469 40.00 6.410 1 0.4470 7.2670 49.00 4.7872 4 254.0 17.60 389.25 6.05 33.20 278 | 0.06127 40.00 6.410 1 0.4470 6.8260 27.60 4.8628 4 254.0 17.60 393.45 4.16 33.10 279 | 0.07978 40.00 6.410 
0 0.4470 6.4820 32.10 4.1403 4 254.0 17.60 396.90 7.19 29.10 280 | 0.21038 20.00 3.330 0 0.4429 6.8120 32.20 4.1007 5 216.0 14.90 396.90 4.85 35.10 281 | 0.03578 20.00 3.330 0 0.4429 7.8200 64.50 4.6947 5 216.0 14.90 387.31 3.76 45.40 282 | 0.03705 20.00 3.330 0 0.4429 6.9680 37.20 5.2447 5 216.0 14.90 392.23 4.59 35.40 283 | 0.06129 20.00 3.330 1 0.4429 7.6450 49.70 5.2119 5 216.0 14.90 377.07 3.01 46.00 284 | 0.01501 90.00 1.210 1 0.4010 7.9230 24.80 5.8850 1 198.0 13.60 395.52 3.16 50.00 285 | 0.00906 90.00 2.970 0 0.4000 7.0880 20.80 7.3073 1 285.0 15.30 394.72 7.85 32.20 286 | 0.01096 55.00 2.250 0 0.3890 6.4530 31.90 7.3073 1 300.0 15.30 394.72 8.23 22.00 287 | 0.01965 80.00 1.760 0 0.3850 6.2300 31.50 9.0892 1 241.0 18.20 341.60 12.93 20.10 288 | 0.03871 52.50 5.320 0 0.4050 6.2090 31.30 7.3172 6 293.0 16.60 396.90 7.14 23.20 289 | 0.04590 52.50 5.320 0 0.4050 6.3150 45.60 7.3172 6 293.0 16.60 396.90 7.60 22.30 290 | 0.04297 52.50 5.320 0 0.4050 6.5650 22.90 7.3172 6 293.0 16.60 371.72 9.51 24.80 291 | 0.03502 80.00 4.950 0 0.4110 6.8610 27.90 5.1167 4 245.0 19.20 396.90 3.33 28.50 292 | 0.07886 80.00 4.950 0 0.4110 7.1480 27.70 5.1167 4 245.0 19.20 396.90 3.56 37.30 293 | 0.03615 80.00 4.950 0 0.4110 6.6300 23.40 5.1167 4 245.0 19.20 396.90 4.70 27.90 294 | 0.08265 0.00 13.920 0 0.4370 6.1270 18.40 5.5027 4 289.0 16.00 396.90 8.58 23.90 295 | 0.08199 0.00 13.920 0 0.4370 6.0090 42.30 5.5027 4 289.0 16.00 396.90 10.40 21.70 296 | 0.12932 0.00 13.920 0 0.4370 6.6780 31.10 5.9604 4 289.0 16.00 396.90 6.27 28.60 297 | 0.05372 0.00 13.920 0 0.4370 6.5490 51.00 5.9604 4 289.0 16.00 392.85 7.39 27.10 298 | 0.14103 0.00 13.920 0 0.4370 5.7900 58.00 6.3200 4 289.0 16.00 396.90 15.84 20.30 299 | 0.06466 70.00 2.240 0 0.4000 6.3450 20.10 7.8278 5 358.0 14.80 368.24 4.97 22.50 300 | 0.05561 70.00 2.240 0 0.4000 7.0410 10.00 7.8278 5 358.0 14.80 371.58 4.74 29.00 301 | 0.04417 70.00 2.240 0 0.4000 6.8710 47.40 7.8278 5 358.0 14.80 390.86 6.07 24.80 302 | 0.03537 34.00 6.090 0 0.4330 6.5900 40.40 5.4917 7 329.0 16.10 395.75 9.50 22.00 303 | 0.09266 34.00 6.090 0 0.4330 6.4950 18.40 5.4917 7 329.0 16.10 383.61 8.67 26.40 304 | 0.10000 34.00 6.090 0 0.4330 6.9820 17.70 5.4917 7 329.0 16.10 390.43 4.86 33.10 305 | 0.05515 33.00 2.180 0 0.4720 7.2360 41.10 4.0220 7 222.0 18.40 393.68 6.93 36.10 306 | 0.05479 33.00 2.180 0 0.4720 6.6160 58.10 3.3700 7 222.0 18.40 393.36 8.93 28.40 307 | 0.07503 33.00 2.180 0 0.4720 7.4200 71.90 3.0992 7 222.0 18.40 396.90 6.47 33.40 308 | 0.04932 33.00 2.180 0 0.4720 6.8490 70.30 3.1827 7 222.0 18.40 396.90 7.53 28.20 309 | 0.49298 0.00 9.900 0 0.5440 6.6350 82.50 3.3175 4 304.0 18.40 396.90 4.54 22.80 310 | 0.34940 0.00 9.900 0 0.5440 5.9720 76.70 3.1025 4 304.0 18.40 396.24 9.97 20.30 311 | 2.63548 0.00 9.900 0 0.5440 4.9730 37.80 2.5194 4 304.0 18.40 350.45 12.64 16.10 312 | 0.79041 0.00 9.900 0 0.5440 6.1220 52.80 2.6403 4 304.0 18.40 396.90 5.98 22.10 313 | 0.26169 0.00 9.900 0 0.5440 6.0230 90.40 2.8340 4 304.0 18.40 396.30 11.72 19.40 314 | 0.26938 0.00 9.900 0 0.5440 6.2660 82.80 3.2628 4 304.0 18.40 393.39 7.90 21.60 315 | 0.36920 0.00 9.900 0 0.5440 6.5670 87.30 3.6023 4 304.0 18.40 395.69 9.28 23.80 316 | 0.25356 0.00 9.900 0 0.5440 5.7050 77.70 3.9450 4 304.0 18.40 396.42 11.50 16.20 317 | 0.31827 0.00 9.900 0 0.5440 5.9140 83.20 3.9986 4 304.0 18.40 390.70 18.33 17.80 318 | 0.24522 0.00 9.900 0 0.5440 5.7820 71.70 4.0317 4 304.0 18.40 396.90 15.94 19.80 319 | 0.40202 0.00 9.900 0 0.5440 6.3820 67.20 3.5325 4 304.0 18.40 395.21 10.36 23.10 320 | 0.47547 
0.00 9.900 0 0.5440 6.1130 58.80 4.0019 4 304.0 18.40 396.23 12.73 21.00 321 | 0.16760 0.00 7.380 0 0.4930 6.4260 52.30 4.5404 5 287.0 19.60 396.90 7.20 23.80 322 | 0.18159 0.00 7.380 0 0.4930 6.3760 54.30 4.5404 5 287.0 19.60 396.90 6.87 23.10 323 | 0.35114 0.00 7.380 0 0.4930 6.0410 49.90 4.7211 5 287.0 19.60 396.90 7.70 20.40 324 | 0.28392 0.00 7.380 0 0.4930 5.7080 74.30 4.7211 5 287.0 19.60 391.13 11.74 18.50 325 | 0.34109 0.00 7.380 0 0.4930 6.4150 40.10 4.7211 5 287.0 19.60 396.90 6.12 25.00 326 | 0.19186 0.00 7.380 0 0.4930 6.4310 14.70 5.4159 5 287.0 19.60 393.68 5.08 24.60 327 | 0.30347 0.00 7.380 0 0.4930 6.3120 28.90 5.4159 5 287.0 19.60 396.90 6.15 23.00 328 | 0.24103 0.00 7.380 0 0.4930 6.0830 43.70 5.4159 5 287.0 19.60 396.90 12.79 22.20 329 | 0.06617 0.00 3.240 0 0.4600 5.8680 25.80 5.2146 4 430.0 16.90 382.44 9.97 19.30 330 | 0.06724 0.00 3.240 0 0.4600 6.3330 17.20 5.2146 4 430.0 16.90 375.21 7.34 22.60 331 | 0.04544 0.00 3.240 0 0.4600 6.1440 32.20 5.8736 4 430.0 16.90 368.57 9.09 19.80 332 | 0.05023 35.00 6.060 0 0.4379 5.7060 28.40 6.6407 1 304.0 16.90 394.02 12.43 17.10 333 | 0.03466 35.00 6.060 0 0.4379 6.0310 23.30 6.6407 1 304.0 16.90 362.25 7.83 19.40 334 | 0.05083 0.00 5.190 0 0.5150 6.3160 38.10 6.4584 5 224.0 20.20 389.71 5.68 22.20 335 | 0.03738 0.00 5.190 0 0.5150 6.3100 38.50 6.4584 5 224.0 20.20 389.40 6.75 20.70 336 | 0.03961 0.00 5.190 0 0.5150 6.0370 34.50 5.9853 5 224.0 20.20 396.90 8.01 21.10 337 | 0.03427 0.00 5.190 0 0.5150 5.8690 46.30 5.2311 5 224.0 20.20 396.90 9.80 19.50 338 | 0.03041 0.00 5.190 0 0.5150 5.8950 59.60 5.6150 5 224.0 20.20 394.81 10.56 18.50 339 | 0.03306 0.00 5.190 0 0.5150 6.0590 37.30 4.8122 5 224.0 20.20 396.14 8.51 20.60 340 | 0.05497 0.00 5.190 0 0.5150 5.9850 45.40 4.8122 5 224.0 20.20 396.90 9.74 19.00 341 | 0.06151 0.00 5.190 0 0.5150 5.9680 58.50 4.8122 5 224.0 20.20 396.90 9.29 18.70 342 | 0.01301 35.00 1.520 0 0.4420 7.2410 49.30 7.0379 1 284.0 15.50 394.74 5.49 32.70 343 | 0.02498 0.00 1.890 0 0.5180 6.5400 59.70 6.2669 1 422.0 15.90 389.96 8.65 16.50 344 | 0.02543 55.00 3.780 0 0.4840 6.6960 56.40 5.7321 5 370.0 17.60 396.90 7.18 23.90 345 | 0.03049 55.00 3.780 0 0.4840 6.8740 28.10 6.4654 5 370.0 17.60 387.97 4.61 31.20 346 | 0.03113 0.00 4.390 0 0.4420 6.0140 48.50 8.0136 3 352.0 18.80 385.64 10.53 17.50 347 | 0.06162 0.00 4.390 0 0.4420 5.8980 52.30 8.0136 3 352.0 18.80 364.61 12.67 17.20 348 | 0.01870 85.00 4.150 0 0.4290 6.5160 27.70 8.5353 4 351.0 17.90 392.43 6.36 23.10 349 | 0.01501 80.00 2.010 0 0.4350 6.6350 29.70 8.3440 4 280.0 17.00 390.94 5.99 24.50 350 | 0.02899 40.00 1.250 0 0.4290 6.9390 34.50 8.7921 1 335.0 19.70 389.85 5.89 26.60 351 | 0.06211 40.00 1.250 0 0.4290 6.4900 44.40 8.7921 1 335.0 19.70 396.90 5.98 22.90 352 | 0.07950 60.00 1.690 0 0.4110 6.5790 35.90 10.7103 4 411.0 18.30 370.78 5.49 24.10 353 | 0.07244 60.00 1.690 0 0.4110 5.8840 18.50 10.7103 4 411.0 18.30 392.33 7.79 18.60 354 | 0.01709 90.00 2.020 0 0.4100 6.7280 36.10 12.1265 5 187.0 17.00 384.46 4.50 30.10 355 | 0.04301 80.00 1.910 0 0.4130 5.6630 21.90 10.5857 4 334.0 22.00 382.80 8.05 18.20 356 | 0.10659 80.00 1.910 0 0.4130 5.9360 19.50 10.5857 4 334.0 22.00 376.04 5.57 20.60 357 | 8.98296 0.00 18.100 1 0.7700 6.2120 97.40 2.1222 24 666.0 20.20 377.73 17.60 17.80 358 | 3.84970 0.00 18.100 1 0.7700 6.3950 91.00 2.5052 24 666.0 20.20 391.34 13.27 21.70 359 | 5.20177 0.00 18.100 1 0.7700 6.1270 83.40 2.7227 24 666.0 20.20 395.43 11.48 22.70 360 | 4.26131 0.00 18.100 0 0.7700 6.1120 81.30 2.5091 24 666.0 20.20 390.74 12.67 22.60 361 
| 4.54192 0.00 18.100 0 0.7700 6.3980 88.00 2.5182 24 666.0 20.20 374.56 7.79 25.00 362 | 3.83684 0.00 18.100 0 0.7700 6.2510 91.10 2.2955 24 666.0 20.20 350.65 14.19 19.90 363 | 3.67822 0.00 18.100 0 0.7700 5.3620 96.20 2.1036 24 666.0 20.20 380.79 10.19 20.80 364 | 4.22239 0.00 18.100 1 0.7700 5.8030 89.00 1.9047 24 666.0 20.20 353.04 14.64 16.80 365 | 3.47428 0.00 18.100 1 0.7180 8.7800 82.90 1.9047 24 666.0 20.20 354.55 5.29 21.90 366 | 4.55587 0.00 18.100 0 0.7180 3.5610 87.90 1.6132 24 666.0 20.20 354.70 7.12 27.50 367 | 3.69695 0.00 18.100 0 0.7180 4.9630 91.40 1.7523 24 666.0 20.20 316.03 14.00 21.90 368 | 13.52220 0.00 18.100 0 0.6310 3.8630 100.00 1.5106 24 666.0 20.20 131.42 13.33 23.10 369 | 4.89822 0.00 18.100 0 0.6310 4.9700 100.00 1.3325 24 666.0 20.20 375.52 3.26 50.00 370 | 5.66998 0.00 18.100 1 0.6310 6.6830 96.80 1.3567 24 666.0 20.20 375.33 3.73 50.00 371 | 6.53876 0.00 18.100 1 0.6310 7.0160 97.50 1.2024 24 666.0 20.20 392.05 2.96 50.00 372 | 9.23230 0.00 18.100 0 0.6310 6.2160 100.00 1.1691 24 666.0 20.20 366.15 9.53 50.00 373 | 8.26725 0.00 18.100 1 0.6680 5.8750 89.60 1.1296 24 666.0 20.20 347.88 8.88 50.00 374 | 11.10810 0.00 18.100 0 0.6680 4.9060 100.00 1.1742 24 666.0 20.20 396.90 34.77 13.80 375 | 18.49820 0.00 18.100 0 0.6680 4.1380 100.00 1.1370 24 666.0 20.20 396.90 37.97 13.80 376 | 19.60910 0.00 18.100 0 0.6710 7.3130 97.90 1.3163 24 666.0 20.20 396.90 13.44 15.00 377 | 15.28800 0.00 18.100 0 0.6710 6.6490 93.30 1.3449 24 666.0 20.20 363.02 23.24 13.90 378 | 9.82349 0.00 18.100 0 0.6710 6.7940 98.80 1.3580 24 666.0 20.20 396.90 21.24 13.30 379 | 23.64820 0.00 18.100 0 0.6710 6.3800 96.20 1.3861 24 666.0 20.20 396.90 23.69 13.10 380 | 17.86670 0.00 18.100 0 0.6710 6.2230 100.00 1.3861 24 666.0 20.20 393.74 21.78 10.20 381 | 88.97620 0.00 18.100 0 0.6710 6.9680 91.90 1.4165 24 666.0 20.20 396.90 17.21 10.40 382 | 15.87440 0.00 18.100 0 0.6710 6.5450 99.10 1.5192 24 666.0 20.20 396.90 21.08 10.90 383 | 9.18702 0.00 18.100 0 0.7000 5.5360 100.00 1.5804 24 666.0 20.20 396.90 23.60 11.30 384 | 7.99248 0.00 18.100 0 0.7000 5.5200 100.00 1.5331 24 666.0 20.20 396.90 24.56 12.30 385 | 20.08490 0.00 18.100 0 0.7000 4.3680 91.20 1.4395 24 666.0 20.20 285.83 30.63 8.80 386 | 16.81180 0.00 18.100 0 0.7000 5.2770 98.10 1.4261 24 666.0 20.20 396.90 30.81 7.20 387 | 24.39380 0.00 18.100 0 0.7000 4.6520 100.00 1.4672 24 666.0 20.20 396.90 28.28 10.50 388 | 22.59710 0.00 18.100 0 0.7000 5.0000 89.50 1.5184 24 666.0 20.20 396.90 31.99 7.40 389 | 14.33370 0.00 18.100 0 0.7000 4.8800 100.00 1.5895 24 666.0 20.20 372.92 30.62 10.20 390 | 8.15174 0.00 18.100 0 0.7000 5.3900 98.90 1.7281 24 666.0 20.20 396.90 20.85 11.50 391 | 6.96215 0.00 18.100 0 0.7000 5.7130 97.00 1.9265 24 666.0 20.20 394.43 17.11 15.10 392 | 5.29305 0.00 18.100 0 0.7000 6.0510 82.50 2.1678 24 666.0 20.20 378.38 18.76 23.20 393 | 11.57790 0.00 18.100 0 0.7000 5.0360 97.00 1.7700 24 666.0 20.20 396.90 25.68 9.70 394 | 8.64476 0.00 18.100 0 0.6930 6.1930 92.60 1.7912 24 666.0 20.20 396.90 15.17 13.80 395 | 13.35980 0.00 18.100 0 0.6930 5.8870 94.70 1.7821 24 666.0 20.20 396.90 16.35 12.70 396 | 8.71675 0.00 18.100 0 0.6930 6.4710 98.80 1.7257 24 666.0 20.20 391.98 17.12 13.10 397 | 5.87205 0.00 18.100 0 0.6930 6.4050 96.00 1.6768 24 666.0 20.20 396.90 19.37 12.50 398 | 7.67202 0.00 18.100 0 0.6930 5.7470 98.90 1.6334 24 666.0 20.20 393.10 19.92 8.50 399 | 38.35180 0.00 18.100 0 0.6930 5.4530 100.00 1.4896 24 666.0 20.20 396.90 30.59 5.00 400 | 9.91655 0.00 18.100 0 0.6930 5.8520 77.80 1.5004 24 666.0 20.20 
338.16 29.97 6.30 401 | 25.04610 0.00 18.100 0 0.6930 5.9870 100.00 1.5888 24 666.0 20.20 396.90 26.77 5.60 402 | 14.23620 0.00 18.100 0 0.6930 6.3430 100.00 1.5741 24 666.0 20.20 396.90 20.32 7.20 403 | 9.59571 0.00 18.100 0 0.6930 6.4040 100.00 1.6390 24 666.0 20.20 376.11 20.31 12.10 404 | 24.80170 0.00 18.100 0 0.6930 5.3490 96.00 1.7028 24 666.0 20.20 396.90 19.77 8.30 405 | 41.52920 0.00 18.100 0 0.6930 5.5310 85.40 1.6074 24 666.0 20.20 329.46 27.38 8.50 406 | 67.92080 0.00 18.100 0 0.6930 5.6830 100.00 1.4254 24 666.0 20.20 384.97 22.98 5.00 407 | 20.71620 0.00 18.100 0 0.6590 4.1380 100.00 1.1781 24 666.0 20.20 370.22 23.34 11.90 408 | 11.95110 0.00 18.100 0 0.6590 5.6080 100.00 1.2852 24 666.0 20.20 332.09 12.13 27.90 409 | 7.40389 0.00 18.100 0 0.5970 5.6170 97.90 1.4547 24 666.0 20.20 314.64 26.40 17.20 410 | 14.43830 0.00 18.100 0 0.5970 6.8520 100.00 1.4655 24 666.0 20.20 179.36 19.78 27.50 411 | 51.13580 0.00 18.100 0 0.5970 5.7570 100.00 1.4130 24 666.0 20.20 2.60 10.11 15.00 412 | 14.05070 0.00 18.100 0 0.5970 6.6570 100.00 1.5275 24 666.0 20.20 35.05 21.22 17.20 413 | 18.81100 0.00 18.100 0 0.5970 4.6280 100.00 1.5539 24 666.0 20.20 28.79 34.37 17.90 414 | 28.65580 0.00 18.100 0 0.5970 5.1550 100.00 1.5894 24 666.0 20.20 210.97 20.08 16.30 415 | 45.74610 0.00 18.100 0 0.6930 4.5190 100.00 1.6582 24 666.0 20.20 88.27 36.98 7.00 416 | 18.08460 0.00 18.100 0 0.6790 6.4340 100.00 1.8347 24 666.0 20.20 27.25 29.05 7.20 417 | 10.83420 0.00 18.100 0 0.6790 6.7820 90.80 1.8195 24 666.0 20.20 21.57 25.79 7.50 418 | 25.94060 0.00 18.100 0 0.6790 5.3040 89.10 1.6475 24 666.0 20.20 127.36 26.64 10.40 419 | 73.53410 0.00 18.100 0 0.6790 5.9570 100.00 1.8026 24 666.0 20.20 16.45 20.62 8.80 420 | 11.81230 0.00 18.100 0 0.7180 6.8240 76.50 1.7940 24 666.0 20.20 48.45 22.74 8.40 421 | 11.08740 0.00 18.100 0 0.7180 6.4110 100.00 1.8589 24 666.0 20.20 318.75 15.02 16.70 422 | 7.02259 0.00 18.100 0 0.7180 6.0060 95.30 1.8746 24 666.0 20.20 319.98 15.70 14.20 423 | 12.04820 0.00 18.100 0 0.6140 5.6480 87.60 1.9512 24 666.0 20.20 291.55 14.10 20.80 424 | 7.05042 0.00 18.100 0 0.6140 6.1030 85.10 2.0218 24 666.0 20.20 2.52 23.29 13.40 425 | 8.79212 0.00 18.100 0 0.5840 5.5650 70.60 2.0635 24 666.0 20.20 3.65 17.16 11.70 426 | 15.86030 0.00 18.100 0 0.6790 5.8960 95.40 1.9096 24 666.0 20.20 7.68 24.39 8.30 427 | 12.24720 0.00 18.100 0 0.5840 5.8370 59.70 1.9976 24 666.0 20.20 24.65 15.69 10.20 428 | 37.66190 0.00 18.100 0 0.6790 6.2020 78.70 1.8629 24 666.0 20.20 18.82 14.52 10.90 429 | 7.36711 0.00 18.100 0 0.6790 6.1930 78.10 1.9356 24 666.0 20.20 96.73 21.52 11.00 430 | 9.33889 0.00 18.100 0 0.6790 6.3800 95.60 1.9682 24 666.0 20.20 60.72 24.08 9.50 431 | 8.49213 0.00 18.100 0 0.5840 6.3480 86.10 2.0527 24 666.0 20.20 83.45 17.64 14.50 432 | 10.06230 0.00 18.100 0 0.5840 6.8330 94.30 2.0882 24 666.0 20.20 81.33 19.69 14.10 433 | 6.44405 0.00 18.100 0 0.5840 6.4250 74.80 2.2004 24 666.0 20.20 97.95 12.03 16.10 434 | 5.58107 0.00 18.100 0 0.7130 6.4360 87.90 2.3158 24 666.0 20.20 100.19 16.22 14.30 435 | 13.91340 0.00 18.100 0 0.7130 6.2080 95.00 2.2222 24 666.0 20.20 100.63 15.17 11.70 436 | 11.16040 0.00 18.100 0 0.7400 6.6290 94.60 2.1247 24 666.0 20.20 109.85 23.27 13.40 437 | 14.42080 0.00 18.100 0 0.7400 6.4610 93.30 2.0026 24 666.0 20.20 27.49 18.05 9.60 438 | 15.17720 0.00 18.100 0 0.7400 6.1520 100.00 1.9142 24 666.0 20.20 9.32 26.45 8.70 439 | 13.67810 0.00 18.100 0 0.7400 5.9350 87.90 1.8206 24 666.0 20.20 68.95 34.02 8.40 440 | 9.39063 0.00 18.100 0 0.7400 5.6270 93.90 1.8172 24 
666.0 20.20 396.90 22.88 12.80 441 | 22.05110 0.00 18.100 0 0.7400 5.8180 92.40 1.8662 24 666.0 20.20 391.45 22.11 10.50 442 | 9.72418 0.00 18.100 0 0.7400 6.4060 97.20 2.0651 24 666.0 20.20 385.96 19.52 17.10 443 | 5.66637 0.00 18.100 0 0.7400 6.2190 100.00 2.0048 24 666.0 20.20 395.69 16.59 18.40 444 | 9.96654 0.00 18.100 0 0.7400 6.4850 100.00 1.9784 24 666.0 20.20 386.73 18.85 15.40 445 | 12.80230 0.00 18.100 0 0.7400 5.8540 96.60 1.8956 24 666.0 20.20 240.52 23.79 10.80 446 | 10.67180 0.00 18.100 0 0.7400 6.4590 94.80 1.9879 24 666.0 20.20 43.06 23.98 11.80 447 | 6.28807 0.00 18.100 0 0.7400 6.3410 96.40 2.0720 24 666.0 20.20 318.01 17.79 14.90 448 | 9.92485 0.00 18.100 0 0.7400 6.2510 96.60 2.1980 24 666.0 20.20 388.52 16.44 12.60 449 | 9.32909 0.00 18.100 0 0.7130 6.1850 98.70 2.2616 24 666.0 20.20 396.90 18.13 14.10 450 | 7.52601 0.00 18.100 0 0.7130 6.4170 98.30 2.1850 24 666.0 20.20 304.21 19.31 13.00 451 | 6.71772 0.00 18.100 0 0.7130 6.7490 92.60 2.3236 24 666.0 20.20 0.32 17.44 13.40 452 | 5.44114 0.00 18.100 0 0.7130 6.6550 98.20 2.3552 24 666.0 20.20 355.29 17.73 15.20 453 | 5.09017 0.00 18.100 0 0.7130 6.2970 91.80 2.3682 24 666.0 20.20 385.09 17.27 16.10 454 | 8.24809 0.00 18.100 0 0.7130 7.3930 99.30 2.4527 24 666.0 20.20 375.87 16.74 17.80 455 | 9.51363 0.00 18.100 0 0.7130 6.7280 94.10 2.4961 24 666.0 20.20 6.68 18.71 14.90 456 | 4.75237 0.00 18.100 0 0.7130 6.5250 86.50 2.4358 24 666.0 20.20 50.92 18.13 14.10 457 | 4.66883 0.00 18.100 0 0.7130 5.9760 87.90 2.5806 24 666.0 20.20 10.48 19.01 12.70 458 | 8.20058 0.00 18.100 0 0.7130 5.9360 80.30 2.7792 24 666.0 20.20 3.50 16.94 13.50 459 | 7.75223 0.00 18.100 0 0.7130 6.3010 83.70 2.7831 24 666.0 20.20 272.21 16.23 14.90 460 | 6.80117 0.00 18.100 0 0.7130 6.0810 84.40 2.7175 24 666.0 20.20 396.90 14.70 20.00 461 | 4.81213 0.00 18.100 0 0.7130 6.7010 90.00 2.5975 24 666.0 20.20 255.23 16.42 16.40 462 | 3.69311 0.00 18.100 0 0.7130 6.3760 88.40 2.5671 24 666.0 20.20 391.43 14.65 17.70 463 | 6.65492 0.00 18.100 0 0.7130 6.3170 83.00 2.7344 24 666.0 20.20 396.90 13.99 19.50 464 | 5.82115 0.00 18.100 0 0.7130 6.5130 89.90 2.8016 24 666.0 20.20 393.82 10.29 20.20 465 | 7.83932 0.00 18.100 0 0.6550 6.2090 65.40 2.9634 24 666.0 20.20 396.90 13.22 21.40 466 | 3.16360 0.00 18.100 0 0.6550 5.7590 48.20 3.0665 24 666.0 20.20 334.40 14.13 19.90 467 | 3.77498 0.00 18.100 0 0.6550 5.9520 84.70 2.8715 24 666.0 20.20 22.01 17.15 19.00 468 | 4.42228 0.00 18.100 0 0.5840 6.0030 94.50 2.5403 24 666.0 20.20 331.29 21.32 19.10 469 | 15.57570 0.00 18.100 0 0.5800 5.9260 71.00 2.9084 24 666.0 20.20 368.74 18.13 19.10 470 | 13.07510 0.00 18.100 0 0.5800 5.7130 56.70 2.8237 24 666.0 20.20 396.90 14.76 20.10 471 | 4.34879 0.00 18.100 0 0.5800 6.1670 84.00 3.0334 24 666.0 20.20 396.90 16.29 19.90 472 | 4.03841 0.00 18.100 0 0.5320 6.2290 90.70 3.0993 24 666.0 20.20 395.33 12.87 19.60 473 | 3.56868 0.00 18.100 0 0.5800 6.4370 75.00 2.8965 24 666.0 20.20 393.37 14.36 23.20 474 | 4.64689 0.00 18.100 0 0.6140 6.9800 67.60 2.5329 24 666.0 20.20 374.68 11.66 29.80 475 | 8.05579 0.00 18.100 0 0.5840 5.4270 95.40 2.4298 24 666.0 20.20 352.58 18.14 13.80 476 | 6.39312 0.00 18.100 0 0.5840 6.1620 97.40 2.2060 24 666.0 20.20 302.76 24.10 13.30 477 | 4.87141 0.00 18.100 0 0.6140 6.4840 93.60 2.3053 24 666.0 20.20 396.21 18.68 16.70 478 | 15.02340 0.00 18.100 0 0.6140 5.3040 97.30 2.1007 24 666.0 20.20 349.48 24.91 12.00 479 | 10.23300 0.00 18.100 0 0.6140 6.1850 96.70 2.1705 24 666.0 20.20 379.70 18.03 14.60 480 | 14.33370 0.00 18.100 0 0.6140 6.2290 88.00 
1.9512 24 666.0 20.20 383.32 13.11 21.40 481 | 5.82401 0.00 18.100 0 0.5320 6.2420 64.70 3.4242 24 666.0 20.20 396.90 10.74 23.00 482 | 5.70818 0.00 18.100 0 0.5320 6.7500 74.90 3.3317 24 666.0 20.20 393.07 7.74 23.70 483 | 5.73116 0.00 18.100 0 0.5320 7.0610 77.00 3.4106 24 666.0 20.20 395.28 7.01 25.00 484 | 2.81838 0.00 18.100 0 0.5320 5.7620 40.30 4.0983 24 666.0 20.20 392.92 10.42 21.80 485 | 2.37857 0.00 18.100 0 0.5830 5.8710 41.90 3.7240 24 666.0 20.20 370.73 13.34 20.60 486 | 3.67367 0.00 18.100 0 0.5830 6.3120 51.90 3.9917 24 666.0 20.20 388.62 10.58 21.20 487 | 5.69175 0.00 18.100 0 0.5830 6.1140 79.80 3.5459 24 666.0 20.20 392.68 14.98 19.10 488 | 4.83567 0.00 18.100 0 0.5830 5.9050 53.20 3.1523 24 666.0 20.20 388.22 11.45 20.60 489 | 0.15086 0.00 27.740 0 0.6090 5.4540 92.70 1.8209 4 711.0 20.10 395.09 18.06 15.20 490 | 0.18337 0.00 27.740 0 0.6090 5.4140 98.30 1.7554 4 711.0 20.10 344.05 23.97 7.00 491 | 0.20746 0.00 27.740 0 0.6090 5.0930 98.00 1.8226 4 711.0 20.10 318.43 29.68 8.10 492 | 0.10574 0.00 27.740 0 0.6090 5.9830 98.80 1.8681 4 711.0 20.10 390.11 18.07 13.60 493 | 0.11132 0.00 27.740 0 0.6090 5.9830 83.50 2.1099 4 711.0 20.10 396.90 13.35 20.10 494 | 0.17331 0.00 9.690 0 0.5850 5.7070 54.00 2.3817 6 391.0 19.20 396.90 12.01 21.80 495 | 0.27957 0.00 9.690 0 0.5850 5.9260 42.60 2.3817 6 391.0 19.20 396.90 13.59 24.50 496 | 0.17899 0.00 9.690 0 0.5850 5.6700 28.80 2.7986 6 391.0 19.20 393.29 17.60 23.10 497 | 0.28960 0.00 9.690 0 0.5850 5.3900 72.90 2.7986 6 391.0 19.20 396.90 21.14 19.70 498 | 0.26838 0.00 9.690 0 0.5850 5.7940 70.60 2.8927 6 391.0 19.20 396.90 14.10 18.30 499 | 0.23912 0.00 9.690 0 0.5850 6.0190 65.30 2.4091 6 391.0 19.20 396.90 12.92 21.20 500 | 0.17783 0.00 9.690 0 0.5850 5.5690 73.50 2.3999 6 391.0 19.20 395.77 15.10 17.50 501 | 0.22438 0.00 9.690 0 0.5850 6.0270 79.70 2.4982 6 391.0 19.20 396.90 14.33 16.80 502 | 0.06263 0.00 11.930 0 0.5730 6.5930 69.10 2.4786 1 273.0 21.00 391.99 9.67 22.40 503 | 0.04527 0.00 11.930 0 0.5730 6.1200 76.70 2.2875 1 273.0 21.00 396.90 9.08 20.60 504 | 0.06076 0.00 11.930 0 0.5730 6.9760 91.00 2.1675 1 273.0 21.00 396.90 5.64 23.90 505 | 0.10959 0.00 11.930 0 0.5730 6.7940 89.30 2.3889 1 273.0 21.00 393.45 6.48 22.00 506 | 0.04741 0.00 11.930 0 0.5730 6.0300 80.80 2.5050 1 273.0 21.00 396.90 7.88 11.90 507 | -------------------------------------------------------------------------------- /SVM_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# Boilerplate\n", 12 | "%matplotlib inline\n", 13 | "\n", 14 | "# Intel DAAL related imports\n", 15 | "from daal.data_management import HomogenNumericTable\n", 16 | "\n", 17 | "# Helpers for getArrayFromNT and printNT. 
See utils.py\n", 18 | "from utils import *\n", 19 | "\n", 20 | "# Import numpy and matplotlib\n", 21 | "import numpy as np\n", 22 | "import matplotlib\n", 23 | "import matplotlib.pyplot as plt\n", 24 | "\n", 25 | "# Plotting configurations\n", 26 | "%config InlineBackend.figure_format = 'retina'\n", 27 | "plt.rcParams[\"figure.figsize\"] = (12, 9)" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "# Handwritten Digits Recognition with SVM" 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "metadata": {}, 40 | "source": [ 41 | "This lab is partially based on a [scikit-learn tutorial](http://scikit-learn.org/stable/auto_examples/classification/plot_digits_classification.html) on the same subject." 42 | ] 43 | }, 44 | { 45 | "cell_type": "markdown", 46 | "metadata": {}, 47 | "source": [ 48 | "### Tutorial brief\n", 49 | "This lab studies a popular and powerful classification algorithm, the _Support Vector Machine_ (SVM). We build a multi-class classifier on top of SVM using pyDAAL, and use it to tackle a famous classification problem in machine learning: handwritten digits recognition. We go on to solve the same problem using scikit-learn's Support Vector Classifier, and compare performance and classification accuracy between the pyDAAL solution and the scikit-learn solution.\n", 50 | "\n", 51 | "The lab code contains the definition of a `MultiClassSVM` class, which consists of a `train` method and a `predict` method, among other helper functions. Except for the `predict` method, all parts of the class are implemented. In the exercise, you are asked to implement the `predict` method.\n", 52 | "\n", 53 | "### Learning objectives\n", 54 | "* To understand and practice the typical code sequence of using pyDAAL for classification.\n", 55 | "* To see pyDAAL's performance advantage over scikit-learn.\n", 56 | "\n", 57 | "### SVM introduction\n", 58 | "SVM belongs to the family of generalized linear classifiers. It is a binary (two-class) classifier. It is typically used as a key ingredient of a multi-class classifier to address multi-class classification problems.\n", 59 | "\n", 60 | "According to Wikipedia [link](https://en.wikipedia.org/wiki/Support_vector_machine):\n", 61 | "\n", 62 | "> An SVM model is a representation of the examples as points in space, mapped so that the examples of the separate categories are divided by a clear gap that is as wide as possible. New examples are then mapped into that same space and predicted to belong to a category based on which side of the gap they fall on.\n", 63 | "\n", 64 | "Despite its origin as a linear classifier, SVM is often used for non-linear classification problems. The key is the _kernel function_: a kernel function maps features into a higher-dimensional feature space, enabling SVM to capture non-linear relations. The SVM in DAAL supports two kernel functions: linear and RBF (Gaussian kernel).\n", 65 | "\n", 66 | "### Multi-class classifier introduction\n", 67 | "SVM by itself is a binary (two-class) classifier. To use it on multi-class problems, DAAL employs a technique called One-Against-One. In plain language: if $K$ is the number of classes, the One-Against-One approach solves a two-class problem for each of the $K(K-1)/2$ possible pairs of the $K$ labels. 
Then, the predicted label is the one that was predicted by the majority of the two-class classifiers.\n", 68 | "\n", 69 | "### The handwritten digits dataset\n", 70 | "\n", 71 | "Scikit-learn has some functions to load popular datasets for eager learners. These datasets are available through [sklearn.datasets](http://scikit-learn.org/stable/datasets). The [load_digits](http://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_digits.html#sklearn.datasets.load_digits) method loads and returns the digits dataset. Because the dataset internally uses NumPy ndarray to store information, we can convert it to DAAL `NumericTables`, and pass them to DAAL algorithms. \n", 72 | "\n", 73 | "After loading the data, we take a quick look at the sizes and dimensions. There are 1797 samples (i.e. images of handwritten digits) in the dataset, and each sample has 64 features. Note that the images are stored in `digits.data` and the corresponding labels are stored in `digits.target`." 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 2, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [ 83 | { 84 | "name": "stdout", 85 | "output_type": "stream", 86 | "text": [ 87 | "(1797, 64)\n", 88 | "(1797,)\n" 89 | ] 90 | } 91 | ], 92 | "source": [ 93 | "from sklearn.datasets import load_digits\n", 94 | "\n", 95 | "digits = load_digits()\n", 96 | "print(digits.data.shape)\n", 97 | "print(digits.target.shape)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | "metadata": {}, 103 | "source": [ 104 | "We can visualize the first 10 images and their correct labels (ground truth). The code below is directly copied from the scikit-learn tutorial:" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": 3, 110 | "metadata": { 111 | "collapsed": true 112 | }, 113 | "outputs": [ 114 | { 115 | "data": { 116 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABWwAAANfCAYAAABNN5QRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAWJQAAFiUBSVIk8AAAIABJREFUeJzs3Xuw53dd3/HXGxZFw2U3iFiwNllEVLBELspQxWATAcuY\nBSGtOshKZxJvI0GriaKdgJch0iooU8lghwDWsQQk4KVSIixSFGu2LLVYvGBCLDdFdgMBIkI+/eP7\nO+y67tnNbs75ft/nnMdj5szZ/Z3f+X4+Z3fzzu88f9/z/dUYIwAAAAAALO8uS28AAAAAAICJYAsA\nAAAA0IRgCwAAAADQhGALAAAAANCEYAsAAAAA0IRgCwAAAADQhGALAAAAANCEYAsAAAAA0IRgCwAA\nAADQhGALAAAAANCEYAsAAAAA0IRgCwAAAADQhGALAAAAANCEYLuDVdX3VdXtVfX6DT7uq1fH/cGN\nPC7Qm5kCbCQzBdhIZgqwkcwUNptgu0lW/4Gdydublt77Bhirt22nqh5TVa+pqg9U1Ser6saq+qWq\n+pKl98b2ZqZsr5lSVV9SVU+vqhdV1e9X1SdWf19/tvTe2BnMlG03U76mqn66qn6vqv6mqj5VVR+u\nqjdX1aVVdbel98j2ZqZsu5ny1Kr6hap6W1W9d/U45daqendVvaSqHrr0HtnezJTtNVNOpKq+/ri/\nu7OX3lM3u5bewDb2wXVuPzvJ3ZLcluSWE3z8bzdtR//Y4STvTnLzBh/3fUn+NPN+LZuuqr4nyS8m\nqUwD9GNJviTJpUm+raqeMMZ4+4JbZHszU7bXTPmJJP929etx3HuYg5myTWZKVV2S5CU5OkNuT/LR\nJHuSPDbJNyS5pKq+aYyxLb5mWjJTtslMWfn5JPdf/XokOZLkXkkelOTLkjyzqp41xvilhfbH9mem\nbK+Z8g+snkhee+yy1lc4To3hz2VOVfXmTA+eXz7GeObS++GOqapHJ3lrprPSX5rkijHGkao6N8nL\nMv2dfijJl40xPrbcTtlpzJStqaquzhRR/ijJDZm+AfreJH8xxviyJffGzmambD1V9X1JfjrJK5P8\nWpI/HGN8uqrukeSSJD+Z5O5J3jTGuHC5nbITmSlbU1X9dJL/m+RtSW4eY3ymqirJw5P8TJILk3wm\nydeMMd6x3E7ZacyU7aGqnpPp8cnbkzw6U7C97xjjI4turBln2MId81OZYu1bxhjfvXbjGOPGqtqX\n6Zm1L0zyg0meu8wWgS3ku8cxz5iuggvAmfjdJL86xjh87I1jjFuT/FxV3ZbkxUm+saq+WlwBTmWM\n8ZwT3DaSHKyqi5K8J8kXJfnOJGYKcIdV1QOTPCfJnyV5QZLXLLujvlzDtqmqesjqOh4fXf3+sVX1\nutW1Uz9dVc875r6PqqoXrK4xdHNV3ba6ftn1VfX0k6yx7kWyV9c9u72qHl5V962qX6yqm1bHvrmq\nXlxV91nnuCe8SPYJvqaH13Q92A/VdD3Yd1XVj1TVXU+y57Oq6meq6s9Xn/O+qnpFVT3o+ONvlKq6\nX5LHrX77H47/+BjjSJL/nOlU/m/fyLVho5gp6+559pmSfPabHtiyzJR19zz7TBljvPv4WHucV+To\njxo+YiPXho1ipqy750Uep5zMGOO2JP9n9dv7n+y+sBQzZd09d5gpv5Tkc5N8f5JPbfJaW5pguwVU\n1f4kb0rypCSfk+nHT471e5nO7Hx0kt1JPpHp2i6PS/LyqvqvZ7Ds2oWuH5jkUKYf1T17ddsDVr9/\na1V9/kk+92Rf07ck+f0k+zJdg+ZuSb48yfMzXWLgRJ9zdpI/THJFkr2rNc5K8h1J/meSrz7JemvD\n5/aqesrJ9nYC52eKsZ9O8uZ17vOG1fsvraovPs3jw6zMlM9+zlIzBbYVM+Wzn9NypqzOtP271W/X\n/SYOujBTPvs5LWfK6s/gn69+e+NGHhs2g5ny2c9ZfKZU1bcnuSDJtWOM68/0ODuFYNvf3ZP8pyS/\nkuSfjjHuk+TzM11Hdc1vJHlakvuNMe41xjg7yT0zvaDNh5M8taouPcP1r07y3iSPHGPcK8k9klyc\n5NYkD8402E7X566+nl9N8iWr/d47ydqzXN9RVV93gs97aZKvzHTR+6cluccYY3emaym9J8kv3IG1\nz+Sstq9cvb9pjPHJde7zJye4P3Rkphy11EyB7cRMOarlTKmqr83095QcPSsOujJTjmo1U6rqPlX1\nLzOdqHK/TGfGvfTknwWLM1OOWnSmVNXuJD+X6cXbz+Tr3nEE2/7umuTAGGP/GOP9STLG+MwY46/W\n7jDGuHiM8etjjA8fc9snxhjXJHl6prNDv/cM178lyYVr1ztbrf2aTJcGqCRPPYNj7kryhjHGM8cY\n71sd9+NjjOcmObC6zz84blU9NMmTMw2Ip6++3ttXn/vOJI/P9KrIJ3PKZ6rW8U9W79+/7oGnP/u/\nP+7+0JGZksVnCmwnZkraz5SfXL1/d6azcaAzMyV9ZkpVXbp2Vl2Sv0nyxiT/ItOr2D9xjPGeO3N8\nmIGZkjYz5WeT3DfJlWt/F5ycYLs1/Mc78bm/m+nH4B5SVfc8g89/8Rjj4ye4/brV+zM9m/SqdW6/\nLtPgeuhxt6+ddv/uMcZvHf9JY4y/zXQd2RMaY7xrjHHXMcauMcavn+Zez1q9X+/s2hz38Xuc5vFh\nbmbKsjMFthszpelMqapnZ/rRw9uTPGsM189mSzBT+syUTyT5YJK/zjRHRpIPJHl2ph8jh63ATFl4\nplTVYzKdsfzHSV50up+/Uwm2W8MfnOoOVfXtVfX6qvqr1cWj154J/VSmU+YrZ3bm5w3r3P6+1fu7\nnsHguj3JwVMcd89xt391pgcI/+Mkx33rae4DdiozxUyBjWSmNJwpVfWETN/QjSRXDdeKY+swU5rM\nlDHGK8cY9x9jfFGmHyP/xiQ3J3lVktdX1edt9h5gA5gpC86UqtqV6dIQSfJ9a2f2cmq7lt4Ap3Tb\nGOMT632wqu6W6Zor35Sjp6fflulHVtYupn2/1fuzcvo+tt6+jvn13U7zmJ88yRkea8c9/phfsHr/\ngZMcd7NOq197RuxUD0jWPn7rJu0DNoKZMllypsB2YqZMWs2U1bXrXp3pR0FfOcZ4zlxrw51kpkxa\nzZQkGWN8KslbquobMr1A0ROT/FiSn5hzH3CazJTJkjPlh5M8JMk1Y4y3bdIa25IzbPs7/hUMj3dZ\npuHy0STfneQBY4yzxhj3Wz0bev9M101JpmeFOH1rg+v+692hqr4gR4fiyYYgLM1MATaSmdJMVT0q\nyW9meiL5tUmeueyO4LSYKc2twu1LM/35mi90Z6YsqKrum+THM/35Pq+qzjr2LUdfFDVJ1m4/3YC9\nbTnDdut7aqZngn50jPGPXqWzqu6e5F6z72rjrV0A/GQ/hrBZL/b1J6v351TV3ccYt53gPl95gvvD\nVmSmHOUFBOHOM1OO2vSZUlUPS/I7mV7d+g1Jvs2PHrLN
mClHLfk4Ze3Hrr/oJN8fwVZgphy1GTPl\nCzM9gfx5Sf7yFPd97+r9i5P8wCbsZctxhu3W98Wr94fW+fg3Znv8Pb8j0zNaX3eS+3z9Jq39lkxD\nfFeSx61zn29avf+LMcb/26R9wBzMlKM2a6bATmKmHLWpM6WqviLTK7jvzvTY5SljjL/fzDVhAWbK\nUUs+Tjl39f7TYi1bnJly1GbNlHGKtxPdj2yPf3g73drp+V91/AdWp5I/d97tbJrXrt5/eVU98fgP\nVtV9Mr3q4IYbY3woyZsyDbh/d4K1771aeyT5lc3YA8zITMnmzhTYYcyUbP5MqaoHZnol6/skeXuS\nJ4kobFNmSjZ3plTVXU/x8bMy/eh4krgeJVudmZLNmyljjHeNMe663luSbznm7l+wuv1ZG72PrUqw\n3fremCkk/lRVPb6qKkmq6qsy/Sjcg5P83YL72xBjjD9Ocl2mr/VXquopVXWX5LM//veGnOTfc1U9\nZO2VHqvqKWewhR/P9GqM51fVS6pq9+q4e5O8PtOFyD+U5IVncGzoxEyZYaZU1a6qus/aW46+iMFd\njr199YQQbGVmyibPlKp6QJLrMz0WeUeSJ57sBVZgizNTNv9xyiVV9WtV9YSq+uyPglfV3avqmzNF\n2gdl+t7oJ0/z2NCNmTJPT7kjXCP4OILtMjbyH+JPJ7k50xkV/y3JJ6vqliTvTPKYTBeCv3UD17uj\nNuM/tksyXR92d6ZXPr61qo5k+uZkb45e5+RkA/WMTq8fY/xhku/P9MDkkiQfrqrDSf4i048VHEny\n5DHGeq8CCZvJTDkzi82UJI/P9Oqza2/PX92+97jb33qGx4c7w0w5M0vNlB9I8s8yfU0PTPKnVfWB\ndd5+6gyOD3eWmXJmlpopd0lycZLfTnKkqo5U1Ycz/bn+ZqYzET+R5JljjDefwfHhzjJTzsyS3/tw\nBgTbZdzR63Kc8n5jjL9O8rVJfjnJ+1c3fyzJq5I8Zozx6mOOdbpr3NE9nu7tpzruCe8zxvhwpq/1\nqiTvyTTEbk3yyiRfk+Sm1V2P3Im119/UGC/JdF2X1yT560yvaHhTkquTPGwVdWEJZsqpj9luphzz\n+Xf0uk4wFzPl1MfsNFPucszn3ivTC3ys93bPMzg+3FlmyqmP2WmmXJvkezN9z/PuJJ/JNDsOJ/mD\nTGfVfvkY4xVncGzYCGbKqY/Zaaaciu951lFj+HNhe6iqZyX5+SS/Ocb4llPdH+BkzBRgI5kpwEYy\nU4CNZKb04wxbtoWq+rxMzwSPJP994e0AW5yZAmwkMwXYSGYKsJHMlJ4EW7aMqnpwVV1dVY9eDZTU\n5DGZXhn5QZkuVfDKJfcJbA1mCrCRzBRgI5kpwEYyU7Yel0Rgy1i9euE7jrnpcJLPT/K5mZ4JujXJ\nRWOMA/PvDthqzBRgI5kpwEYyU4CNZKZsPYItW0ZV3TPJpUm+KdOzP/fNNFj+KtNp+z8/xnjvcjsE\nthIzBdhIZgqwkcwUYCOZKVuPYAsAAAAA0IRr2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAA\nNLFr6Q0cZ0e8Atq11147+5qXX3757GteeOGFs6/5/Oc/f/Y19+zZM/uaC6mlN3AGdsRMWcL5558/\n+5pHjhyZfc0rr7xy9jX37ds3+5oLMVP4rAMHDsy+5hL/rZ133nmzr7nEn+1CzJSmrrrqqtnXvOKK\nK2Zf89xzz519zYMHD86+pu99WtsRM2UJS3wfsn///tnXvO6662Zfcwc57ZniDFsAAAAAgCYEWwAA\nAACAJgRbAAAAAIAmBFsAAAAAgCYEWwAAAACAJgRbAAAAAIAmBFsAAAAAgCYEWwAAAACAJgRbAAAA\nAIAmBFsAAAAAgCYEWwAAAACAJgRbAAAAAIAmBFsAAAAAgCYEWwAAAACAJgRbAAAAAIAmBFsAAAAA\ngCYEWwAAAACAJgRbAAAAAIAmBFsAAAAAgCYEWwAAAACAJgRbAAAAAIAmBFsAAAAAgCYEWwAAAACA\nJgRbAAAAAIAmBFsAAAAAgCYEWwAAAACAJgRbAAAAAIAmdi29gZ3o8ssvn33NG2+8cfY1Dx8+PPua\nZ5999uxrvupVr5p9zac97WmzrwnH2r179+xrvuUtb5l9zQMHDsy+5r59+2ZfE4516NCh2dd83OMe\nN/ua9773vWdf86abbpp9TTjWFVdcMfuaSzxWvvrqq2df89JLL519zYMHD86+5gUXXDD7mrC0a665\nZvY1zzvvvNnXpBdn2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAA\nADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAA\nNCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0\nIdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh\n2AIAAAAANLFr6Q0s7eDBg7OveeONN86+5nve857Z19y7d+/sa1544YWzr7nEv6GnPe1ps69JX4cO\nHZp9zQMHDsy+5hLOO++8pbcAs7vuuutmX/NhD3vY7Gvu27dv9jWf+9znzr4mHOuSSy6Zfc3LL798\n9jUf8YhHzL7mueeeO/uaF1xwwexrwtKOHDky+5rXXHPN7Gtedtlls6950003zb7mEs4555ylt3CH\nOMMWAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEWAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEWAAAAAKAJ\nwRYAAAAAoAnBFgAAAACgCcEWAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEWAAAAAKAJwRYAAAAAoAnB\nFgAAAACgCcEWAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEWAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEW\nAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEWAAAAAKAJwRYAAAAAoAnBFgAAAACgCcEWAAAAAKCJXUtv\nYGmHDx+efc2HP/zhs6+5d+/e2ddcwiMe8Yilt8AO98IXvnD2Na+88srZ17zllltmX3MJ559//tJb\ngNlddtlls695zjnnzL7mEl/nRRddNPuacKwlvif4y7/8y9nXvPHGG2df84ILLph9zSW+l92zZ8/s\na8KxrrnmmtnXvOmmm2Zfc//+/bOvucRjo927d8++5hLfP58JZ9gCAAAAADQh2AIAAAAANCHYAgAA\nAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAA\nADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAA\nNCHYAgAAAAA
0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0\nIdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADSxa+kNLO3w4cOzr3nhhRfOvuZOscTf5549\ne2Zfk74uu+yy2dfcv3//7GvulH/3R44cWXoL7HBL/Bt84QtfOPua11133exrLuGaa65Zegswu717\n986+5kc+8pHZ17zgggt2xJrXX3/97GvulMedW9ES//9+9rOfPfuaz3jGM2ZfcwkvetGLZl/zZS97\n2exrbhXOsAUAAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUAAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUA\nAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUAAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUAAAAAaEKwBQAA\nAABoQrAFAAAAAGhCsAUAAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUAAAAAaEKwBQAAAABoQrAFAAAA\nAGhCsAUAAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUAAAAAaEKwBQAAAABoQrAFAAAAAGhCsAUAAAAA\naGLX0htY2p49e2Zf8+DBg7OvuYTDhw/PvuYNN9ww+5oXX3zx7GsC8zh06NDsa5533nmzr0lfV155\n5exrvuhFL5p9zSW89rWvnX3N3bt3z74m7ERLfI93/fXXz77mpZdeOvuaV1111exrPv/5z599Te6Y\nJf6/du9733v2NV/+8pfPvuYS34csYd++fUtvoS1n2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIA\nAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAA\nAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAA\nADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANCHYAgAAAAA0IdgCAAAAADQh2AIAAAAA\nNCHYAgAAAAA0IdgCAAAAADQh2AIAAAAANLFr6Q0sbe/evbOvecMNN8y+5rXXXrsj1lzC5ZdfvvQW\nANim9u/fP/uaBw4cmH3Nd77znbOv+eQnP3n2NS+66KLZ11zi39C+fftmX5O+rrjiitnXvOCCC2Zf\n8/Dhw7Ov+cY3vnH2NS+++OLZ16Sv888/f/Y1jxw5Mvuahw4dmn3NJf5sn/GMZ8y+5u7du2dfc6tw\nhi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOC\nLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4It\nAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0A\nAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBO7lt7A\n0vbu3Tv7mlddddXsa15++eWzr/nIRz5y9jUPHjw4+5qwtN27d8++5kUXXTT7mq973etmX/PAgQOz\nr7l///7Z16Sv8847b/Y1Dx06tCPWvPLKK2dfc4k5ds4558y+5r59+2Zfk7727Nkz+5qXXHLJ7Gsu\n4eKLL559zauvvnr2NWFpS3y/dcstt8y+pu9DenGGLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAA\nAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAA\nAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAA\nQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAE4ItAAAAAEATgi0AAAAAQBOCLQAAAABA\nE4ItAAAAAEATgi0AAAAAQBOCLQAAAABAEzXGWHoPAAAAAADEGbYAAAAAAG0ItgAAAAAATQi2AAAA\nAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAA\nAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAA\nTQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABN\nCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0I\ntgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2\nAAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYA\nAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAA\nAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAA\nAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAA\nAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAA\nTQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABN\nCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0I\ntgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2\nAAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYA\nAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAA\nAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAA\nAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAA\nAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABN
CLYAAAAAAE0ItgAAAAAA\nTQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABN\nCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0I\ntgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2\nAAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYA\nAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAA\nAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAA\nAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAA\nAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAA\nTQi2AAAP8RsIAAAe3UlEQVQAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAA\nAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAA\nAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAA\nAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAA\nTQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABN\nCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0I\ntgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2\nAAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYA\nAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAA\nAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAA\nAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAA\nAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAA\nTQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABN\nCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0ItgAAAAAATQi2AAAAAABNCLYAAAAAAE0I\ntgAAAAAATQi2O1hVfV9V3V5Vr9/g4756ddwf3MjjAr2ZKcBGMlOAjWSmABvJTGGzCbabZPUf2Jm8\nvWnpvW+AsXrbNo4Zmid7+9Wl98n2ZaZsr5lyrKr65qq6tqpurqrbquqDVfX2qrqqqh6w9P7YnsyU\n7TNTqupfncbf3y1L75ftyUzZPjNlTVXtqarnVdXBqvpoVf1dVX2gqn67qv710vtjezNTtuVMuWdV\n/URV/a/VTPlYVb2zqv59VZ219P462rX0BraxD65z+9lJ7pbktiQnetD8t5u2o3/scJJ3J7l5g4/7\nviR/mnm/ls22NjQ/luQT69zn8HzbYQcyU7bXTElVfU6S/5LkW3N0xtyS6e/0vkkeleRtmb5+2Ghm\nyvaZKbdl/b/PNffNdKLGwc3fDjuUmbJ9Zkqq6iFJrk9yv0yPT25PcmuSL0zy+CRPqKpvS/KtY4zP\nLLZRtjMzZXvNlC9N8oYk52aaKZ/INFcemuSrkjy9qr5hjPH+5XbZT42x7cJ9a1X15iSPTfLyMcYz\nl94Pd0xVXZvkKUl+eIzxc0vvB9aYKVtXTT8+9aQkH0pyeZLXjDE+XlV3SfLAJPuSvHmMccOC22SH\nMVO2n6r6p0luWv32u8YYr1hwO+wwZsrWVFX/O8lDMkWzS5L8zhjjM1W1J8kPJfmxTNHlh8YYL1xu\np+w0ZsrWU1W7krwzyVdkitv7xxgHVh97dJKXJXlwkj8aY3ztUvvsyCURAGBmVfVdmWLtR5N83Rjj\nFWOMjyfJGOP2McafjzFeINYCG+AZSSrT2SyvWXgvQHNV9dBMZ70lyfeMMX5r7SzaMcbhMcaPZ5ol\nlemEFoCTuThTrB1J/s1arE2SMcbbkzw109m2j3S5lX9IsG2qqh6yugbLR1e/f2xVvW513aBPV9Xz\njrnvo6rqBVX1tmOugfg3VXV9VT39JGuse5Hsqvrw6mMPr6r7VtUvVtVNq2PfXFUvrqr7rHPcE14k\n+wRf08Or6jVV9aGq+mRVvauqfqSq7nqSPZ9VVT9TVX+++pz3VdUrqupBxx8fOMpMWXfPS82UH830\noOVnxxjv2YTjw6YyU9bdc8fHKU/PNG9evfbEEHRjpqy75yVmyv2O+fWhde6zdnkV152kJTNl3T0v\nMVOeuHr/R6tA+w+MMd6V5I2r337nBq+9pbmG7RZQVfuT/HKmZzGPJDn+OkG/l+RzVr/+eKYzKM5O\n8rgk31hVTxpjnO4zFWvXU3xgkt9I8kWrY48k/7+9uw2yNC3rA/6/s0tAZJleWIvEF7aBIIbFzIwS\nQUKKhpSoqeA2hkRKNDvBgk35Ri8fUjH5sGtVTKSMMCQhlKJxzCpGo9JGEwTLSm+SKrWsys6WkoIk\nsj3ZEBJeyt4XA4Ry73x4Tm93humZnd7u+7l6+vf78vScPue57tPT5+rn/J/73M+XJfnuxb5f3Hu/\neE3XKy6S3Vr7liS/kGn9mQcX269K8sOZzuh+wQu1tfaMxXN94WL/n8t0kPCGJK9J8n2XqXdLkt9b\n/PN1vfdfvtz44Fqmpzz2mFl6Smvtzyf5M4uaP/d4HwdV6SmPPabccUpr7euTPH8xHkshcCToKY89\nZq6esrnr69O59PqcL15srYtNeXrKY4+Zq6fcvKj3kcvc58OZ1sd+ZWutdWu3JjHD9ih4SpJ/luRn\nknxF7/2ZSZ6a5D277vOrSf5akmf13p/ee39GkhuSfFeSTyV5XWvt9n3W/7EkF5K8uPf+9CRPyzSl\n/ZFM64y89TKP3cuTF8/nvUm
evRjviSTbZ7ne0Fp7+SUe955MzWUr0/N9Wu99KcnXJPmDJP/4cdR+\nIi/8luRNizNin1ucNbuntbbWXNWQo0NP2TFXT/n6xfah3vv9rbU3ttZ+t7X2SGtta3F2//bLnR2H\nQvSUHXMfp1zKmcX2gd77vzvgfcNh0FN2zNJTFp/82cj03ufdrbXXtGkNyrTWntFa+weZlkL4VJIf\nutr9w2B6yo65jlO2H3O59zbbk0mfnGliCxHYHgXXJdnovZ/ZvmJe7/2Pe+8PbN+h9/7Xe++/3Hv/\n1K7b/k/v/Vymj8G1TGdw9uPBJN/Qe793V+1fSvKPFvt93T72eX2SD/Te39h7/9hiv3/Ue//BTAcH\nuXi/bVpL6bWZXuzfuXi+jy4ee1+mszGPXqHuFc9UPY7Hf2Wmq6M+kmQpycuTvD3J+dbaC57AvmEU\nPSWz95TnL7afbK39ZKYz/qczXe32i5K8NMm7k/x6a+1PXnoXUIaektl7yiW11p6c6U1hT/LTB7Vf\nOGR6Skr0lG9P8juZlkf4lSSfba39Yabw6q2ZZva9tPd+YZ/7h1H0lMzeUy5keq4vvMx9dn/vT++j\nxjVJYHs0/OgTeOxvZprqfktr7YZ9PP6f9kuvd7a+2F7uRXc5b9vj9vVML+YXXXT79oL2H+69/5uL\nH9R7/3SSn9yrWO/9Q73363rv1/f9fczwt5O8KcmX996fsjgzd1OS70/ycKaPOrzfTFuOCD1l3p6y\ntNg+N9Pst/dm6i03Lb73dzIdML0qez8vqERPmf845VJWM824SZK7D2ifMIKeMnNP6b3/r0zrTv5S\npoCmJXn64uvrMs0SvOT6m1CQnjJvT/ngYnuytfaXLv5ma+3rkqzsumk/P+drksD2aPitK92htfbt\nrbV/3Vp7YLF49KOttUeT/N9M08pb9nemYq8rlH9ssb1uH43r0ey93tH2fm+86PbTmQ4Q/uNl9vsf\nrnIcj1vv/Ud77/+89/7xXbdt9d7flels1B9nWptlz3VfoBA9Zd6esv23tyX5/d77dyzeGKX3/pne\n+48k+fHF929vrS3tsR+oQk+Z+ThlD7cttr/Ve/9vg2vDE6GnzNxTWmsrmT4i/Y2ZZtQ+L1NI+zWZ\nZtf+5SQbrbVXHdYY4ADpKfP2lJ9P8l8y/Qx/rrX2+tbaDa21p7bWVjOdGPr8rvtfaabvseGiY/V9\ntn/hItSPaa09KdOaK6/OzvT0zyb5ZHYW096+0ud+Zn8+vNe4dn39pKvc52d633MR6e39XrzPmxbb\nj2dv//Mqx3Egeu+/3Vr7lUxnrV6TaaFvqEpPmczZUx7Z9fW79rjP25P8rUwHiH8hyRecCYci9JRJ\nqeOU1tqfSvINmX7mPzWqLhwAPWUyW09prd2UaRmEpyVZ7b3/6q5v35dpfczrMi258q7W2gsv8/xg\nbnrKZLae0nv/fGvt1iS/nuTZmT5duNtDSX4gOzOhtw5jHEeRGbb1XXwFw4utZWouD2V6c/9lvfcv\n7r0/q/f+pb33L820bkoyndHgcPzOYvvcWUcBV6anzG/3wdBeV0v9aHbONH/F4Q4HnhA9pabvzPSx\n5c8m+VczjwWuhp4yv7+Z6SPJmxeFtbu9Y7H9yiR/dsioYH/0lAJ67x9J8ueS/L1Ms3wvJPlQput2\nnM40A3fbfx0+wKLMsD36XpfpTNAP9N7fc/E3W2tPybTe0FG3vQD45T6GYHFqeOL0lB2H1VN+/yrv\nb9YKR5mesmPkccrfyPRzX++9PzSwLhw2PWXHYfWU7QD2/svc56O7vl5O8p8PaSxw2PSUHYd6nNJ7\nfzjTp5G/4BPJrbXvWHz50d0XfzvuzLA9+r58sT2/x/dflWvj//neTGe0Xn6Z+/zFQWO5lJcstpc7\nsIGjQE/ZcVg9ZSM7Z/tfsMd9npedjzJtHtI4YAQ9ZceQ45TW2tcmuWXxz58eURMG0lN2HFZP2V4/\n8tmXuc/Nu77e6yPfcBToKTvmzFO+LVNw/rMzjqGca+EX77jbnp7/1Rd/Y7Eeyw+OHc6hed9i+1Wt\ntW+++JuttWcm+a6xQ3qs9kuS3JqpwfzaHGOAA6Sn5HB7yuIqrP8200HT9+5xt7cutg9n/IWK4CDp\nKRl+nHJmsf14kt8YVBNG0VNy6D3lvsX2eYuLj13Kmxfbzyf5T4c0DhhBT8nsecpbk7ww0//Fu+cY\nQ1UC26PvNzK96f/7rbVvbK21JGmtfXWSD2SavfW5Gcd3IHrvv5dkPdNz/ZnW2re21v5EkrTWTmZ6\nrnv+PrfWbtm+0mNr7VuvpnZr7c2ttbtba6/efQXH1tpSa+17FrWvS/JAkn9y1U8OatFTDrmnLPzd\nTFedvaW19rOLCwRlcbXUv53kTZlOAr3tchdKgCNATxnTU7b386Qkr8/UP+52ISCuQXrK4feU92Za\nz7Ml+ZeLK7p/0WK/z2qtvT1TsNOT/FTv/Y+u9vlBIXrKgOOU1tr3LnrJTbtuW26tnU3yI5n6yff1\n3v/31e77WiawncdBLlb9Q0n+e5JnJnl/ks+01h7MdGb0ZUnemP//iuSjHMaC3G/OtD7SUpJfTPJI\na20r0/T+5yb5/sX9LtdQ9/PG5UlJ3pDpqoYPtta2WmufTvLpTAHtDZkWyf6mxbosMJqesj9z9ZT0\n3j+U5LZMoe3rk3ystfapTFdF/eFMz/fu3vs/3M/+4QnSU/Zntp6yy1/J9LNOLIdAHXrK/szSUxaf\nBPq2TJ/y+ZJMAe4jrbWHMs3cX1vc9d9n5xNBMJKesj9zHqe8IlMv+URrbbuffHRR83NJbu+9Ww7h\nIgLbefQ8vl/0K96v9/6JTOun/kR2rjz+cJJfSPKy3vsv7trX1dZ4vGO82tuvtN9L3mex+PRLkrwt\nyR9kamKPJLk7yddlZ53HrSdQ+1Len+TOJB/MtEZtS/LFST6R6Yzc9yQ51Xv/8D72DQdBT7nyPiv1\nlO36P5/ka5P8iyT/I1Nf2crUc/5q7/3MfvcNT5CecuV9luspC9sXG/tdxyUUoqdceZ+lekrv/QNJ\nXpTk7ZmCq0eSPDnJJzO9/3ljklf13j+zn/3DE6SnXHmfpXpKkh9f1PlIpmt5tCQfTnI2yYt67z+x\nz/1e05pPSnGtaK29Jck7kvxa7/1b5h4PcLTpKcBB0lOAg6SnAAdJT6nHDFuuCYt1lb470xmfD848\nHOCI01OAg6SnAAdJTwEOkp5Sk8CWI6O19oLW2o+11l66a+H71lp7WZLfTPL8TMsU3D3nOIGjQU8B\nDpKeAhwkPQU4SHrK0WNJBI6MxdUL79110x8meWqm9ZR6pvVXbu29b4wfHXDU6CnAQdJTgIOkpwAH\nSU85egS2HBmttRuS3J7k1ZnO/nxJpsbyQKZp++/ovV+Yb4TAUaKnAAdJTwEOkp4CHCQ95egR2AIA\nAAAAFGENWwAAAACAIgS2AAAAAABFCGwBAAAAAIoQ2AIAAAAAFHH93AO4yLG4AtrKysrw
msvLy8Nr\nnjt3bnhNDlWbewD7cCx6yhzm6GNbW1vDa54/f354zWNETynq7Nmzw2vO8fpeX18fXvO+++4bXvPE\niRPDa25ubg6vubS0pKcUtba2NrzmHK/vM2fODK85x892aWlpeM2Z6ClFra6uDq85x3HKxsbG8Joc\nqqvuKWbYAgAAAAAUIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAA\nAAAARQhsAQAAAACKENgCAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAA\nAACAIgS2AAAAAABFCGwBAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFCEwBYAAAAAoAiBLQAA\nAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAUIbAFAAAAAChCYAsAAAAAUETrvc89\nht1KDeawLC8vD6954cKF4TXncPPNNw+vubm5ObzmTNrcA9iHY9FT1tfXh9d87WtfO7zmnXfeObzm\nXXfdNbzmMaKnFHX27Nm5hzDEqVOnhtec42e7tbU1vObGxsbwmtFTylpZWRle87gcn8/xvnKm1/cc\n9JTHYY7X2nOe85zhNY+LkydPDq95/vz54TVnctU9xQxbAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDY\nAgAAAAAUIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAAAAAARQhs\nAQAAAACKENgCAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAAAACAIgS2\nAAAAAABFCGwBAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFCEwBYAAAAAoAiBLQAAAABAEQJb\nAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAUcf3cAziOlpaWhte8cOHC8JonTpwYXnNlZWV4\nza2treE15/gdoq677rpr7iEMsbq6OvcQ4FhYW1ubewhDzNE7Nzc3h9fc2NgYXhN2O3Xq1PCay8vL\nw2ueO3dueM053hPM0VPmeI/H4zPHe+E5vOIVrxhec44+5pihFjNsAQAAAACKENgCAAAAABQhsAUA\nAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAAAACAIgS2AAAAAABFCGwBAAAAAIoQ2AIA\nAAAAFCGwBQAAAAAoQmALAAAAAFCEwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEA\nAAAAihDYAgAAAAAUIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAA\nAAAARQhsAQAAAACKENgCAAAAABQhsAUAAAAAKEJgCwAAAABQxPVzD+A4Wl5eHl7zvvvuG17zwQcf\nHF7z1KlTw2suLS0Nrwm7bW1tDa958uTJ4TXneH3D3DY2No5FzTmcPXt27iEMsb6+PrzmmTNnhtek\nrjl+H06fPj285ubm5vCac7wPmeO9LHUdl9+HOf6Wrq6uDq85x/tK9maGLQAAAABAEQJbAAAAAIAi\nBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAUIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEAR\nAlsAAAAAgCIEtgAAAAAARQhsAQAAAACKENgCAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAI\ngS0AAAAAQBECWwAAAACAIgS2AAAAAABFCGwBAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFCE\nwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEAAAAAirh+7gEcR+vr68NrbmxsDK95\n/vz54TXvuOOO4TXnsLa2NvcQKGRra2t4zeXl5eE1z549O7zm6urq8Jpz/Gypa47fhzn+fs9xnDKH\nOY4BV1ZWhteE3eY4TpnDPffcM7zm/fffP7ym4xR2W1paGl7z5MmTw2veeOONw2u+5S1vGV5zjmPA\nzc3N4TWPSh8zwxYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAU\nIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAAAAAARQhsAQAAAACK\nENgCAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAAAACAIgS2AAAAAABF\nCGwBAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFCEwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAi\nBLYAAAAAAEVcP/cAGGNlZWXuIVyzNjc35x4Cx9zy8vLwmvfcc8/wmltbW8Nr3nHHHcNr3nvvvcNr\nnjp1anhNHp85Xt/r6+vDa7bWhtd83/veN7ym4zHmdv78+eE1X/nKVw6veeeddw6vOcd7gtXV1eE1\n5/gbMcffQuqao4/NUfO4HJ+vra0NrzlHH9sPM2wBAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAA\nAFCEwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAUIbAFAAAA\nAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAAAAAARQhsAQAAAACKENgCAAAA\nABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAAAACAIgS2AAAAAABFCGwBAAAA\nAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFDE9XMP4DhaX18fXnNpaWl4zbvuumt4zTmsrq7OPQSO\nuTNnzgyveccddwyvuby8PLzm5ubm8Jpz/I04derU8JrUtba2NrzmiRMnhtdcWVkZXhPmNsff0jle\n33P0sTmOGU6fPj285rlz54bXPC7vK6lrjmPlOfrYHK/vOd77HBVm2AIAAAAAFCGwBQAAAAAoQmAL\nAAAAAFCEwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAUIbAF\nAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAAAAAARQhsAQAAAACKENgC\nAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAAAACAIgS2AAAAAABFCGwB\nAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFCEwBYAAAAAoIjr5x7AcbSxsTG85jvf+c7hNedw\n2223Da+5srIyvCbsdubMmeE1Nzc3h9c8d+7c8JpzvL5XV1eH14Td5jhOmeP1vbS0NLwmzG2O3/s5\n/pbeeOONw2ueOHFieM1bb711eM21tbXhNWG
3OX4Hz58/P7zm1tbW8JpzHAOeOnVqeM2jwgxbAAAA\nAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAUIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAA\nAEARAlsAAAAAgCIEtgAAAAAARQhsAQAAAACKENgCAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAA\nAKAIgS0AAAAAQBECWwAAAACAIgS2AAAAAABFCGwBAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAA\nAFCEwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYAAAAAAEUIbAEAAAAAihDYAgAAAAAU0Xrvc48B\nAAAAAICYYQsAAAAAUIbAFgAAAACgCIEtAAAAAEARAlsAAAAAgCIEtgAAAAAARQhsAQAAAACKENgC\nAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0AAAAAQBECWwAAAACAIgS2AAAAAABFCGwB\nAAAAAIoQ2AIAAAAAFCGwBQAAAAAoQmALAAAAAFCEwBYAAAAAoAiBLQAAAABAEQJbAAAAAIAiBLYA\nAAAAAEUIbAEAAAAAihDYAgAAAAAUIbAFAAAAAChCYAsAAAAAUITAFgAAAACgCIEtAAAAAEARAlsA\nAAAAgCIEtgAAAAAARQhsAQAAAACKENgCAAAAABQhsAUAAAAAKEJgCwAAAABQhMAWAAAAAKAIgS0A\nAAAAQBECWwAAAACAIgS2AAAAAABFCGwBAAAAAIoQ2AIAAAAAFCGwBQAAAAAo4v8BknbueHTtDVAA\nAAAASUVORK5CYII=\n", 117 | "text/plain": [ 118 | "" 119 | ] 120 | }, 121 | "metadata": { 122 | "image/png": { 123 | "height": 431, 124 | "width": 694 125 | } 126 | }, 127 | "output_type": "display_data" 128 | } 129 | ], 130 | "source": [ 131 | "images_and_labels = list(zip(digits.images[:10], digits.target[:10]))\n", 132 | "for index, (image, label) in enumerate(images_and_labels):\n", 133 | " plt.subplot(2, 5, index + 1)\n", 134 | " plt.axis('off')\n", 135 | " plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')\n", 136 | " plt.title('Training: %i' % label)" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "### Data preparation\n", 144 | "We save the last 100 samples from the dataset for testing, and use the rest to train a classifier.\n", 145 | "\n", 146 | "As described above, images and the corresponding labels are accessible in `digits.data` and `digits.target`, respectively. The code below creates four Intel DAAL numeric tables, for `training_data`, `training_labels`, `test_data`, and `test_labels`.\n", 147 | "\n", 148 | "It is important to keep in mind that DAAL NumericTables can only be created from ndarrays with C-contiguous memory layout. `digits.data` and `digits.target` are not C-contiguous. You can check this with:\n", 149 | "\n", 150 | "```python\n", 151 | "digits.data.flags['C']\n", 152 | "```\n", 153 | "To put them into correct memory layout, we use NumPy function [`np.ascontiguousarray`](http://docs.scipy.org/doc/numpy/reference/generated/numpy.ascontiguousarray.html)." 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": 4, 159 | "metadata": { 160 | "collapsed": false 161 | }, 162 | "outputs": [], 163 | "source": [ 164 | "# Split the data into training data and labels, and create numeric tables\n", 165 | "nsamples = len(digits.images)\n", 166 | "data = np.ascontiguousarray(digits.data, dtype = np.double)\n", 167 | "labels = np.ascontiguousarray(digits.target.reshape(nsamples,1), dtype = np.double)\n", 168 | "\n", 169 | "training_data = HomogenNumericTable(data[:-100])\n", 170 | "training_labels = HomogenNumericTable(labels[:-100])\n", 171 | "\n", 172 | "test_data = HomogenNumericTable(data[-100:])\n", 173 | "test_labels = HomogenNumericTable(labels[-100:])" 174 | ] 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "metadata": {}, 179 | "source": [ 180 | "### Create a multi-class classifier based on SVM\n", 181 | "\n", 182 | "The definition of the `MulticlassSVM` class using pyDAAL is below. A few things to help understanding the code:\n", 183 | "\n", 184 | "1. 
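  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Before training, a quick sanity check is useful: the converted arrays should now be C-contiguous, and the numeric tables should have the expected dimensions (the digits dataset has 1797 samples of 64 features each). This is a minimal sketch, assuming the standard `getNumberOfRows`/`getNumberOfColumns` accessors of a DAAL NumericTable:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Sanity check: the converted copies must be C-contiguous ...\n",
    "print(data.flags['C'], labels.flags['C'])\n",
    "\n",
    "# ... and the tables should hold 1697 training and 100 test rows of 64 features\n",
    "print(training_data.getNumberOfRows(), training_data.getNumberOfColumns())\n",
    "print(test_data.getNumberOfRows(), test_data.getNumberOfColumns())"
   ]
  },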
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Create a multi-class classifier based on SVM\n",
    "\n",
    "The definition of the `MulticlassSVM` class using pyDAAL is given below. A few notes to help in understanding the code:\n",
    "\n",
    "1. The implementation uses `multi_class_classifier` from pyDAAL, which is parameterized by an underlying two-class classifier and by the number of classes. SVM is hard-coded as the two-class classifier, while the number of classes is a user input.\n",
    "2. The SVM classifier itself takes several parameters, in particular:\n",
    "  * Kernel function: Either a linear kernel or an RBF kernel. A kernel function also has its own parameters; for example, the RBF kernel is parameterized by $\\sigma$.\n",
    "  * Cache size (in bytes): A cache is used to store the kernel matrix. For best performance, the cache size should be about `number_of_samples x number_of_samples x sizeof(feature_data_type)` (see the worked example after this cell).\n",
    "  * C: The upper bound on the coefficients in the quadratic optimization problem. It controls the trade-off between the variance and the bias of the model, and is typically set to 1.0.\n",
    "  * shrinking: A bool value that enables or disables kernel shrinking, an optimization technique that reduces the amount of kernel computation.\n",
    "  * There are other parameters, such as the accuracy threshold and $\\tau$, the parameter of the working set selection (WSS) scheme, that are not explicitly set in the code below; we just use their default values. For details about these parameters, refer to the [Intel DAAL Developer Guide](https://software.intel.com/sites/products/documentation/doclib/daal/daal-user-and-reference-guides/index.htm).\n",
    "3. The `predict` method of the class is left as an exercise in the original skeleton. Follow the `train` method as an example; a completed reference implementation is included in the code below.\n"
   ]
  },
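  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a worked example of the cache-size rule of thumb above: with 1697 training samples and `float64` features (8 bytes each), a full kernel matrix takes about 1697 x 1697 x 8, roughly 23 MB. The short computation below simply evaluates this formula; it also explains the `cachesize = 32000000` used when we train the classifier further down:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Rule-of-thumb kernel cache size: n_samples x n_samples x sizeof(float64)\n",
    "n_train = nsamples - 100          # 1697 training samples\n",
    "cache_bytes = n_train * n_train * 8\n",
    "print('Suggested kernel cache size: {:.1f} MB'.format(cache_bytes / 1e6))"
   ]
  },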
\"\"\"\n", 241 | "\n", 242 | " self._svm_training_alg.parameter.cacheSize = cachesize \n", 243 | " self._svm_training_alg.parameter.C = C \n", 244 | " if getattr(kernel.parameter, 'sigma', None):\n", 245 | " kernel.parameter.sigma = sigma\n", 246 | " self._svm_training_alg.parameter.kernel = kernel\n", 247 | " self._svm_prediction_alg.parameter.kernel = kernel\n", 248 | " self._svm_training_alg.parameter.doShrinking = shrinking\n", 249 | "\n", 250 | "\n", 251 | "\n", 252 | " def train(self, data, labels):\n", 253 | " \"\"\"Train an SVM model.\n", 254 | "\n", 255 | " Args:\n", 256 | " data: training data\n", 257 | " labels: ground truth known for training data \n", 258 | "\n", 259 | " Returns:\n", 260 | " An SVM model object\n", 261 | " \"\"\"\n", 262 | " \n", 263 | " # Create a multiclass classifier object based on the\n", 264 | " # SVM two-class classifier\n", 265 | " multiclass_training_alg = multiclass_training.Batch_Float64OneAgainstOne()\n", 266 | " multiclass_training_alg.parameter.nClasses = self._nclasses\n", 267 | " multiclass_training_alg.parameter.training = self._svm_training_alg\n", 268 | " multiclass_training_alg.parameter.prediction = self._svm_prediction_alg\n", 269 | "\n", 270 | " # Pass training data and labels\n", 271 | " multiclass_training_alg.input.set(training_params.data, data)\n", 272 | " multiclass_training_alg.input.set(training_params.labels, labels)\n", 273 | "\n", 274 | " # Build the model and return it\n", 275 | " return multiclass_training_alg.compute().get(training_params.model)\n", 276 | "\n", 277 | " \n", 278 | "\n", 279 | " def predict(self, model, testdata):\n", 280 | " \"\"\"Make predictions for unseen data using a learned model.\n", 281 | "\n", 282 | " Args:\n", 283 | " model: a learned SVM model\n", 284 | " testdata: new data\n", 285 | "\n", 286 | " Returns:\n", 287 | " A NumericTable containing predicted labels\n", 288 | " \"\"\"\n", 289 | "\n", 290 | " # Create a multiclass classifier object based on the\n", 291 | " # SVM two-class classifier\n", 292 | " #\n", 293 | " # YOUR CODE HERE\n", 294 | " #\n", 295 | " # The multi-class prediction algorithm you need is Batch_Float64DefaultDenseOneAgainstOne\n", 296 | " # Follow the example in the `train` method to set parameters, including nClasses, and training \n", 297 | " # and prediction algorithms for the underlying two-class classifier. \n", 298 | " \n", 299 | " \n", 300 | " # Pass a model and input data\n", 301 | " #\n", 302 | " # YOUR CODE HERE\n", 303 | " #\n", 304 | " # Use the input.setModel method to specify a pre-trained model. The input ID to use is\n", 305 | " # prediction_params.model.\n", 306 | " # Use the input.setTable method to specify test data. The input ID to use is prediction_params.data\n", 307 | " \n", 308 | "\n", 309 | " # Compute and return prediction results\n", 310 | " #\n", 311 | " # YOUR CODE HERE\n", 312 | " #\n", 313 | " # Call the `compute` method of the multi-class prediction algorithm. Store the return value into \n", 314 | " # variable `results`.\n", 315 | " \n", 316 | " return results.get(prediction_params.prediction)" 317 | ] 318 | }, 319 | { 320 | "cell_type": "markdown", 321 | "metadata": {}, 322 | "source": [ 323 | "Now the `MulticlassSVM` is fully implemented, we can apply it to the handwritten digits recognition problem.\n", 324 | "\n", 325 | "The code below creates a `MulticlassSVM` object, sets some parameters, and then continues to train a model using the training data and training labels we defined above. 
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Now that the `MulticlassSVM` class is fully implemented, we can apply it to the handwritten digit recognition problem.\n",
    "\n",
    "The code below creates a `MulticlassSVM` object, sets some parameters, and then trains a model using the training data and training labels we defined above. Next, the model is used to make predictions on the test data. We time the training and prediction stages separately, and will compare these timings with those of the scikit-learn SVC solution later on."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Wall time: 47.9 ms\n",
      "Wall time: 0 ns\n"
     ]
    }
   ],
   "source": [
    "# Alternatively, import a completed class from svm_multi_class.py:\n",
    "#from svm_multi_class import *\n",
    "\n",
    "nclasses = 10\n",
    "classifier = MulticlassSVM(nclasses)\n",
    "\n",
    "classifier.setSVMParams(\n",
    "    cachesize = 32000000,\n",
    "    kernel = linear.Batch_Float64DefaultDense(),\n",
    "    shrinking = True)\n",
    "\n",
    "%time svm_model = classifier.train(training_data, training_labels)\n",
    "\n",
    "%time predictions = classifier.predict(svm_model, test_data)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Quality metrics\n",
    "We can check the performance of the model by computing quality metrics. Some DAAL algorithms, including the multi-class classifier, allow users to compute and query quality metrics. There are many aspects to a multi-class classifier's quality metrics, but \"average accuracy\" is probably the most commonly used indicator. For convenience, we define a class `ClassifierQualityMetrics` in the file [quality_metrics.py](quality_metrics.py) that gives you easy access to classification quality metrics."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Average accuracy: 99.40%\n"
     ]
    }
   ],
   "source": [
    "from quality_metrics import *\n",
    "\n",
    "quality = ClassifierQualityMetrics(test_labels, predictions, nclasses)\n",
    "print('Average accuracy: {:.2f}%'.format(quality.get('accuracy')*100))"
   ]
  },
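  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Average accuracy compresses everything into one number. To see which digits get confused with which, a per-class view helps. Below is a minimal sketch that builds a confusion matrix with plain NumPy, using the `getArrayFromNT` helper from [utils.py](utils.py); rows are true labels, columns are predicted labels:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Build an nclasses x nclasses confusion matrix from the numeric tables\n",
    "truth = getArrayFromNT(test_labels).ravel().astype(int)\n",
    "pred = getArrayFromNT(predictions).ravel().astype(int)\n",
    "confusion = np.zeros((nclasses, nclasses), dtype=int)\n",
    "for t, p in zip(truth, pred):\n",
    "    confusion[t, p] += 1\n",
    "print(confusion)"
   ]
  },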
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Visualize predictions\n",
    "The code below shows the last 10 images we made predictions for, together with the predicted labels. Does our classifier do a good job of guessing the labels?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "data": {
      "image/png": "[base64 PNG data omitted: a 2x5 grid of the last 10 test digits, each titled 'Prediction: <label>']",
      "text/plain": [
       ""
      ]
     },
     "metadata": {
      "image/png": {
       "height": 431,
       "width": 694
      }
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "predicted = getArrayFromNT(predictions)\n",
    "images_and_labels = list(zip(digits.images[-10:], predicted[-10:]))\n",
    "for index, (image, label) in enumerate(images_and_labels):\n",
    "    plt.subplot(2, 5, index + 1)\n",
    "    plt.axis('off')\n",
    "    plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')\n",
    "    plt.title('Prediction: %i' % label)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Compare with scikit-learn SVC\n",
    "Finally, as a comparison, see a solution below that uses the Support Vector Classifier from scikit-learn, [`sklearn.svm.SVC`](http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html). We get the timings for both training and prediction, and we also calculate the average accuracy of the predictions. How does the pyDAAL solution compare against this solution?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Wall time: 62.5 ms\n",
      "Wall time: 0 ns\n",
      "Average accuracy: 98.00%\n"
     ]
    }
   ],
   "source": [
    "from sklearn import svm, metrics\n",
    "\n",
    "sklearn_classifier = svm.SVC(kernel='linear')\n",
    "\n",
    "%time sklearn_classifier.fit(digits.data[:-100], digits.target[:-100])\n",
    "\n",
    "%time sklearn_predictions = sklearn_classifier.predict(digits.data[-100:])\n",
    "\n",
    "sklearn_quality = metrics.accuracy_score(digits.target[-100:], sklearn_predictions)\n",
    "print('Average accuracy: {:.2f}%'.format(sklearn_quality*100))"
   ]
  },
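  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The two solutions need not produce identical labels, since the multi-class decomposition and solver details differ between the libraries. As a final, optional check, the sketch below (again relying on `getArrayFromNT` from [utils.py](utils.py)) measures how often the pyDAAL and scikit-learn predictions agree on the test set:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Fraction of test samples on which the two classifiers give the same label\n",
    "daal_predictions = getArrayFromNT(predictions).ravel().astype(int)\n",
    "agreement = np.mean(daal_predictions == sklearn_predictions)\n",
    "print('pyDAAL and scikit-learn agree on {:.0f}% of test samples'.format(agreement * 100))"
   ]
  },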
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Summary\n",
    "In this lab we learned about the Support Vector Machine, a powerful classification algorithm. We saw how to use SVM in conjunction with a multi-class classifier to recognize handwritten digits. We also compared the execution time and the prediction quality of a pyDAAL solution and a scikit-learn solution."
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
--------------------------------------------------------------------------------