├── AdEMAMix_Concepts.ipynb ├── AdaptiveLoss.ipynb ├── Augly_Try.ipynb ├── Conjugate_prior ├── Conjugate_Prior.ipynb ├── beta_post_PyMC3.gif ├── trace_bprior0.png ├── trace_bprior1.png ├── trace_bprior2.png ├── trace_bprior3.png └── trace_bprior4.png ├── Consumer_Complaints_short.csv ├── DBSCAN_Complete ├── DBSCAN_Medium.ipynb ├── images │ ├── Canada_WS.png │ ├── Dist_of_Mean_Temp.png │ ├── Hex_Tm_Tn.png │ ├── Violin_Plots_Min_Max_Temp.png │ ├── dbscan_density_connected.png │ ├── dbscan_points.png │ ├── etopo_cluster.png │ └── etopo_cluster_preci.png └── weather-stations.csv ├── DecsTree ├── Images │ ├── Bank_DecsT_new.png │ ├── Bank_DecsT_new_some_parts.png │ ├── Decs_tree1.png │ ├── Decs_tree2.png │ ├── Decs_tree3.png │ ├── Decs_tree5.png │ ├── Feature_Importance.png │ └── decs_tree8.png └── notebooks │ ├── Bank_Data_Analysis.ipynb │ ├── Learn_Gini_Imp.ipynb │ └── bank.csv ├── ExMax_ALgo ├── LVM.ipynb └── samples.npz ├── FocalLoss_Ex.ipynb ├── IBM_Python_Web_Scrapping.ipynb ├── KAN-Intro ├── KAN-Detailed.png ├── KAN-MLP.png └── KAN_Intro.ipynb ├── LICENSE.md ├── LassoandLin.py ├── NB_LogisticReg.ipynb ├── PCA_Muller.py ├── README.md ├── RidgeandLin.py ├── SVM_Decision_Boundary ├── .ipynb_checkpoints │ └── Decision_Boundary_SVM-checkpoint.ipynb ├── Cancer_labels_PCAs.png ├── ClassifyMalignant_Benign2D_Decs_Funct.png ├── ClassifyMalignant_Benign2D_Decs_FunctG0d01.png ├── ClassifyMalignant_Benign2D_Decs_FunctG10.png ├── Decision_Boundary_SVM.ipynb ├── Pairplots_Area_Texture.png └── cancer_jointplots1.png ├── SVMdemo.py ├── SklearnV1d2 ├── Scikit_Pandas_Output.ipynb └── old_Sklearn.py ├── Text-classification_Complain_Suvo.py ├── VotingClassifier ├── EnsembleClassifier.ipynb ├── EnsembleClassifier.py └── heart.csv ├── bank.csv ├── datacleaning ├── data_cleaning.py ├── hist_plot_numpy_scipy1.png ├── hist_plot_numpy_scipy2.png ├── movie_metadata.csv ├── outliers.odp ├── outliers.png ├── outliers_face.png ├── outliers_face_Zscore.png ├── standard_dev_Zscore.py └── zscore_std3.png ├── fakeCover3_Web_Scrap.png ├── gender_purchase.csv ├── lagmult.py ├── pima_diabetes.ipynb ├── pipelineWine.py ├── terrorism_kaggle.ipynb ├── why_relu.py └── winequality-red.csv /Conjugate_prior/beta_post_PyMC3.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/Conjugate_prior/beta_post_PyMC3.gif -------------------------------------------------------------------------------- /Conjugate_prior/trace_bprior0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/Conjugate_prior/trace_bprior0.png -------------------------------------------------------------------------------- /Conjugate_prior/trace_bprior1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/Conjugate_prior/trace_bprior1.png -------------------------------------------------------------------------------- /Conjugate_prior/trace_bprior2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/Conjugate_prior/trace_bprior2.png -------------------------------------------------------------------------------- 
/Conjugate_prior/trace_bprior3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/Conjugate_prior/trace_bprior3.png -------------------------------------------------------------------------------- /Conjugate_prior/trace_bprior4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/Conjugate_prior/trace_bprior4.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/Canada_WS.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/Canada_WS.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/Dist_of_Mean_Temp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/Dist_of_Mean_Temp.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/Hex_Tm_Tn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/Hex_Tm_Tn.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/Violin_Plots_Min_Max_Temp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/Violin_Plots_Min_Max_Temp.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/dbscan_density_connected.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/dbscan_density_connected.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/dbscan_points.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/dbscan_points.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/etopo_cluster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/etopo_cluster.png -------------------------------------------------------------------------------- /DBSCAN_Complete/images/etopo_cluster_preci.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DBSCAN_Complete/images/etopo_cluster_preci.png -------------------------------------------------------------------------------- /DecsTree/Images/Bank_DecsT_new.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Bank_DecsT_new.png -------------------------------------------------------------------------------- /DecsTree/Images/Bank_DecsT_new_some_parts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Bank_DecsT_new_some_parts.png -------------------------------------------------------------------------------- /DecsTree/Images/Decs_tree1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Decs_tree1.png -------------------------------------------------------------------------------- /DecsTree/Images/Decs_tree2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Decs_tree2.png -------------------------------------------------------------------------------- /DecsTree/Images/Decs_tree3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Decs_tree3.png -------------------------------------------------------------------------------- /DecsTree/Images/Decs_tree5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Decs_tree5.png -------------------------------------------------------------------------------- /DecsTree/Images/Feature_Importance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/Feature_Importance.png -------------------------------------------------------------------------------- /DecsTree/Images/decs_tree8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/DecsTree/Images/decs_tree8.png -------------------------------------------------------------------------------- /ExMax_ALgo/samples.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/ExMax_ALgo/samples.npz -------------------------------------------------------------------------------- /KAN-Intro/KAN-Detailed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/KAN-Intro/KAN-Detailed.png -------------------------------------------------------------------------------- /KAN-Intro/KAN-MLP.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/KAN-Intro/KAN-MLP.png 
-------------------------------------------------------------------------------- /KAN-Intro/KAN_Intro.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [], 7 | "gpuType": "T4" 8 | }, 9 | "kernelspec": { 10 | "name": "python3", 11 | "display_name": "Python 3" 12 | }, 13 | "language_info": { 14 | "name": "python" 15 | }, 16 | "accelerator": "GPU" 17 | }, 18 | "cells": [ 19 | { 20 | "cell_type": "markdown", 21 | "source": [ 22 | "Checking out this potentially monumental (in my opinion) paper [KAN](https://arxiv.org/html/2404.19756v1). Few Notes and additions for future references :)\n", 23 | "\n", 24 | "## KA Representation Theorem:\n", 25 | "\n", 26 | "Was introduced to work on Hilbert's 13th problem: It is necessary to prove whether a solution exists for all $7$th-degree equations using algebraic functions of two arguments.\n", 27 | "\n", 28 | "Say we have a $7$th order polynomial as below:\n", 29 | "\n", 30 | "$$x^7 + ax^3 + bx^2 + cx + 1 = 0\\, \\ldots (1)$$\n", 31 | "\n", 32 | "Hilbert asked whether its solution, $x$, considered as a function of the three variables $a, b, c,$ can be expressed as the composition $(h = g \\circ f,\\rightarrow h(x) = g(f(x)))$ of a finite number of two-variable functions.\n", 33 | "\n", 34 | "The KA representation theorem states that for any continuous function $f\\, :\\, [0, 1]^d \\rightarrow \\mathbb{R}$, there exists univariate continuous functions $g_q, \\, \\psi _{p, q}$ such that\n", 35 | "\n", 36 | "$$f(x_1, x_2, \\ldots \\, x_d) = f(x) = \\sum \\limits_{q=0}^{2d}\\, g_q \\left(\\sum \\limits _{p=1}^d\\, \\psi _{p, q}(x_p) \\right)\\, \\ldots (2)$$\n", 37 | "\n", 38 | "This means that the $(2d + 1)(d + 1)$ univariate functions $g_q$\n", 39 | "and $ψ_{p,q}$ are enough for an exact representation of a $d$-variate\n", 40 | "function. _The underlying point here is that the only truly multivariate function is addition since every other function can be written using univariate functions and sum_.\n", 41 | "\n", 42 | "We bear in mind that $\\psi _{p, q},\\, g_q$ both are univariate functions. So, any continuous function of several variables can be expressed as the composition of univariate functions. Wow! I didn't know that. 😯 😯\n", 43 | "\n", 44 | "--------------------------------------------------------------------------------\n", 45 | "\n", 46 | "### Thinking About KA Theorem 🤔 🤔\n", 47 | "\n", 48 | "The authors in the paper state that these 1D functions (defined before) can be non-smooth and even fractal, so they may not be learnable in practice. To build KANs the authors go beyond the definition of the KA representation theorem; The original representation theorem can be thought of as depth 2 with each layer having $(2d+1)$ terms. We will comeback to this soon.\n", 49 | "\n", 50 | "The MLPs (conventional neural networks in general) are based on the universal approximation theorem which states that any continuous function $f : [0, 1]^d → [0, 1]$ can be approximated arbitrarily well by a neural network (weights, biases and non-linear activation function) with at least $1$ hidden layer with a finite number of weights. During backpropagation, network learns to optimize the weights and biases to act as a function approximator while the activation functions remain fixed.\n", 51 | "\n", 52 | "Now, can we build a neural-net architecture based on the KA representation theorem discussed above? 
\n", 53 | "\n", 54 | "Let's take a look of the shallow architecture of KAN from the paper itself\n", 55 | "\n", 56 | "![KAN-MLP](https://raw.githubusercontent.com/suvoooo/Machine_Learning/master/KAN-Intro/KAN-MLP.png)\n", 57 | "\n", 58 | "If we think about a supervised learning problem where given $\\{x_i, y_i\\}$ pair we want to find a function such that $y_i \\approx f(x_i)$, then KA representation theorem tells me is that all I need to find are the univariate functions in Eq. 2 $(g_q, \\, \\psi _{p, q})$.\n", 59 | "\n", 60 | "The authors here argue that as we need to learn only univariate functions, we can parametrize each 1D function as a B-spline curve (check below), with learnable coefficients of local B-spline basis functions. This leads to the prototype KAN and illustrated in the figure above (Model(shallow) (b)), with input dimensions $n=2$ appearing as a two-layer neural network with activation functions placed on edges instead of nodes (simple summation is performed on nodes), and with width $2n+1$ in the middle layer. The construction of the network i.e. number of activations, nodes, etc will be clear soon.\n", 61 | "\n", 62 | "--------------------------------------------------------------------------------\n", 63 | "\n", 64 | "**B-splines:** We can think that a B-spline function is a combination of flexible bands that is controlled by a number of points (control points), creating smooth curves. A bit more mathematical definition would be a B-spline of order $p+1$ is a collection of piecewise polynomial functions $B_{i, p}$ of degree $p$ in a variable $t$. The values of $t$ where the pieces of polynomial meet are known as knots. \n", 65 | "\n", 66 | "B-splines are built from piecewise polynomials (basis functions) and the order of a B-spline is one more than the degree of its basis polynomials. For example, a quadratic B-spline has polynomials of degree 2 and is of order 3.\n", 67 | "\n", 68 | "\n", 69 | "--------------------------------------------------------------------------------\n", 70 | "\n", 71 | "## Constructing KAN Layer: 🚧 🏗 👷\n", 72 | "\n", 73 | "It was already mentioned that the 2 layer network representing the original KA representation theorem is too simple to approximate any function arbitrarily well. How to make the KAN wider and deeper?\n", 74 | "\n", 75 | "Here the authors present an excellent analogy between KAN and MLPs to go deeper. First we need to see what is a KAN layer and how to stack them on top of each other to build a deep neural net.\n", 76 | "\n", 77 | "First of all one can express the KA representation in matrix form as\n", 78 | "\n", 79 | "$$f(x) = \\psi _{\\text{out}} \\circ \\psi _{\\text{in}} \\circ x\\, \\ldots \\, (3)$$\n", 80 | "\n", 81 | "A KAN layer with $n_{\\text{in}}$-dimensional inputs and $n_{\\text{out}}$dimensional outputs can be defined as a matrix of 1D functions\n", 82 | "\n", 83 | "$$\\Psi = \\{\\psi _{q, p}\\}; \\, p=1, 2, \\ldots n_{\\text{in}}, \\, q = 1, 2, \\ldots n_{\\text{out}}\\, \\ldots \\, (4)$$\n", 84 | "\n", 85 | "In the Kolmogorov-Arnold theorem (Eq. 2), the inner functions form a KAN layer with $n_{\\text{in}} = n$ and $n_{\\text{out}} = 2n+ 1$, and the outer functions\n", 86 | "form a KAN layer with $n_{\\text{in}} = 2n + 1$ and $n_{\\text{out}} = 1$. At this stage we can clearly see that the KA representation can be thought of as a composition of two KAN layers. 
Let's try to get accustomed to the notations when we stack more KAN layers.\n", 87 | "\n", 88 | "\n", 89 | "Let's see via an example that the authors presented in the paper, below is the network graph:\n", 90 | "\n", 91 | "![KAN-graph](https://raw.githubusercontent.com/suvoooo/Machine_Learning/master/KAN-Intro/KAN-Detailed.png)\n", 92 | "\n", 93 | "\n", 94 | "\n", 95 | "We denote $n_i$ as the number of nodes in the $i$th layer of the KAN and the $i$th neuron in $l$th layer would be denoted by $(l, i)$ where the activation of this neuron is given by $x_{l, i}$. We think of the activation functions as learnable functions residing on the edges of the network graph and the nodes represent the summation operation. So between 1st (0th) and the 2nd layer (1st) we see there are 10 activation functions denoted by $\\phi _{0, 1, 1}, \\, \\phi _{0, 1, 2}, \\ldots$. These number of activation functions are governed by the number of nodes in 0th and 1st layer.\n", 96 | "\n", 97 | "In the 0th layer we have two nodes $x_{0, 1}, \\, x_{0, 2}$ and in the first layer we have 5, so the number of activation functions would be $n_l \\times n_{l+1}$.\n", 98 | "\n", 99 | "The $n_l$ and $n_{l+1}$ are determined from the input and output dimensions of the inner function defined in Eq. 4. So we started with two inputs $n_{\\text{in}}=2$, so our $n_{\\text{out}}$ has to be $2n+1 = 5$. This in turn determines the number of activation functions in the hidden layer.\n", 100 | "\n", 101 | "If we continue with the number of nodes is $n_1 = 5$ and $n_2 = 1 (n_{\\text{out}})$, it makes sense that the number of activations at that layer is 5. This would be the outer function. So to repeat, **the KA representation is composed of two KAN layers**.\n", 102 | "\n", 103 | "--------------------------------------------------------------------------------\n", 104 | "\n", 105 | "### Matrix Form of KAN Layer\n", 106 | "\n", 107 | "We can write this in matrix form. 
Let's see: The activation function that connects the two nodes at layers $l, l+1$ is denoted by $\\phi_{l, j, i}$ where $j, i$ represents the $i, j$th neurons in those layers respectively.\n", 108 | "\n", 109 | "So learnable activation function between layer $l, l+1$:\n", 110 | "\n", 111 | "$$\\phi_{l, j, i}; l=0, 1, \\ldots L-1, i=1, 2, \\ldots n_l, j=1, 2, \\ldots n_{l+1}\\, \\ldots \\, (5)$$\n", 112 | "\n", 113 | "We can just check again that given that picture\n", 114 | "\n", 115 | "$$n_0=2\\, (n_{\\text{in}}), n_1=5\\, (2\\times n_0 + 1), n_2 = 1 \\, (n_{\\text{out}})\\, \\ldots \\, (5)$$\n", 116 | "\n", 117 | "With these in mind, let's denote the input pre-activation of $ϕ_{l,j,i}$ as $x_{l,i}$; Then post-activation we will have\n", 118 | "\n", 119 | "$$\\tilde{x}_{l, j, i} \\equiv ϕ_{l,j,i}(x_{l,i})$$\n", 120 | "\n", 121 | "The activation value of the $(l + 1, j)$ neuron is simply the sum of all incoming postactivations.\n", 122 | "\n", 123 | "\n", 124 | "With this we can define the learnable transformation matrix as\n", 125 | "\n", 126 | "$$\\Phi = \\begin{bmatrix}\\phi _{1, 1} (\\cdot) & \\phi _{1, 2} (\\cdot)\\, \\dots & \\phi _{1, n_{l}}(\\cdot) \\\\ \\phi _{2, 1} (\\cdot) & \\phi _{2, 2} (\\cdot) \\dots & \\phi _{2, n_{l}} \\\\ \\vdots & \\ddots & \\vdots \\\\ \\phi _{n_{l+1}, 1} & \\dots & \\phi _{n_{l+1}, n_l}\\end{bmatrix}\\, \\ldots \\, (6)$$\n", 127 | "\n", 128 | "Using this we can also write the transformation rule:\n", 129 | "\n", 130 | "$$\\mathbb{x}_{l+1} = \\Phi (\\mathbb{x}_{l}) = \\begin{bmatrix}\\phi _{1, 1} (\\cdot) & \\phi _{1, 2} (\\cdot)\\, \\dots & \\phi _{1, n_{l}}(\\cdot) \\\\ \\phi _{2, 1} (\\cdot) & \\phi _{2, 2} (\\cdot) \\dots & \\phi _{2, n_{l}} \\\\ \\vdots & \\ddots & \\vdots \\\\ \\phi _{n_{l+1}, 1} & \\dots & \\phi _{n_{l+1}, n_l}\\end{bmatrix}\\, \\mathbb{x}_l\\, \\ldots \\, (7)$$\n", 131 | "\n", 132 | "\n", 133 | "We can always check our understanding once again so\n", 134 | "\n", 135 | "$$l=0; \\rightarrow \\mathbb{x_0} = [x_{0, 1}, \\, x_{0, 2}]\\rightarrow \\mathbb{x}_{1\\times 2}; n_0 \\, (n_l) = 2, n_1 \\, (n_{l+1}) = 5; \\\\ \\Phi _{n_{l+1} \\times n_{l}} \\rightarrow \\Phi _{5 \\times 2}\\rightarrow \\Phi _{5 \\times 2} \\times \\mathbb{x}^T_{2 \\times 1} \\equiv \\mathbb{x}_{5\\times 1}\\, \\ldots \\, (8)$$\n", 136 | "\n", 137 | "Indeed we have 5 outputs $x_{1, 1}\\, x_{1, 2}\\, x_{1, 3}\\, x_{1, 4}\\, x_{1, 5}\\,$.\n", 138 | "\n", 139 | "Once we have the transformation matrix ready we can simply compose (stack layers) them to go deeper as below:\n", 140 | "\n", 141 | "$$\\text{KAN}(\\mathbb{x}) = \\left(\\Phi _{L-1} \\circ \\Phi _{L-2} \\circ \\dots \\Phi _{1} \\circ \\Phi _0\\right)(\\mathbb{x}) \\, \\ldots \\, (9)$$\n", 142 | "\n", 143 | "At this point, we can also appreciate that all the operations are differentiable (assuming the 1D functions also are) gradients can flow through the network i.e. we can do the backpropagation.\n", 144 | "\n", 145 | "One can also make comparison with the MLP layer where we have weight matrices (linear transformation) and activation function (non-linearity) separated\n", 146 | "\n", 147 | "$$\\text{MLP}(\\mathbb{x}) = \\left(W _{L-1} \\circ \\sigma \\circ W _{L-2} \\circ \\sigma \\circ \\dots \\circ W _0\\right)(\\mathbb{x}) \\, \\ldots \\, (10)$$\n", 148 | "\n", 149 | "The values in the weight matrices get updated but the activation function once defined is fixed. This is the pivotal difference between a KAN and an MLP layer. 
\n", 150 | "\n", 151 | "\n", 152 | "Since for KAN now everything boils down to the activation functions, the authors define how to construct these functions.\n", 153 | "\n", 154 | "--------------------------------------------------------------------------------\n", 155 | "\n", 156 | "\n", 157 | "### Learnable Activations: 🧑 🖊 💻\n", 158 | "\n", 159 | "For constructing the activation function $\\phi (x)$, the authors propose to have a basis function $(b(\\cdot))$ and spline function and combine them as below:\n", 160 | "\n", 161 | "$$\\phi (x) = w\\left(b(x) + \\text{spline}(x)\\right)\\, \\ldots \\, (11)$$\n", 162 | "\n", 163 | "The authors take the basis function as SiLU\n", 164 | "\n", 165 | "$$b(x) = x \\times \\sigma (x) = \\frac{x}{1+e^{-x}}\\, \\ldots \\, (12)$$\n", 166 | "\n", 167 | "For the spline function, it is a linear superposition of B-splines\n", 168 | "\n", 169 | "$$\\text{spline} (x) = \\sum _i \\, c_i \\, B_i(x)\\, \\ldots \\, (13)$$\n", 170 | "\n", 171 | "If we look back at the second figure we see that it is a linear combination of B-splines with $k=3$, i.e the order is 3, so the degree of the polynomials in the B-spline is 2. One advantage of defining a spline like this is the possibility of making it arbitrarily smooth by having more curves. This is shown also in Fig. 2 where we the authors have increased the number of intervals where we join different polynomials from 7 to 12.\n", 172 | "\n", 173 | "The weighting of the B-splines i.e. $c_i$'s are trainable and the authors argue that the only usage of the factor $w$ in Eq. 11 is to have better control of the overall magnitude of the activation function. \n", 174 | "\n", 175 | "Here also the authors discuss that in general KAN is slower than MLP. So to understand this we can simply caluclate the number of parameters by assuming a network of depth $L$ with every layer having equal number of nodes $n_i = N$ with each spline of order $k$ (usually k = 3) on G intervals; This would be\n", 176 | "\n", 177 | "$$\\text{KAN}: \\mathcal{O}(N^2 (L(G+k)) \\approx \\mathcal{O}\\left(N^2 \\, LG \\right); \\, \\, \\text{MLP}: \\mathcal{O}(N^2 L)$$\n", 178 | "\n", 179 | "However, KANs require much less depth i.e. $N$ than in MLPs. \n", 180 | "\n", 181 | "\n", 182 | "There are just so many intricate details still left in this paper but one thing that stands out personally for me is the interpretability of KANs. The authors show that KANs can \"discover\" simple division laws to non-trivial relations in knot theory. This could lead to further applications of KANs in foundational model for AI & Science in general.\n", 183 | "\n", 184 | "This also makes KANs more 'attractive' than symbolic regression; The authors gave an example of learning the very wiggly Bessel Function of order 20 $\\left(J_{20}(x)\\right)$ via KAN which is impossible through symbolic regression without any prior knowledge of that special function (in this case Bessel function) itself.\n", 185 | "\n", 186 | "\n", 187 | "\n", 188 | "\n", 189 | "Out of the many examples the authors presented, I liked the relatively easy but fascinating 'auto discoverable' property of KAN. 
Say we start with relativistic addition of velocity formula;\n", 190 | "\n", 191 | "$$f(v_1, v_2) = \\frac{v_1 + v_2}{1 + v_1\\, v_2}$$\n", 192 | "\n", 193 | "The way one can think about the depth of KAN is to consider every layer of KAN discovering one mathematical operation; So by looking at the formula above, first we think about multiplication;\n", 194 | "we need two layers for multiplication as the authors show that learned activation functions would be linear and quadratic, so:\n", 195 | "$$2xy = \\left(x+y\\right)^2 - \\left(x^2 + y^2\\right)$$\n", 196 | "\n", 197 | "Inverting of $1 + v_1\\, v_2$ would use one layer, and multiplication of $v_1 + v_2$ with $\\frac{1}{1 + v_1\\ v_2}$ would require another 2 layers; In total 5. \n", 198 | "\n", 199 | "But the researchers found that auto discovered KANs are only 2 layers deep and this on hindsight could be explained via rapidity trick.\n", 200 | "\n", 201 | "In relativity we can simplify the transformation rules via the rapidity trick; One can define rapidity as:\n", 202 | "\n", 203 | "$$\\phi_1 = \\text{arctanh}\\, v_1, \\, \\phi_2 = \\text{arctanh}\\, v_2 $$\n", 204 | "\n", 205 | "Sorry we physicists always take natural units $\\bar{h}=c = 1$.\n", 206 | "\n", 207 | "Since\n", 208 | "$$\\tanh (a+b) = \\frac{\\tanh a + \\tanh b}{1 + \\tanh a \\, \\tanh b}$$\n", 209 | "\n", 210 | "we can clearly see that\n", 211 | "\n", 212 | "$$f(v_1, v_2) = \\tanh (\\phi _1 + \\phi _2)$$\n", 213 | "\n", 214 | "So the relativistic addition of velocity is indeed simple addition and the final application of `tanh` function; So we need two layers!\n", 215 | "\n", 216 | "Can we use KAN like these to discover/rediscover some fundamental physics laws? ❣ ❣ ❣ ❣\n" 217 | ], 218 | "metadata": { 219 | "id": "cwFA9cev6TWW" 220 | } 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "source": [ 225 | "## Let's Check Some Variations of Example from [KAN Repository](https://github.com/KindXiaoming/pykan/tree/master/tutorials)" 226 | ], 227 | "metadata": { 228 | "id": "57w-cOAMrTQX" 229 | } 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": null, 234 | "metadata": { 235 | "id": "FGip3a2l6PEs", 236 | "colab": { 237 | "base_uri": "https://localhost:8080/" 238 | }, 239 | "outputId": "1b6070bd-ec3d-4457-bae8-312a05615d87" 240 | }, 241 | "outputs": [ 242 | { 243 | "output_type": "stream", 244 | "name": "stdout", 245 | "text": [ 246 | "Requirement already satisfied: pykan in /usr/local/lib/python3.10/dist-packages (0.0.3)\n" 247 | ] 248 | } 249 | ], 250 | "source": [ 251 | "!pip install pykan" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "source": [ 257 | "from kan import KAN, create_dataset\n", 258 | "\n", 259 | "import torch\n", 260 | "#device = torch.device(\"cuda\")" 261 | ], 262 | "metadata": { 263 | "id": "46yn9P_DrfNU" 264 | }, 265 | "execution_count": null, 266 | "outputs": [] 267 | }, 268 | { 269 | "cell_type": "code", 270 | "source": [ 271 | "\n", 272 | "f = lambda x: (x[:,[0]]+x[:,[1]])/(1+x[:,[0]]*x[:,[1]]) # dataset creation where x[:, [0]] represents v1, x[:, [1]]: v2\n", 273 | "\n", 274 | "# dataset = create_dataset(f, n_var=2, ranges=[-0.9,0.9])\n", 275 | "\n", 276 | "# print ('check dataset type: ', type(dataset), 'check keys: ', dataset.keys())" 277 | ], 278 | "metadata": { 279 | "id": "SDSZYv3yi5YD" 280 | }, 281 | "execution_count": null, 282 | "outputs": [] 283 | }, 284 | { 285 | "cell_type": "code", 286 | "source": [ 287 | "print (dataset['train_input'])\n", 288 | "train_input_arr = dataset['train_input'].numpy()\n", 289 
| "print (train_input_arr.shape)\n", 290 | "train_label_arr = dataset['train_label'].numpy()\n", 291 | "print (train_label_arr.shape)" 292 | ], 293 | "metadata": { 294 | "colab": { 295 | "base_uri": "https://localhost:8080/" 296 | }, 297 | "id": "NcTRiGJbmNiv", 298 | "outputId": "de1d3038-d937-4e3b-badd-9c117624e9f1" 299 | }, 300 | "execution_count": null, 301 | "outputs": [ 302 | { 303 | "output_type": "stream", 304 | "name": "stdout", 305 | "text": [ 306 | "tensor([[-0.0067, 0.4992],\n", 307 | " [ 0.4828, 0.1612],\n", 308 | " [-0.7407, 0.1374],\n", 309 | " ...,\n", 310 | " [-0.2895, -0.4111],\n", 311 | " [ 0.0032, -0.3570],\n", 312 | " [-0.1730, -0.7539]])\n", 313 | "(1000, 2)\n", 314 | "(1000, 1)\n" 315 | ] 316 | } 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "source": [ 322 | "import matplotlib.pyplot as plt\n", 323 | "\n", 324 | "### check train and test input distribution\n", 325 | "\n", 326 | "fig = plt.figure(figsize=(10, 5))\n", 327 | "fig.add_subplot(131)\n", 328 | "plt.hist(dataset['train_input'][:, 0], bins=20, alpha=0.7, label=r'$v_1$-train', color='orange')\n", 329 | "plt.hist(dataset['train_input'][:, 1], bins=20, alpha=0.7, label=r'$v_2$-train', histtype='step')\n", 330 | "plt.legend(fontsize=12)\n", 331 | "fig.add_subplot(132)\n", 332 | "plt.hist(dataset['test_input'][:, 0], bins=20, alpha=0.7, label=r'$v_1$-test', color='orange')\n", 333 | "plt.hist(dataset['test_input'][:, 1], bins=20, alpha=0.7, label=r'$v_2$-test', histtype='step')\n", 334 | "plt.legend(fontsize=12)\n", 335 | "fig.add_subplot(133)\n", 336 | "plt.hist(dataset['train_label'].numpy(), bins=20, alpha=0.7, label=r'$\\frac{v_1+v_2}{1+v_1\\, v_2}$-train', color='orange')\n", 337 | "plt.hist(dataset['test_label'].numpy(), bins=20, alpha=0.7, label=r'$\\frac{v_1+v_2}{1+v_1\\, v_2}$-test', histtype='step')\n", 338 | "plt.legend(fontsize=12)\n", 339 | "plt.tight_layout()\n", 340 | "plt.show()" 341 | ], 342 | "metadata": { 343 | "colab": { 344 | "base_uri": "https://localhost:8080/", 345 | "height": 506 346 | }, 347 | "id": "tcwBhCOUj3L6", 348 | "outputId": "4524b6db-a744-4fd9-ecfb-3d378c6a014b" 349 | }, 350 | "execution_count": null, 351 | "outputs": [ 352 | { 353 | "output_type": "display_data", 354 | "data": { 355 | "text/plain": [ 356 | "
" 357 | ], 358 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAA9wAAAHpCAYAAABweDQqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABjdUlEQVR4nO3de3gU5f338U9OuwksSSSGHDj7wwoEQUGFqChSbB4qPOYhxeIRLOIB0IbQKvyKGmg0oL8KWgKo0CBtIwWFWLWgQhWlEEAOP4MHBEWhhgQJTUKAbNhknj9otqwkIbvZyW4279d17XWxs3PPfHez3LufnbnvCTIMwxAAAAAAAPCqYF8XAAAAAABAICJwAwAAAABgAgI3AAAAAAAmIHADAAAAAGACAjcAAAAAACYgcAMAAAAAYAICNwAAAAAAJgj1dQE/VFtbq6KiInXo0EFBQUG+LgdAG2AYhk6cOKHExEQFB/vv75D0jwBaGv0jANSvqf2j3wXuoqIide3a1ddlAGiDDh8+rC5duvi6jAbRPwLwFfpHAKjfhfpHvwvcHTp0kHS28MjISB9XA6AtqKioUNeuXZ39j7+ifwTQ0ugfAaB+Te0f/S5w150GFBkZSYcJoEX5+2mI9I8AfIX+EQDqd6H+0X8H4wAAAAAA0IoRuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwgd9Nmgb4Qk1Njc6cOePrMuBlISEhCgsL83UZPsP7unVr6+9fBI6amhplZmbqT3/6k4qLi5WYmKgJEyZo1qxZzsmGDMPQk08+qZdfflllZWW67rrrtHjxYl166aU+rr51ov+HWfhsch+BG22aYRgqLi5WeXm5DMPwdTkwgdVq1cUXX9ymZq3lfR042uL7F4Fn3rx5Wrx4sV555RUlJSXp448/1r333quoqCg98sgjkqRnnnlGL7zwgl555RX17NlTjz/+uFJSUvTZZ58pPDzcx8+g9aD/R0vgs8k9BG60aeXl5SorK1NsbKzat2/v95c9QdMZhqEzZ86ovLxc3333nSS1mQ8G3tetX1t+/yLwbNmyRbfeeqtuueUWSVKPHj306quvavv27ZLOvt8XLFigWbNm6dZbb5UkrVixQnFxccrPz9e4cePO26bdbpfdbnfer6ioaIFn4v/o/2EmPps8Q+BGm2UYho4eParIyEhdfPHFvi4HJoiIiFCHDh30z3/+U8eOHWsTHwq8rwNHW3z/IjBde+21eumll/Tll1/qRz/6kf73f/9Xmzdv1nPPPSdJOnjwoIqLizVixAhnm6ioKA0ePFhbt26tN3BnZ2dr9uzZLfYcWgP6f7QEPpvcx6RpaLNqampUU1NDRxHggoKCFBUVJbvd3ibGs/G+Dixt7f2LwDRjxgyNGzdOvXv3VlhYmK688kqlp6frzjvvlCQVFxdLkuLi4lzaxcXFOR/7oZkzZ6q8vNx5O3z4sLlPohWg/0dL4bPJPRzhRpvlcDgkSaGh/DcIdHWTe9TU1AT8RB+8rwNPW3r/IjCtWrVKf/7zn5WXl6ekpCTt2bNH6enpSkxM1Pjx4z3aptVqldVq9XKlrRv9P1oSn01Nx/9ItHmMbwp8bfFv3Bafc6Dib4nW7te//rXzKLckXX755fr222+VnZ2t8ePHKz4+XpJUUlKihIQEZ7uSkhJdccUVvii5VaPPQEvgfdZ0nFIOAAAA05w6dUrBwa5fOUNCQlRbWytJ6tmzp+Lj47Vx40bn4xUVFdq2bZuSk5NbtFYA8DaOcAMAAMA0o0eP1lNPPaVu3bopKSlJu3fv1nPPPadf/OIXks4eKUtPT1dWVpYuvfRS52XBEhMTlZqa6tviAaCZCNwAAAAwze9//3s9/vjjmjx5so4eParExEQ98MADeuKJJ5zrPProozp58qTuv/9+lZWV6frrr9f69eu5BjeAVo/ADQAAANN06NBBCxYs0IIFCxpcJygoSHPmzNGcOXNarjAAaAEEbgB+Z/ny5br33nt18OBB9ejRw9flBI4PRvu6grOGvenrCuq1ZcsWvfvuu0pPT1d0dHSr3w8A4MImTJigYcOGacKECb4uBQGKwA3Uh2DSKAIDAtGWLVs0e/ZsTZgwwfTA3RL7AYBAVlJSos6dO+vkyZPOS8StWrVKCxcu1Icfftjq9oPAReBGiyittKvS7nCrjc0aqhgb19j0R2YHhrvvvlvjxo3jGqtodRw1taoxjMbX+ffMzHZHjeyOGoUEBSk0hIuGAK0F32n8Q1xcnKKjo7V//37169dPtbW1yszM1JIlS1rlfhC4CNwwXWmlXbPy96raUetWO0tosLJS+/EBFQBOnjyp9u3bN3n9kJAQhYSEmFgRWqubb75ZBw8e1IEDB8577KqrrlJISIi2bdvm9nYzMzM1e/ZsSWcvUVTn3GEN3333nR5//HG9/fbbKisrU69evTR9+nTnTMuOmlrtO3xUv8v+rd5d95a+LylWh8hI9U66XDMe/636DbhCC555Ss8/my1J6n1pL+d+9h/4Sr3+6xK36wbQsvhO41+SkpL0xRdfqF+/fsrLy1OXLl00ePBgDRkyRHv37tWePXvUq1evC2/Ig/3ccMMN2rRpkx577DGFhobq6quv1vz5873wrMzBcD3fIXDDdJV2h6odtbpv6CVKjG7abKNFZVVa+tHXqrQ7+HDykK+CSd3jn376qbKysrRu3Tr16NFDu3fv1rfffqt58+Zp48aNOnTokNq1a6fhw4fr2Wefden8f/ihULfN/fv3KysrS/n5+TIMQ2PGjFFOTo7atWvn/guEVikpKUnvv/++7Ha7yxkQ+fn52rlzp959912PtjtmzBh9+eWXevXVVzV//nxdfPHFkqTY2FhJZ08pHDJkiIKCgjR16lTFxsZq3bp1mjhxoioqKpSenq4aw9BvfvVLrXszXw9Nnqzeffrq+PFSbfnHP3Tsu6+VcOMQ3TVurI4cPqhVK1fq2f/5naI7xuhElUMd/70/AP6N7zT+pS4IOxwOzZkzR3/6058UFhamN954Q4899liD7UaNGqXNmzdLOnud+FWrVik9PV2SNGPGDM2YMeOC+5GkXr16adOmTbJarbrzzjtVWFioyy+/3KPnwnC9wEXgRotJjA5X95imH+VE8/gqmNQZO3asLr30Uj399NMy/n2K7Y4dO7RlyxaNGzdOXbp00TfffKPFixdr2LBh+uyzzy4YnG+77Tb17NlT2dnZ2rVrl5YuXapOnTpp3rx5Hj0XtD5JSUmqqalxntonSYZh6Mknn9SNN96om2++2aPt9u/fXwMHDtSrr76q1NTU8379/81vfqOamhoVFhYqJiZGkvTggw/q9ttvV2Zmph544AEFh1n09/fe0S8m3qcFDRzluGrglbp60CCtWrlSP0sbo4QuXXWkrErt23PpI6A1aZXfafxhfhovz02TlJSkLVu2aMWKFerdu7euueYaSWdPA2/MW2+95fx3UyZNa2g/nTt3dq4TGhrarLPzGK4XuAjcQIDyVTCpM2DAAOXl5bksu+WWW/Sz
n/3MZdno0aOVnJys119/XXfffXej+77yyiu1bNky5/3S0lItW7aMwN2GJCUlSZLz1D5JWr16tT755BN99NFHpuzTMAy9/vrruu2222QYho4dO+Z8LCUlRStXrtSuXbt01eAhioyK0o7t21VUVKTExERT6gEAnJWUlKQXX3xRBQUFWrNmjc/2s3v3bh07dkx9+/Y1rYYfYrhe68EsLUCAOjeY1KkLJllZWabv/8EHHzxvWUREhPPfZ86cUWlpqXr16qXo6Gjt2rXL7W0OHTpUpaWlqqioaH7BaBV++L6um7wmJSVF119/vSRp8eLFGjhwoMLCwpSZmenSvrq6WsXFxS63mpqaRvf5/fffq6ysTC+99JJiY2Ndbvfee68k6ejRo5KkGU/8Vp9+ulddu3bVNddco8zMTH399dfefAkAAP+WlJSkwsJCDRo0SAMGDPDJfo4ePapHHnlEf/jDHzzefmZmpn79619LOjtcLygoSEFBQfrmm2+cjwcFBemzzz7THXfcoYsuukjXX3+9vv32W02ePFmXXXaZIiIiFBMTo7FjxzrbnWv58uX1bvPAgQPOo+pRUVG69957derUKY+fC87HEW4gQDU1mLz88ssqLCzUb37zG5dwUl1drePHj7tsMzY2tsm/jp47vrvO6dOnlZ2drdzcXH333XfOU80lqby8/ILb7Natm8v9iy66SJL0r3/9S5GRkU2qC61bVFSUEhMTne/rvLw8ff7551qxYoVznYSEBGVmZp53hoV09pS9m266yWXZhSaQqf33rOJ33XWXxo8fX+86/fv3lySNSk3TqJTh+tubf9W7776rZ599VvPmzdOaNWs0cuRIt54rAKBxsbGxLt8lPLF8+XKP92O323XHHXdowYIFFzyNvTGeDtdr7lA9ieF6LYHADQQoXwSTc517NLvOww8/rNzcXKWnpys5OVlRUVEKCgrSuHHjnKGmMQ2F/eZ+2KJ1qZu8pqamRnPmzFFqaqquuuoq5+OpqamSpL/97W/ntR0wYIDee+89l2Xx8fGSpKCgoHr3Fxsbqw4dOqimpkYjRoxosC674+yR8oSEBE2ePFmTJ0/W0aNHNXDgQD311FPOwN3QfgAA3pGamqrt27dr//79Sk9P19ixY03Zz4oVK1RYWKjp06dLkrKzs5WcnOz2djwdrnf69OlmDdWTGK7XEgjcQABr6WByIa+99prGjx+v3/3ud85lVVVVKisr82h7aJuSkpK0dOlSvfLKK/rqq6/cGrd30UUXNRia68bC/fD9GBISorS0NOXl5Wnv3r3OseN1vv/+e8XGxqqmpkYVFeVKOGfm4k6dOikxMVF2u73e/SR06drk2gEATZOfn98i+5k0aZImTZrUIvuSzh9a98OhehUVFS5D9ZoSuOsbrrd27VpVVFRw9qCXELiBANbSweRCQkJCzjsa/fvf//6CY2iBcyUlJamyslIzZszQz3/+8/MCsKcGDRok6eyM5OPGjVNYWJhGjx6t9u3ba+7cuXr//fc1ePBgTZo0SX379tXx48e1a9cubdiwQcePH9eJEyeUfPmPNCYtTQOvvEI2m00bNmzQjh07XH5kOnc/aWNv08kzhu68bYysUXyxAYBA5u3hes0dqicxXK8lELiBAOaLYNKYUaNG6Y9//KOioqLUt29fbd26VRs2bHBeZgkm8/LlWHylbn6C48ePO68J7w1XX321fvvb32rJkiVav369amtrdfDgQbVv315xcXHavn275syZozVr1mjRokWKiYlRUlKS87S7du3a6a5fTFLBh+/rjfy1qq2tVa9evbRo0SI99NBDje7nJzcNVUcCNwAENG8P12vuUD2J4XotgcAN1Idg0qjGgkljnn/+eYWEhOjPf/6zqqqqdN1112nDhg1KSUnxWm0IfMnJyaZ9EZg1a5ZmzZpV72OdOnXSwoULtXDhwnoft1gsmvlklhKiw2UNbfxoRd1+7I4aHSmrcjkNHQDgKlDmvjh+/LhXh+sxVK91IHADAcwXwSQzM/O8SzHViY6OrveyGT+8fMWECRM0YcKEC27zh+sBkuRwOORwOFRTUyOHw6GqqiqFhYVx/VEAaKUC6WirN4frMVSvdeA63EAbVhdGzg0mdNJo7bKyshQREaGlS5fqqaeeUkREhP74xz/6uiwAQDMsXrxYAwcOVFhYWIM/7Ldm5w7X++Mf/6iVK1fq5MmTjbapG6qXnp6ul156Sffee69eeOEFhur5GQI30IYRTBCIMjMzZRiGy40zIQCgdau7lGlaWlqT1p8wYUKTrrHtL+qG6/3v//6vJkyYoNtvv13ff/99o22ef/553XPPPfrzn/+s6dOn68iRI9qwYYNsNlsLVY2m4JRyoA1r7PRvAAAQQFr5/DSNXcrUEyUlJercubNOnjwpq9UqSVq1apUWLlyoDz/80OvtmsLd4XpNHaonMVzPlzjCDQAAAKBNiYuLU3R0tPbv3y9Jqq2tVWZmprKyskxph7aLwA0AAACgzUlKStIXX3whScrLy1OXLl10ww03yG63a8iQIbLZbDpw4ECT223atElDhgzR9ddfr2nTprXoc4H/InADAAAAaPVGjRql6OhoRUdHKy8vT5MnT3benzt37nnr1wVnh8OhOXPmOI9Sh4WF6Y033tDPfvazevfTULtevXpp06ZN2rx5s44eParCwkLznixaDcZwAwAAAGj13nrrLee/J0yYoGHDhjU6HjkpKUlbtmzRihUr1Lt3b11zzTWSpODgYMXFxbndrnPnzs51QkNDuRwlJBG4A0pppV2VdodbbWzWUMXYrCZVBAAAADSfw+GQw+FwuZRpWFhYs0JtUlKSXnzxRRUUFGjNmjVea7d7924dO3ZMffv29bg2BA4Cd4AorbRrVv5eVTtq3WpnCQ1WVmo/QjcAoHX5YLT7bVr5LM1AW5aVlaXZs2c77z/11FPKzc1t1ozaSUlJKiws1NixYzVgwACvtDt69KgeeeQRvfbaax7XhcBC4A4QlXaHqh21um/oJUqMDm9Sm6KyKi396GtV2h0EbgAAAPgtdy9l2pRrcMfGxsowDLdraaid3W7XHXfcoQULFjR6SjqaLhDO4CVwB5jE6HB1j2nv6zIAAACAVis1NVXbt2/X/v37lZ6errFjx16wzYoVK1RYWKjp06dLkrKzs5WcnGx2qQErUM7gJXADAAAAwDny8/PdbjNp0iRNmjTJ+8W0UYFyBi+BGwAAAADgl1r7GbwEbgBoIzwZB2UGfxtbVWfLli169913lZ6erujo6BbZT4Stg2n7AQAAvkfgBuB3li9frnvvvVcHDx5Ujx49fF1OQPB0HJQZ/G1sVZ0tW7Zo9uzZmjBhgumBu24/BG4AAAIbgRuoB0cCG9dSRwLhPZ6MgzKDP46tAgAAMAuBG/gBjgRemNlHAu+++26NGzdOVqt/Pe9A0NrHQd188806ePCgDhw4cN5jV111lUJCQrRt2za3t5uZmem8vmvPnj2dy889y+K7777T448/rrfffltlZWXq1auXpk+frl/84hfO9SsrT+hXc/5bb/71DR05ckR
RUVEaMGCA5s2bp4EDBza4ny/2H9Blvf7L7boBAIB/I3ADP8CRQO87efKk2rdvesgLCQlRSEiIiRWZ47vvvtNjjz2mdevW6dSpU+rVq5dyc3N11VVXSZIMw9CTTz6pl19+WWVlZbruuuu0ePFiXXrppT6uvPVISkrS+++/L7vd7vKDTH5+vnbu3Kl3333Xo+2OGTNGX375pV599VXNnz9fF198saSz11qVpJKSEg0ZMkRBQUGaOnWqYmNjtW7dOk2cOFEVFRVKT0+XJM361S+17s18TZ06VX379lVpaak2b96szz//XAMHDjxvP1EXdVTZqTPO/QAAgMBC4AYawJHA+l3oSGDd459++qmysrK0bt069ejRQ7t379a3336refPmaePGjTp06JDatWun4cOH69lnn3UZq/3DMdx129y/f7+ysrKUn58vwzA0ZswY5eTkqF27du6/QF72r3/9S9ddd51uuukmrVu3TrGxsdq/f78uuugi5zrPPPOMXnjhBb3yyivq2bOnHn/8caWkpOizzz5TeLjvftxpTZKSklRTU6P9+/erX79+kv7zQ8aNN96om2++2aPt9u/fXwMHDtSrr76q1NTU8+YO+M1vfqOamhoVFhYqJiZGkvTggw/q9ttvV2Zmph544AEFh1n09/fe0S8m3qff/e53zraPPvpog/tJ6NJVR8qq1L49f38AAAJRmw7cnozT9dcxtcAP+epIYJ2xY8fq0ksv1dNPPy3DMCRJO3bs0JYtWzRu3Dh16dJF33zzjRYvXqxhw4bps88+u2Bwvu2229SzZ09lZ2dr165dWrp0qTp16qR58+Z59Fy8ad68eeratatyc3Ody879QcIwDC1YsECzZs3SrbfeKklasWKF4uLilJ+fr3HjxrV4za1RUlKSJOmLL75wBu7Vq1frk08+0UcffWTKPg3D0Ouvv67bbrtNhmHo2LFjzsdSUlK0cuVK7dq1S1cNHqLIqCjt2L5dRUVFSkxMNKUeAADQerTZwO3pOF1/HVML/JCvjgTWGTBggPLy8lyW3XLLLfrZz37msmz06NFKTk7W66+/rrvvvrvRfV955ZVatmyZ835paamWLVvmF4H7r3/9q1JSUjR27Fht2rRJnTt31uTJkzVp0iRJZ88AKC4u1ogRI5xtoqKiNHjwYG3durXewG2322W32533KyoqzH8ifu7cwC1JtbW1yszMVEpKiq6//nrZ7XY99NBD2rBhg8rKytS3b1/Nnz9fycnJkqTq6modP37cZZuxsbGNDmH4/vvvVVZWppdeekkvvfRSvescPXpUkjTjid/q1w8/oK5du2rQoEH66U9/qnvuuUeXXHJJs587mo8f2vFD7r4nisqqTKwGQCBqs4Hbk3G6gTSmFoHPF0cCz/Xggw+etywiIsL57zNnzqiiokK9evVSdHS0du3adcHA/cNtDh06VGvXrlVFRYUiIyO9U7iHvv76ay1evFgZGRn67//+b+3YsUOPPPKILBaLxo8fr+LiYklSXFycS7u4uDjnYz+UnZ3tPH0fZ0VFRSkxMdEZuPPy8vT5559rxYoVkiSHw6EePXpo8+bN6tKli1atWqXRo0frm2++kc1m05YtW3TTTTe5bPNCl5+rrT37w+xdd92l8ePH17tO//79JUmjUtM0KmW4/vbmX/Xuu+/q2Wef1bx587RmzRqNHDmyuU8fzVBaHc4P7XDRnIMvNmub/QoNwE1tvrdo7eN0gYb44kjguc49nbrO6dOnlZ2drdzcXH333XfOU80lqby8/ILb7Natm8v9uvHR//rXv3weuGtra3XVVVfp6aeflnT2aPzevXu1ZMmSBkPahcycOVMZGRnO+xUVFeratatX6m3NkpKS9MUXX6impkZz5sxRamqqc2K69u3b64knnnCuO27cOGVkZGjfvn0aNGiQBgwYoPfee89le/Hx8ZKkoKCgevcXGxurDh06qKamxuUMhR+yO2okSQkJCZo8ebImT56so0ePauDAgXrqqaecgbuh/cBclQ4LP7TDhaeTpHLWAwB3uB24mYUXaB18cSTwXOceza7z8MMPKzc3V+np6UpOTlZUVJSCgoI0btw451HExjQU9s8N7r6SkJCgvn37uizr06ePXn/9dUn/CXUlJSVKSEhwrlNSUqIrrrii3m1arVYujVaPpKQkLV26VK+88oq++uorrVmzpsF19+/fr+PHj6tXr16Szv5I01BorptJv6yszGV5SEiI0tLSlJeXp7179zrPGKnz/fffKzY2VjU1NaqoKFfCOV/cO3XqpMTERJehAefuJ6ELP6C0NH5oxw/xngBgJrcCN7PwAq1LSx8JvJDXXntN48ePd5nBuaqq6ryA0xpdd9112rdvn8uyL7/8Ut27d5d09oh/fHy8Nm7c6AzYFRUV2rZtmx566KEWq9PX4w+9sf+kpCRVVlZqxowZ+vnPf35eAK5z+vRp3XXXXZo5c6aioqIuuN1BgwZJOjsj+bhx4xQWFqbRo0erffv2mjt3rt5//30NHjxYkyZNUt++fXX8+HHt2rVLGzZs0PHjx3XixAklX/4jjUlL08Arr5DNZtOGDRu0Y8cOl/f8uftJG3ubTp4xdOdtY2SN8u1ZGgDQFk2YMEHDhg3ThAkTfF0KApRbgZtZeIHWpaWPBF5ISEjIeUejf//736umpsat7fijadOm6dprr9XTTz+t2267Tdu3b3eZZCsoKEjp6enKysrSpZde6vxBMjExUampqabXZ7OGyhIarKUffW36vi6kueMf64ZLHD9+vMEx7mfOnNHYsWPVq1cvlx+WGnP11Vfrt7/9rZYsWaL169ertrZWBw8eVPv27RUXF6ft27drzpw5WrNmjRYtWqSYmBglJSU5J+1r166d7vrFJBV8+L7eyF+r2tpa9erVS4sWLXL5UaW+/fzkpqHqSOAGgCYrKSlR586ddfLkSefZYKtWrdLChQv14Ycftrr9IHC59Y2HWXjRljR2JM5RU6taN89iDg6SQkOCvbL/pvLFkcDGjBo1Sn/84x8VFRWlvn37auvWrdqwYYPzusat2dVXX621a9dq5syZmjNnjnr27KkFCxbozjvvdK7z6KOP6uTJk7r//vtVVlam66+/XuvXr2+Rs39ibFZlpfZze4ZmMzR3/GNycnKjwwhqa2t19913KygoSK+88opbZ2TMmjVLs2bNqvexTp06aeHChVq4cGG9j1ssFs18MksJ0eGyhjY+10HdfuyOGh0pq3I5Dd2fMKs34EMfjHZv/eCLpdhHzanFD8XFxSk6Otp5NZa6uWqWLFnSKveDwOVW4GYWXrQFFzoSWGsYqnLUSu4OGw6SwkODFezGl//WeCSwMc8//7xCQkL05z//WVVVVbruuuu0YcMGpaSkuPfE/NSoUaM0atSoBh8PCgrSnDlzNGfOnBas6j9ibNY2EYQeeOABHTlyRO+8845CQ5sxN+iJ/e6tXxsiKe6Cq7UWXD4TgL+rGzrXr18/5eXlqUuXLho8eLCGDBmivXv3as+ePc4z97y9nxtuuEGbNm3SY489ptDQUF199dWaP3
++F54VAo1b30SYhRdtwYWOBFY7anX0hF0XtbMoLKRp4flMjaF/napWpw5WWUKbfpS7NR4JzMzMVGZmZr1toqOj9Yc//OG85d98843L/QkTJriMpWpomz9cD/j222+1dOlShYeH6+KLL3YuX7dunYYOHerDylofLp8Jb+nRo4e+/fbb85ZPnjxZOTk5qqqq0vTp07Vy5UrZ7XalpKRo0aJF5x3AQfN4csaKt3n7DJi6IOxwODRnzhz96U9/UlhYmN544w099thjDbYbNWqUNm/eLEk6deqUVq1apfT0dEnSjBkzNGPGjAvuR5J69eqlTZs2yWq16s4771RhYaEuv/xyj57Lli1b9O677yo9PV3R0dEebcMf9oHzuRW4mYUXbUVjRwLtjhqFhQQ36bTRc9scKQtxq01L8NqRQMBPdO/e3S9mrQ8kzOCM5tqxY4fLXB179+7VzTffrLFjx0o6OwfG22+/rdWrVysqKkpTp07VmDFj9I9//MNXJQccT89Y8TZvnwGTlJSkLVu2aMWKFerdu7euueYaSeefbftDb731lvPfTZk0raH9dO7c2blOaGhoky+dWp8tW7Zo9uzZmjBhgqmB2+x94HxufcNuLbPwArgwjgQCAFpCbGysy/25c+fqv/7rv3TjjTeqvLxcy5YtU15enoYPHy5Jys3NVZ8+fVRQUKAhQ4b4ouSA4+k1x73JjDNgkpKS9OKLL6qgoKDRiWHN3s/u3bt17Nix8w5MApKbgdvfZ+EF0HQcCQQAtLTq6mr96U9/UkZGhoKCgrRz506dOXPGZcLd3r17q1u3btq6dWuDgZtJdz0TaGesJCUlqbCwUGPHjtWAAQN8sp+jR4/qkUce0Wuvvebx9jMzM51z7Zx7BaiDBw+qR48ekqTvvvtOjz/+uN5++22VlZWpV69emj59un7xi1841z9x4oQef/xx5efn68iRI4qKitKAAQM0b948/fWvf73gPmAOtwK3v8/Ci8Dj7kzdzI7rGUdNrWrcDN8hQUFuzboOAEB+fr7Kysqcp+8WFxfLYrGcd3prYxPuSky6i7NiY2ObffBg+fLlHu/Hbrfrjjvu0IIFC5o158CYMWP05Zdf6tVXX9X8+fOdZx7WnR1SUlKiIUOGKCgoSFOnTlVsbKzWrVuniRMnqqKiwjn+/MEHH9Rrr72mqVOnqm/fviotLdXmzZv1+eefX3AfMI/bgzb9fRZeBAZPrxnM7Ljuc9TU6ruy03L38yooSOocHUHoBgA02bJlyzRy5EglJiY2aztMuovGpKamavv27dq/f7/S09Od8wV424oVK1RYWKjp06dLOvtDUHJystvb6d+/vwYOHKhXX31Vqamp5x1x/s1vfqOamhoVFhY6L6X64IMP6vbbb1dmZqYeeOABRURE6O2339akSZP0u9/9ztn20Uf/c6m4xvYB8zBLEvySJ9cMZnZcz9QYhgxDuthmVVhoE2dddxg6VmlXjWHQifgphgsEDv6WbYS711yWpGFver8OE3377bfasGGDyxjY+Ph4VVdXq6yszOUod0lJiXMy3vow6S4ak5+f3yL7mTRpkiZNmmTqPgzD0Ouvv67bbrtNhmHo2LFjzsdSUlK0cuVK7dq1S9ddd52io6O1bds2FRUVNftHLXgP35Xht9rKNYP9RVhokBszqNdceBX4RFhYmKSzlzmJiIjwcTXwhpMnTyooKMj5twVaq9zcXHXq1Em33HKLc9mgQYMUFhamjRs3Ki0tTZK0b98+HTp0yKMjhYA/q66u1vHjx12WxcbGNjq7+ffff6+ysjKXebN+6OjRo5KkZ555RuPHj1fXrl01aNAg/fSnP9U999yjSy65xHtPAm4jcKPN4+hR4GtLf+OQkBBFR0c7P3zbtWvn1vXVcY5q935Yqq6Vahx22asko4k/XlU7alRz5vw2hmHI4XCooqJCFRUVio6ObtblZgBfq62tVW5ursaPH+9yGcqoqChNnDhRGRkZ6tixoyIjI/Xwww8rOTmZGcoRcLZs2aKbbrrJZdmFJi2rrT17Kbe77rpL48ePr3ed/v37S5Juu+02DR06VGvXrtW7776rZ599VvPmzdOaNWs0cuRI7zwJuI3AjTaLI4FtR1s7Qlh3GmZd6IaHqtx7/RxGsMprTupURFiT5zZw1NSq/PSZBtuEhIQoISFBUVFRbtUC+JsNGzbo0KFDLjMq15k/f76Cg4OVlpYmu92ulJQULVq0yAdVwl8Fyg/Hx48f13vvveeyrO4zu6HnGBsbqw4dOqimpsZlNv+GJCQkaPLkyZo8ebKOHj2qgQMH6qmnntLIkSMD5nVskm33S+3cvHqBScN0CNxoszw9EtjQESlvt2kpgfZ86rTlI4RBQUFKSEhQp06ddObMGV+X03ptf8at1b873V7Lj4/X5GHd1Pmidk1r869TWv7BV/W2CQ0NVUhISNv6goSA9ZOf/KTBs43Cw8OVk5OjnJycFq4KrUUgnanWUGhu3/7s5drKyspcloeEhCgtLU15eXnau3ev+vXr5/L4999/r9jYWNXU1KiystLlB9pOnTopMTHReQm9hvYBcxG4EXDcuZSYEdZBlnYOt44EXuiIlLfatJRAez4/1JaPEIaEhLSZHxlMUXvswuucI7SmWmV2KdRibfKlMEMtNW63AYC2aPHixXr55ZdVWFio3/zmN8rMzPR1SV41aNAgSWdnJB83bpzCwsI0evRotW/fXnPnztX777+vwYMHa9KkSerbt6+OHz+uXbt2acOGDTp+/LhOnDihLl266Gc/+5kGDBggm82mDRs2aMeOHc5ZyxvbB8xD4EbAaM6lxOb83z7qYGlacGzsiJQ327SUQHs+5+IIIQAA/+HOQQl/23dCQoIyMzOVl5fXpPUnTJigYcOGOa/57u+uvvpq/fa3v9WSJUu0fv161dbW6uDBg2rfvr3i4uK0fft2zZkzR2vWrNGiRYsUExOjpKQkzZs3T9LZMzUnT56sd999V2vWrFFtba169eqlRYsW6aGHHrrgPmAeAjcCRnMuJXbqjKHYSPOOSPnzUaxAez6oR6BdbijQng8AmMzTgxLeZgkNls3qWfxITU2VJP3tb3/zSi0lJSXq3LmzTp486bzE3KpVq7Rw4UJ9+OGHXm/XFLNmzdKsWbPqfaxTp05auHChFi5cWO/jFotFzzzzjJ55pvHhUI3tA+YgcCOgcCkxAAAAV54clDCDzRrqN9/T4uLiFB0drf3796tfv36qra1VZmamlixZYko7tF0EbgAAACDAcVDifElJSfriiy/Ur18/5eXlqUuXLrrhhhtkt9t14403au/evdqzZ4969erVpHabNm3SY489ptDQUF199dWaP3++j54Z/Il/z3YEAAAAAE0watQoRUdHKzo6Wnl5eZo8ebLz/ty5c89bvy44OxwOzZkzR1lZWZLOXjr2jTfe0M9+9rN699NQu169emnTpk3avHmzjh49qsLCQvOeLFoNjnADAAAAaPXeeust57+bMmlaUlKStmzZohUrVqh379665pprJEnBwcGKi4tzu13nzp2d69RN3AoET
uB2dxKdU5GS0s2oBEBDmOyq1SittLs91s+fxuYBAAKLw+GQw+FQTU2NHA6HqqqqFBYW1qxQm5SUpBdffFEFBQVas2aN19rt3r1bx44dU9++fT2uDYEjcAI3AMArSqvDNSt/r6odtW61s4QGKyu1H6G7JfDjFYA2JisrS7Nnz3bef+qpp5Sbm9usy34lJSWpsLBQY8eO1YABA7zS7ujRo3rkkUf02muveVwXAguBGwDgotJhUbWjVvcNvUSJ0U277FvdJfYq7Q4CNwDA6zIzM5WZmdnk9ZcvX37BdWJjY2UYhtu1NNTObrfrjjvu0IIFCxo9JR1tC4EbAFCvxOhwdY9p7+syAABocampqdq+fbv279+v9PR0jR079oJtVqxYocLCQk2fPl2SlJ2dreTkZLNLhZ8jcAMAAADAOfLz891uM2nSJE2aNMn7xaBV47JgAAAAAACYgMANAAAAAIAJOKUcaCml26VtC6R2FU1vw6zC8JLS6nBVOixNWrfotM3kahDoisqq3FqfS8oBAAIVgRsAAlxpdbhmfTpU1bVNv1apJS5YNisfEXCPzRoqS2iwln70tVvtuKQcACBQ8W0KAAJcpcOi6toQ3dfjEyVGVDapje2G+wg/cFuMzaqs1H6qtDua3IZLygFe8u/LVHlymSvAXbzPmo7ADQBtRGJEpbo3dUgDwQceirFZCc6AD4QapyRJDkfTf/ACPHXmzBlJUkhI08+ea6uYNA0AAABo5UJUpZCQEFVUuDFXDOABwzBUXl4uq9WqsLAwX5fj9zjCDQAAALRyQTLUqVMnHTlyRFarVe3bt1dQUJCvy0IAMQxDZ86cUXl5uSorK9W5c2dfl9QqELgDibuzYJ+KlJRuYkGAF3ww2v02zO4OAGiDoqKidPr0aR07dkzff/+9r8tBgLJarercubMiIyN9XUqrQOAGAAAAAkBQUJASEhLUqVMn5xhbwJtCQkI4jdxNBG4AAAAggISEhDCZFeAnmDQNAAAAAAATELgBAAAAADABgRsAAAAAABMwhrsFlFbaVWl3uNXGZg1VjM1qUkX4oaKyKlPWBQAAANB2EbhNVlpp16z8vap21LrVzhIarKzUfoRuk9msobL8a6uW5m93q50lbohsVv77AM3l0Q+S1eGKsfDDV6Bx98dM3gf8oA8ArQGJwWSVdoeqHbW6b+glSowOb1KborIqLf3oa1XaHXwomizGZlVW0keqdFjcame74T7+NkAzefyD5L+GKivpozYfttzm7jXtT0VKSjejEhc2a6gsocFa+tHXbrVr6++D0upwftAHgFaAwN1CEqPD1T2mva/LQD1iLFXuf2HjiwrQbB7/IJm/XZUOS5sNWoEmxmZVVmo/t47U8j6QKh0WftAHgFaAwA0A8Cl+kESMzUoA9BD/fwDAvxG4AQAAADOVbpe2LZDaVfi6EgAtjMuCAQAAAABgAo5wI7C4OykQAAAAAJiEI9wAAAAAAJiAI9wAAACAyYpO29xa3xZa3WZn4QcCCYEbAAAAMInNGipLcI2WftPfrXaW4Jo2fa15IFAQuAEAAACTxNisykr6SJUOS5PbFJ22aek3/dv0teaBQEHgBgAAAEwUY6kiOANtFIHbXe5eR/FUpKR0EwsCAAAAAPgjAjcAIOCVVtpVaXe41+hUpFuruzshEgAACHwEbgBAQCutDtes/L2qdtS62fBat/dliQuWzermR6u7Z04BAIBWg8ANAAholQ6Lqh21um/oJUqMDm96w20L3N6X7Yb7FGOzut0OAAAEJgI3AKBNSIwOV/eY9k1v4MkRZ8I2AAA4R7CvCwCAQJCZmamgoCCXW+/evZ2PV1VVacqUKYqJiZHNZlNaWppKSkp8WDEAtJzvvvtOd911l2JiYhQREaHLL79cH3/8sfNxwzD0xBNPKCEhQRERERoxYoT279/vw4oBwDsI3ADgJUlJSTpy5IjztnnzZudj06ZN05tvvqnVq1dr06ZNKioq0pgxY3xYLQC0jH/961+67rrrFBYWpnXr1umzzz7T7373O1100UXOdZ555hm98MILWrJkibZt26b27dsrJSVFVVVcSgtA68Yp5XDfB6PdW/9U5NnJh9ydFGjYm+7tB/Cx0NBQxcfHn7e8vLxcy5YtU15enoYPHy5Jys3NVZ8+fVRQUKAhQ4bUuz273S673e68X1HBpFp13JkRPBBnDy+tDlelw+JWG1toNdcBhk/MmzdPXbt2VW5urnNZz549nf82DEMLFizQrFmzdOutt0qSVqxYobi4OOXn52vcuHHnbZP+EUBrQeAGAC/Zv3+/EhMTFR4eruTkZGVnZ6tbt27auXOnzpw5oxEjRjjX7d27t7p166atW7c2GLizs7M1e/bsliq/VbBZQ2UJrtHSb/q71c6j2cP9VGl1uGZ9OlTVtSFutbME1ygr6aO2HbrdnRH+VKSkdBMLahv++te/KiUlRWPHjtWmTZvUuXNnTZ48WZMmTZIkHTx4UMXFxS59ZFRUlAYPHqytW7fWG7i91j96cpUADggAcENgfPsAAB8bPHiwli9frssuu0xHjhzR7NmzNXToUO3du1fFxcWyWCyKjo52aRMXF6fi4uIGtzlz5kxlZGQ471dUVKhr165mPYVWIcZmVVbSR+4f3Q2g2cMrHRZV14bovh6fKDGiskltik7btPSb/qp0WNp24IZPfP3111q8eLEyMjL03//939qxY4ceeeQRWSwWjR8/3tkPxsXFubRrrI+kfwTQWrgVuDMzM8/7NfGyyy7TF198IenspEDTp0/XypUrZbfblZKSokWLFp3XgQJAoBk5cqTz3/3799fgwYPVvXt3rVq1ShERER5t02q1ymoNjJDoTTGWKvdDY4CE7XMlRlSqO9fuRitQW1urq666Sk8//bQk6corr9TevXu1ZMkSjR8/3qNt0j8CaC3cnjSNSYEA4MKio6P1ox/9SAcOHFB8fLyqq6tVVlbmsk5JSUm9Y74BIJAkJCSob9++Lsv69OmjQ4cOSZKzH/zhlRvoIwEEArcDd92kQHW3iy++WNJ/JgV67rnnNHz4cA0aNEi5ubnasmWLCgoKGtye3W5XRUWFyw0AWrvKykp99dVXSkhI0KBBgxQWFqaNGzc6H9+3b58OHTqk5ORkH1YJAOa77rrrtG/fPpdlX375pbp37y7p7ARq8fHxLn1kRUWFtm3bRh8JoNVzeww3kwJ5gEla4O94jzbbr371K40ePVrdu3dXUVGRnnzySYWEhOj2229XVFSUJk6cqIyMDHXs2FGRkZF6+OGHlZyc3GDfCACBYtq0abr22mv19NNP67bbbtP27dv10ksv6aWXXpIkBQUFKT09XVlZWbr00kvVs2dPPf7440pMTFRqaqpviweAZnIrcDMpEADU75///Kduv/12lZaWKjY2Vtdff70KCgoUGxsrSZo/f76Cg4OVlpbmMscFAAS6q6++WmvXrtXMmTM1Z84c9ezZUwsWLNCdd97pXOfRRx/VyZMndf/996usrEzXX3+91q9fr/DwcB9WDsDr3Lm8cN2lhdX3gqv6M7cCN5MC
AUD9Vq5c2ejj4eHhysnJUU5OTgtV5AP+fKaEOx/wQGvhz//nfmDUqFEaNWpUg48HBQVpzpw5mjNnTgtWBQDmc3sM97mYFAgAAAAAgPo1K3AzKRAAAAAAAPVz65RyJgUCAAAAgLattNKuSrvDrTY2a6hiTKrHn7kVuJkUCAAAAG0ac0KgjSuttGtW/l5VO2rdamcJDVZWp3DFWKpMqsw/uRW4mRSoBXk0Ecq1ZlYEAAAAoI2rtDtU7ajVfUMvUWJ0064kUFRWpaUffa1Kh4XADQAA0KLcPWLIj8wA4HOJ0eHqHtPe12X4vWZNmgYAAAAAAOpH4AYAAAAAwAScUg4AAAAAbZS7M44XlbWtMdjNReAGAAAAgDaoOTOO23ZOkJo6AdqpSKn0WqmT+zW2dgRuAAAAAGiDPJlxXPr3NbU/5kh3UxC4AQAAAKANY8Zx8xC4AQC+U7pd2rZAalfh60oA73H3MmcAgIDFLOUAAAAAAJiAwA0AAAAAgAkI3AAAAAAAmIAx3IAfc/e6iNK/Z420WU2qCAAAAEBTEbjbOiYs8lul1eGa9fKLqq4NcaudJW6IslL7EboBAAAAHyNwA36q0mFRdW2I7uvxiRIjKpvUpui0TUtPXKNKu4PADfiCu7NTn4qUdK0ppQAAAN8jcAN+LjGiUt3dOQPhhHm1AAAAAGg6Jk0DAAAAAMAEBG4AAAAAAEzAKeVACyo6bTNlXQAAAAD+h8ANeMLNiZFs1eGyBA/V0m/6u9XOElwjW2i1W20AAAAA+AcCN9ACYixVykr6SJUOi1vtbKHVirFUmVQVAAAAADMRuIEWEmOpIjyj9eDyVgAAAM3GpGkAAAAAAJiAwA0AAAAAgAkI3AAAAAAAmIAx3ECgKd0ubVsgtato2vqejr11dz8AAABAC3D38rq20GrFmFQLgRsAAAAA3OHu5KKSNOxN79cBFzZrqCyhwR5dijfrWrtibFav10TgBgAAAAC0ejE2q7JS+6nyw+ea3KbotE1Lv+mvSruDwA0AAAAAQENibFbF+NGQRyZNAwAAAADABARuAAAAAABMEFCnlJdWh6vSYWnSuu7OXIf/cOd1lnitfcGd17wl/z7uvnekf88aaakyqSIAAADAPAETuEurwzXr06Gqrg1pchtLXLBs1oB5CVqEJ6+zdHbmP1totUlVoY4ttFqW4BqPZmY0++/TnPdOVtJHhG4AAC6EmbMBvxMwabPSYVF1bYju6/GJEiMqm9TGdsN9psxEF8g8eZ0ljlK2lBhLlbKSPvLLo8ievHecs0Y6LLx/END89awUeMaTs3ncxfsAAFqHgAncdRIjKtW9qbPSEbY95tbrjBYVY6ny63DKewf4D38+KwWeKa20e3Q2jyd4HwCA/wu4wA0AQGvhz2elwDOVdodHZ4J5gvcBAPg/AjcAAD7k72elwDOczQMAkAjcAAAAAOCfmAiv1WvbgdvdN/CpSEnXmlIKAAAAACCwBPu6AAAAAASuzMxMBQUFudx69+7tfLyqqkpTpkxRTEyMbDab0tLSVFJS4sOKAcB7CNwAAAAwVVJSko4cOeK8bd682fnYtGnT9Oabb2r16tXatGmTioqKNGbMGB9WCwDe07ZPKQcAAIDpQkNDFR8ff97y8vJyLVu2THl5eRo+fLgkKTc3V3369FFBQYGGDBnS0qUCgFcRuAEAAGCq/fv3KzExUeHh4UpOTlZ2dra6deumnTt36syZMxoxYoRz3d69e6tbt27aunVrg4HbbrfLbrc771dUMCM8ziqttKvS7nCrjc0aqhib1aSK0NYRuAEAAGCawYMHa/ny5brssst05MgRzZ49W0OHDtXevXtVXFwsi8Wi6OholzZxcXEqLi5ucJvZ2dmaPXu2yZWjtSmttGtW/l5VO2rdamcJDVZWaj9CN0xB4IaKTttMWRfwFnffd7ZKOx+aAOAnRo4c6fx3//79NXjwYHXv3l2rVq1SRESER9ucOXOmMjIynPcrKirUtWvXZteK1q3S7lC1o1b3Db1EidHhTWpTVFalpR99rUq7g+8OMAWBuw2zhVbLElyjpd/0d6udJbhGttBqk6oC/sPj9+jxvfxSDQB+Kjo6Wj/60Y904MAB3XzzzaqurlZZWZnLUe6SkpJ6x3zXsVqtslrp41G/xOhwdY9p7+syAEkE7jYtxlKlrKSPVOmwuNXOFlqtGEuVSVUB/+HJe7TotE1LT1zDL9U+whkz/o2/D/xBZWWlvvrqK919990aNGiQwsLCtHHjRqWlpUmS9u3bp0OHDik5OdnHlQJA8xG427gYSxXhGX7No/foCXNqQcM4Y8a/efz3iQuWzermV4UPRru3PgLer371K40ePVrdu3dXUVGRnnzySYWEhOj2229XVFSUJk6cqIyMDHXs2FGRkZF6+OGHlZyczAzl8mBIFQdFAL9D4AYANBtnzPg3j/8+N9zHmSJotn/+85+6/fbbVVpaqtjYWF1//fUqKChQbGysJGn+/PkKDg5WWlqa7Ha7UlJStGjRIh9X7VvN+REzK+kj8/tVT35YG/am9+sAWgECNwDAKzhjxr959PchbMMLVq5c2ejj4eHhysnJUU5OTgtV5P88HlL1TX9VOiz0xYAfCfZ1AQAQiObOnaugoCClp6c7l1VVVWnKlCmKiYmRzWZTWlqaSkpKfFckAMBvxViq1L1dRZNviRGVvi4ZQD0I3ADgZTt27NCLL76o/v1dTwWcNm2a3nzzTa1evVqbNm1SUVGRxowZ46MqAQAAYDYCNwB4UWVlpe688069/PLLuuiii5zLy8vLtWzZMj333HMaPny4Bg0apNzcXG3ZskUFBQU+rBgAAABmYQw3AHjRlClTdMstt2jEiBHKyspyLt+5c6fOnDmjESNGOJf17t1b3bp109atW+udjddut8tutzvvV1RUmFs80MpwmTMAgay00q7KU5FutbGFVivGpHrgmWYF7rlz52rmzJn65S9/qQULFkg6O0Zx+vTpWrlypctMk3Fxcd6oFwD81sqVK7Vr1y7t2LHjvMeKi4tlsVgUHR3tsjwuLk7FxcX1bi87O1uzZ882o1SgVeMydAACXWmlXbPy96q65Fq32lmCa5R1rZ0rTPgRjwN3Y2MU3377ba1evVpRUVGaOnWqxowZo3/84x/NLhYA/NXhw4f1y1/+Uu+9957Cw8O9ss2ZM2cqIyPDeb+iokJdu3b1yraB1ozL0AEIdJV2h6odtbqvxydNnhDPOVO93UHg9iMeBe5zxyiee8pk3RjFvLw8DR8+XJKUm5urPn36qKCgoN5TJgEgEOzcuVNHjx7VwIEDnctqamr04YcfauHChXrnnXdUXV2tsrIyl6PcJSUlio+Pr3ebVqtVVisfmEB9uAwdgLYgMaJS3dsxpKw18yhwM0YRgMT4yXP9+Mc/VmFhocuye++9V71799Zjjz2mrl27KiwsTBs3blRaWpokad++fTp06JCSk5N9UTIAAABM5nbgZowiAMZPnq9Dhw7q16+fy7L27dsrJibGuXzixInKyMhQx44dFRkZqYcffljJycmc/QMAQIArrQ5
XZenJJq9fVMYZPIHCrcDNGEUAEuMnPTV//nwFBwcrLS3NZVJJAAAQuEqrwzXr06GqLvrMrXaW0OCAPVDhlm33S634tHq3AjdjFAHUYfzkhX3wwQcu98PDw5WTk6OcnBzfFATAfaXbpW0Lmv5l71SkJPdmFQYQ2CodFlXXhui+oZcoMbrpBy1t1lDFfMx3rdbOrcDNGEUAAAAAcF9idLi6x7T3dRloYW4FbsYoAgAAAADQNB5fh7shbWGMIjMzAwAAAAAupNmBuy2NUWRmZgAAAAD+zJ0ZzpkN3XxeP8IdyJiZGQAAAIA/ch4c/Ohrt9pZQoNlsxILzcIr6yZmZgYAoG1hKBmA1sB5cHDQRLfa2ayhirFx1SizELgBAADqwVAyAK1NjKVKMcyE7lcI3AAAAPVgKBkAoLkI3AAAAA1gKBngO6WVdlXaHU1enwnA/F9bHKJD4AYAAADgV0or7ZqVv1fVjlq32jEBmH9qy0N0eDcCAAAA8CuVdoeqHbW6b+glSowOb3I7JgDzT215iA6BGwAAAIBfSowOV3cmAQsIbXWIDoEbAADU74PRvq4AAIBWLdjXBQAAAAAAEIgI3AAAAAAAmIBTygEAAIC2iqEjgKkI3PBffAAAAAAAaMU4pRwAAAAAABMQuAEAAAAAMAGBGwAAAAAAEzCGGwAAAAAChSfzIA170/t1QBJHuAEAAAAAMAWBGwAAAAAAExC4AQAAAAAwAWO4AQAAgDaqtDpclQ6LW21sodWKsVSZVBEQWAjcAAAAQBtUWh2uWZ8OVXVtiFvtLME1ykr6iNAdSDyZaA1NQuAGAAAA2qBKh0XVtSG6r8cnSoyobFKbotM2Lf2mvyodFgI30AQEbgAAAKANS4yoVPd2Fb4uAwhITJoGAAAAAIAJCNwAAAAAAJiAwA0AAAAAgAkYww0AAAAAZtt2v8RY+TaHI9wAAAAAAJiAwA0AAIAWM3fuXAUFBSk9Pd25rKqqSlOmTFFMTIxsNpvS0tJUUlLiuyIBwEs4pdyPFZ22mbIuAACAL+zYsUMvvvii+vfv77J82rRpevvtt7V69WpFRUVp6tSpGjNmjP7xj3/4qFIA8A4Ctx+yhVbLElyjpd/0v/DK57AE18gWWm1SVQAAAJ6rrKzUnXfeqZdffllZWVnO5eXl5Vq2bJny8vI0fPhwSVJubq769OmjgoICDRky5Lxt2e122e125/2KCsbFAvBPBG4/FGOpUlbSR6p0WNxqZwutVoylyqSqAAAAPDdlyhTdcsstGjFihEvg3rlzp86cOaMRI0Y4l/Xu3VvdunXT1q1b6w3c2dnZmj17dovUDQDNQeD2UzGWKsIzAAAICCtXrtSuXbu0Y8eO8x4rLi6WxWJRdHS0y/K4uDgVFxfXu72ZM2cqIyPDeb+iokJdu3b1as0A4A0EbgAAAJjm8OHD+uUvf6n33ntP4eHhXtmm1WqV1Wr1yrYAwEzMUg4AAADT7Ny5U0ePHtXAgQMVGhqq0NBQbdq0SS+88IJCQ0MVFxen6upqlZWVubQrKSlRfHy8b4oGAC/hCDcAAABM8+Mf/1iFhYUuy+6991717t1bjz32mLp27aqwsDBt3LhRaWlpkqR9+/bp0KFDSk5O9kXJAOA1BG4AAACYpkOHDurXr5/Lsvbt2ysmJsa5fOLEicrIyFDHjh0VGRmphx9+WMnJyfVOmAYArQmBGwAAAD41f/58BQcHKy0tTXa7XSkpKVq0aJGvywKAZiNwAwAAoEV98MEHLvfDw8OVk5OjnJwc3xQEACZh0jQAAAAAAExA4AYAAAAAwAScUg4AAAAAbiitDlelw9KkdYtO20yuBv6MwA0AAAAATVRaadesT4equjakyW0swTWyhVabWBX8FYEbAAAAAJqo0u5QdW2I7uvxiRIjKpvUxhZarRhLlcmVwR8RuAEAAADATYkRlerersLXZcDPMWkaAAAAAAAmIHADAAAAAGACAjcAAAAAACZgDDcAAAAA/1O6Xdq2QHJnnPSwN00rB/AER7gBAAAAADABgRsAvGDx4sXq37+/IiMjFRkZqeTkZK1bt875eFVVlaZMmaKYmBjZbDalpaWppKTEhxUDAADAbARuAPCCLl26aO7cudq5c6c+/vhjDR8+XLfeeqs+/fRTSdK0adP05ptvavXq1dq0aZOKioo0ZswYH1cNAAAAMzGGGwC8YPTo0S73n3rqKS1evFgFBQXq0qWLli1bpry8PA0fPlySlJubqz59+qigoEBDhgypd5t2u112u915v6KCa30CAGCGorIqU9YF3Arcixcv1uLFi/XNN99IkpKSkvTEE09o5MiRks6eMjl9+nStXLlSdrtdKSkpWrRokeLi4rxeOAD4q5qaGq1evVonT55UcnKydu7cqTNnzmjEiBHOdXr37q1u3bpp69atDQbu7OxszZ49u6XKBgCg9ftg9IXXOYetOlyW0Bla+tHXbrWzBNfIFlrtVhu0TW4F7rpTJi+99FIZhqFXXnlFt956q3bv3q2kpCRNmzZNb7/9tlavXq2oqChNnTpVY8aM0T/+8Q+z6gcAv1FYWKjk5GRVVVXJZrNp7dq16tu3r/bs2SOLxaLo6GiX9ePi4lRcXNzg9mbOnKmMjAzn/YqKCnXt2tWs8gEAaHNiLFXKSu2nSrvDrXa2nc8oxsKRblyYW4GbUyYBoGGXXXaZ9uzZo/Lycr322msaP368Nm3a5PH2rFarrFarFysEAAA/FGOzKsbm5uctYRtN5PGkaTU1NVq5cmWTT5lsSHZ2tqKiopw3jt4AaK0sFot69eqlQYMGKTs7WwMGDNDzzz+v+Ph4VVdXq6yszGX9kpISxcfH+6ZYAAAAmM7twF1YWCibzSar1aoHH3zQecpkcXGxx6dMlpeXO2+HDx92+0kAgD+qra2V3W7XoEGDFBYWpo0bNzof27dvnw4dOqTk5GQfVggAAAAzuT1LOadMAsD5Zs6cqZEjR6pbt246ceKE8vLy9MEHH+idd95RVFSUJk6cqIyMDHXs2FGRkZF6+OGHlZyc3OBwGwAAPFF02mbKugA843bgrjtlUpIGDRqkHTt26Pnnn9fPf/5z5ymT5x7l5pRJAG3B0aNHdc899+jIkSOKiopS//799c477+jmm2+WJM2fP1/BwcFKS0tzuYoDAADeYAutliW4Rku/6e9WuxabbdvN2cN1KlLStaaUArSkZl+Hu75TJtPS0iRxyiSAtmPZsmWNPh4eHq6cnBzl5OS0UEUAgLYkxlKlrKSPVOmwuNXOFlrNbNuAidwK3JwyCQAAAPinGEsV4RnwM24Fbk6ZBAAAAACgadwK3JwyCQAAAMDdCdc4dR1tVbPHcAMAAABoG5ozOVtW0keEbrQ5BG4AAAAATeLJ5GxFp21a+k1/VTosBG60OQRuAAAAAE3G5GxA0wX7ugAAAAAAAAIRgRsAAAAAABNwSjkAAACAtuuD0b6uAAGMI9wAAAAAAJiAwA0AAAAAgAkI3AAAAAAAmIDADQAAAACACQjcAAAAAACYgMANAAAAAIAJCNwAAAAAAJiAwA0AAAAAgA
kI3AAAAAAAmCDU1wUAAAAACHxFp22mrAv4MwI3AAAAANPYQqtlCa7R0m/6u9XOElwjW2i1SVUBLYPADQAAAMA0MZYqZSV9pEqHxa12ttBqxViqTKoKaBmM4QYAAIBpFi9erP79+ysyMlKRkZFKTk7WunXrnI9XVVVpypQpiomJkc1mU1pamkpKSnxYMcwQY6lS93YVbt0I2wgEBG4AAACYpkuXLpo7d6527typjz/+WMOHD9ett96qTz/9VJI0bdo0vfnmm1q9erU2bdqkoqIijRkzxsdVA4B3cEo5AAAATDN69GiX+0899ZQWL16sgoICdenSRcuWLVNeXp6GDx8uScrNzVWfPn1UUFCgIUOG+KJkAPAajnADAACgRdTU1GjlypU6efKkkpOTtXPnTp05c0YjRoxwrtO7d29169ZNW7dubXA7drtdFRUVLjcA8EcEbgAAAJiqsLBQNptNVqtVDz74oNauXau+ffuquLhYFotF0dHRLuvHxcWpuLi4we1lZ2crKirKeevatavJzwAAPEPgBgAAgKkuu+wy7dmzR9u2bdNDDz2k8ePH67PPPvN4ezNnzlR5ebnzdvjwYS9WCwDewxhuAAAAmMpisahXr16SpEGDBmnHjh16/vnn9fOf/1zV1dUqKytzOcpdUlKi+Pj4BrdntVpltVrNLhsAmo0j3AAAAGhRtbW1stvtGjRokMLCwrRx40bnY/v27dOhQ4eUnJzswwoBwDs4wg0AAADTzJw5UyNHjlS3bt104sQJ5eXl6YMPPtA777yjqKgoTZw4URkZGerYsaMiIyP18MMPKzk5mRnKAQQEAjcAAABMc/ToUd1zzz06cuSIoqKi1L9/f73zzju6+eabJUnz589XcHCw0tLSZLfblZKSokWLFvm4agDwDgI3AAAATLNs2bJGHw8PD1dOTo5ycnJaqCIAaDmM4QYAAAAAwAQEbgAAAAAATEDgBgAAAADABARuAAAAAABMQOAGAAAAAMAEBG4AAAAAAExA4AYAAAAAwAQEbgAAAAAATEDgBgAAAADABARuAAAAAABMQOAGAAAAAMAEBG4AAAAAAExA4AYAAAAAwAQEbgAAAAAATEDgBgAAAADABARuAAAAAABMQOAGAAAAAMAEBG4AAAAAAExA4AYAAAAAwAQEbgAAAAAATEDgBgAAAADABARuAAAAAABMQOAGAC/Izs7W1VdfrQ4dOqhTp05KTU3Vvn37XNapqqrSlClTFBMTI5vNprS0NJWUlPioYgAAAJiNwA0AXrBp0yZNmTJFBQUFeu+993TmzBn95Cc/0cmTJ53rTJs2TW+++aZWr16tTZs2qaioSGPGjPFh1QAAADCTW4GbIzgAUL/169drwoQJSkpK0oABA7R8+XIdOnRIO3fulCSVl5dr2bJleu655zR8+HANGjRIubm52rJliwoKCnxcPQAAAMzgVuDmCA4ANE15ebkkqWPHjpKknTt36syZMxoxYoRznd69e6tbt27aunVrvduw2+2qqKhwuQEAAKD1CHVn5fXr17vcX758uTp16qSdO3fqhhtucB7BycvL0/DhwyVJubm56tOnjwoKCjRkyJDztmm322W32533+UIJoLWrra1Venq6rrvuOvXr10+SVFxcLIvFoujoaJd14+LiVFxcXO92srOzNXv2bLPLBQAAgEmaNYbbG0dwsrOzFRUV5bx17dq1OSUBgM9NmTJFe/fu1cqVK5u1nZkzZ6q8vNx5O3z4sJcqBAAAQEvwOHB76wgOXygBBJKpU6fqrbfe0vvvv68uXbo4l8fHx6u6ulplZWUu65eUlCg+Pr7ebVmtVkVGRrrcAAAA0Hp4HLi9dQSHL5QAAoFhGJo6darWrl2rv//97+rZs6fL44MGDVJYWJg2btzoXLZv3z4dOnRIycnJLV0uAAAAWoBbY7jr1B3B+fDDDxs8gnPuUe7GjuAAQCCYMmWK8vLy9MYbb6hDhw7Os3qioqIUERGhqKgoTZw4URkZGerYsaMiIyP18MMPKzk5ud75LQAAAND6uXWEmyM4AFC/xYsXq7y8XMOGDVNCQoLz9pe//MW5zvz58zVq1CilpaXphhtuUHx8vNasWePDqgEAAGAmt45wcwQHAOpnGMYF1wkPD1dOTo5ycnJaoCIAAAD4mluBe/HixZKkYcOGuSzPzc3VhAkTJJ09ghMcHKy0tDTZ7XalpKRo0aJFXikWAAAAAIDWwq3AzREcAAAAAACaplnX4QYAAAAAAPUjcAMAAAAAYAICNwAAAAAAJiBwAwAAAABgAgI3AAAAAAAmIHADAAAAAGACAjcAAAAAACYgcAMAAAAAYAICNwAAAAAAJiBwAwAAwDTZ2dm6+uqr1aFDB3Xq1Empqanat2+fyzpVVVWaMmWKYmJiZLPZlJaWppKSEh9VDADeQ+AGAACAaTZt2qQpU6aooKBA7733ns6cOaOf/OQnOnnypHOdadOm6c0339Tq1au1adMmFRUVacyYMT6sGgC8I9TXBQAAACBwrV+/3uX+8uXL1alTJ+3cuVM33HCDysvLtWzZMuXl5Wn48OGSpNzcXPXp00cFBQUaMmTIedu02+2y2+3O+xUVFeY+CQDwEEe4AQAA0GLKy8slSR07dpQk7dy5U2fOnNGIESOc6/Tu3VvdunXT1q1b691Gdna2oqKinLeuXbuaXzgAeIDADQAAgBZRW1ur9PR0XXfdderXr58kqbi4WBaLRdHR0S7rxsXFqbi4uN7tzJw5U+Xl5c7b4cOHzS4dADzCKeUAAABoEVOmTNHevXu1efPmZm3HarXKarV6qSoAMA9HuAEAAGC6qVOn6q233tL777+vLl26OJfHx8erurpaZWVlLuuXlJQoPj6+hasEAO8icAMAAMA0hmFo6tSpWrt2rf7+97+rZ8+eLo8PGjRIYWFh2rhxo3PZvn37dOjQISUnJ7d0uQDgVZxSDgAAANNMmTJFeXl5euONN9ShQwfnuOyoqChFREQoKipKEydOVEZGhjp27KjIyEg9/PDDSk5OrneGcgBoTQjcAAAAMM3ixYslScOGDXNZnpubqwkTJkiS5s+fr+DgYKWlpclutyslJUWLFi1q4UoBwPsI3AAAADCNYRgXXCc8PFw5OTnKyclpgYoAoOUwhhsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwC84MMPP9To0aOVmJiooKAg5efnuzxuGIaeeOIJJSQkKCIiQ
iNGjND+/ft9UywAAABahNuBmy+VAHC+kydPasCAAcrJyan38WeeeUYvvPCClixZom3btql9+/ZKSUlRVVVVC1cKAACAluJ24OZLJQCcb+TIkcrKytL/+3//77zHDMPQggULNGvWLN16663q37+/VqxYoaKiovN+tAQAAEDgCHW3wciRIzVy5Mh6H/vhl0pJWrFiheLi4pSfn69x48Y1r1oAaIUOHjyo4uJijRgxwrksKipKgwcP1tatWxvsG+12u+x2u/N+RUWF6bUCAADAe7w6hvtCXyrrY7fbVVFR4XIDgEBSXFwsSYqLi3NZHhcX53ysPtnZ2YqKinLeunbtamqdAAAA8C6vBm5PvlTyhRIA6jdz5kyVl5c7b4cPH/Z1SQAAAHCDz2cp5wslgEAXHx8vSSopKXFZXlJS4nysPlarVZGRkS43AAAAtB5eDdyefKnkCyWAQNezZ0/Fx8dr48aNzmUVFRXatm2bkpOTfVgZAAAAzOTVwM2XSgBtVWVlpfbs2aM9e/ZIOjunxZ49e3To0CEFBQUpPT1dWVlZ+utf/6rCwkLdc889SkxMVGpqqk/rBgAAgHncnqW8srJSBw4ccN6v+1LZsWNHdevWzfml8tJLL1XPnj31+OOP86USQMD7+OOPddNNNznvZ2RkSJLGjx+v5cuX69FHH9XJkyd1//33q6ysTNdff73Wr1+v8PBwX5UMAAAAk7kduPlSCQDnGzZsmAzDaPDxoKAgzZkzR3PmzGnBqgAAAOBLbp9SXvel8oe35cuXS/rPl8ri4mJVVVVpw4YN+tGPfuTtugEAANAKfPjhhxo9erQSExMVFBSk/Px8l8cNw9ATTzyhhIQERUREaMSIEdq/f79vigUAL/P5LOUAAAAIXCdPntSAAQOUk5NT7+PPPPOMXnjhBS1ZskTbtm1T+/btlZKSoqqqqhauFAC8z+1TygEAAICmGjlypEaOHFnvY4ZhaMGCBZo1a5ZuvfVWSdKKFSsUFxen/Px8jRs3riVLBQCv4wg3AAAAfOLgwYMqLi7WiBEjnMuioqI0ePBgbd26tcF2drtdFRUVLjcA8EcEbgAAAPhEcXGxJCkuLs5leVxcnPOx+mRnZysqKsp569q1q6l1AoCnCNwAAABoVWbOnKny8nLn7fDhw74uCQDqReAGAACAT8THx0uSSkpKXJaXlJQ4H6uP1WpVZGSkyw0A/BGBGwAAAD7Rs2dPxcfHa+PGjc5lFRUV2rZtm5KTk31YGQB4B7OUAwAAwDSVlZU6cOCA8/7Bgwe1Z88edezYUd26dVN6erqysrJ06aWXqmfPnnr88ceVmJio1NRU3xUNAF5C4AYAAIBpPv74Y910003O+xkZGZKk8ePHa/ny5Xr00Ud18uRJ3X///SorK9P111+v9evXKzw83FclA4DXELgBAABgmmHDhskwjAYfDwoK0pw5czRnzpwWrAoAWgZjuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAExC4AQAAAAAwAYEbAAAAAAATELgBAAAAADABgRsAAAAAABMQuAEAAAAAMAGBGwAAAAAAE5gWuHNyctSjRw+Fh4dr8ODB2r59u1m7AoBWhf4RAM5H3wggEJkSuP/yl78oIyNDTz75pHbt2qUBAwYoJSVFR48eNWN3ANBq0D8CwPnoGwEEqlAzNvrcc89p0qRJuvfeeyVJS5Ys0dtvv60//OEPmjFjhsu6drtddrvdeb+8vFySVFFR4dY+T5w6o+qq0zpx6owqjDPNfAYAWqsTp8+o+nSlTpyoUEVYTZPa1PU3hmGYWZok+kcAvuPP/aM7faPknf7xxImT9I0AzvaNVafd6hslN/pHw8vsdrsREhJirF271mX5PffcY/zf//t/z1v/ySefNCRx48aNm89vhw8f9naXSP/IjRu3gLiZ2T+62zfSP3Ljxs2fbhfqH71+hPvYsWOqqalRXFycy/K4uDh98cUX560/c+ZMZWRkOO/X1tbq+PHjiomJUVBQkLfLa9UqKirUtWtXHT58WJGRkb4ux+/w+jSO16dhhmHoxIkTSkxMNHU/9I/m4f3dOF6fxvH6NKwl+kd3+0ap+f0jf3Negzq8DrwGddx9HZraP5pySrk7rFarrFary7Lo6GjfFNNKREZGtun/DBfC69M4Xp/6RUVF+bqE89A/uo/3d+N4fRrH61O/QO4f+ZvzGtThdeA1qOPO69CU/tHrk6ZdfPHFCgkJUUlJicvykpISxcfHe3t3ANBq0D8CwPnoGwEEMq8HbovFokGDBmnjxo3OZbW1tdq4caOSk5O9vTsAaDXoHwHgfPSNAAKZKaeUZ2RkaPz48brqqqt0zTXXaMGCBTp58qRz5kl4xmq16sknnzzvFCqcxevTOF4f/0D/aA7e343j9Wkcr4/vtXTfyN+c16AOrwOvQR2zXocgwzDnOg8LFy7Us88+q+LiYl1xxRV64YUXNHjwYDN2BQCtCv0jAJyPvhFAIDItcAMAAAAA0JZ5fQw3AAAAAAAgcAMAAAAAYAoCNwAAAAAAJiBwAwAAAABgAgK3nzt+/LjuvPNORUZGKjo6WhMnTlRlZWWjbYYNG6agoCCX24MPPthCFZsrJydHPXr0UHh4uAYPHqzt27c3uv7q1avVu3dvhYeH6/LLL9ff/va3FqrUN9x5fZYvX37e+yQ8PLwFqwWah/7RFf1j4+gf8dRTT+naa69Vu3btFB0d3aQ2hmHoiSeeUEJCgiIiIjRixAjt37/f3EJN1Fb7TfpH+sAPP/xQo0ePVmJiooKCgpSfn3/BNh988IEGDhwoq9WqXr16afny5R7tm8Dt5+688059+umneu+99/TWW2/pww8/1P3333/BdpMmTdKRI0ect2eeeaYFqjXXX/7yF2VkZOjJJ5/Url27NGDAAKWkpOjo0aP1rr9lyxbdfvvtmjhxonbv3q3U1FSlpqZq7969LVx5y3D39ZGkyMhIl/fJt99+24IVA81D//gf9I+No3+EJFVXV2vs2LF66KGHmtzmmWee0QsvvKAlS5Zo27Ztat++vVJSUlRVVWVipeZpi/0m/SN9oCSdPHlSAwYMUE5OTpPWP3jwoG655RbddNNN2rNnj9LT03XffffpnXfecX/nBvzWZ599ZkgyduzY4Vy2bt06IygoyPjuu+8a
bHfjjTcav/zlL1ugwpZ1zTXXGFOmTHHer6mpMRITE43s7Ox617/tttuMW265xWXZ4MGDjQceeMDUOn3F3dcnNzfXiIqKaqHqAO+if3RF/9g4+kecq6l/39raWiM+Pt549tlnncvKysoMq9VqvPrqqyZWaI622m/SP9IH/pAkY+3atY2u8+ijjxpJSUkuy37+858bKSkpbu+PI9x+bOvWrYqOjtZVV13lXDZixAgFBwdr27Ztjbb985//rIsvvlj9+vXTzJkzderUKbPLNVV1dbV27typESNGOJcFBwdrxIgR2rp1a71ttm7d6rK+JKWkpDS4fmvmyesjSZWVlerevbu6du2qW2+9VZ9++mlLlAs0G/3jf9A/No7+EZ46ePCgiouLXd47UVFRGjx4cKv8v9IW+036R/pAT3nzfRDqraLgfcXFxerUqZPLstDQUHXs2FHFxcUNtrvjjjvUvXt3JSYm6pNPPtFjjz2mffv2ac2aNWaXbJpjx46ppqZGcXFxLsvj4uL0xRdf1NumuLi43vUbe+1aK09en8suu0x/+MMf1L9/f5WXl+t//ud/dO211+rTTz9Vly5dWqJswGP0j/9B/9g4+kd4qu7/Q6D8X2mL/Sb9I32gpxp6H1RUVOj06dOKiIho8rYI3D4wY8YMzZs3r9F1Pv/8c4+3f+5YnMsvv1wJCQn68Y9/rK+++kr/9V//5fF2EViSk5OVnJzsvH/ttdeqT58+evHFF/Xb3/7Wh5WhLaN/hD+gf2w9mtpn9O7du4Uqann0m/A2+kDvInD7wPTp0zVhwoRG17nkkksUHx9/3mQGDodDx48fV3x8fJP3N3jwYEnSgQMHWm3HePHFFyskJEQlJSUuy0tKShp8LeLj491avzXz5PX5obCwMF155ZU6cOCAGSUCTUL/6D76x8bRPwa2pvYZnqh7f5SUlCghIcG5vKSkRFdccYVH2zQD/WbD6B/pAz3V0PsgMjLSraPbErOU+0RsbKx69+7d6M1isSg5OVllZWXauXOns+3f//531dbWOju7ptizZ48kuXxYtDYWi0WDBg3Sxo0bnctqa2u1ceNGl1/gzpWcnOyyviS99957Da7fmnny+vxQTU2NCgsLW/X7BK0f/aP76B8bR/8Y2JraZ3iiZ8+eio+Pd3nvVFRUaNu2bX71f4V+s2H0j/SBnvLq+8DtadbQov7P//k/xpVXXmls27bN2Lx5s3HppZcat99+u/Pxf/7zn8Zll11mbNu2zTAMwzhw4IAxZ84c4+OPPzYOHjxovPHGG8Yll1xi3HDDDb56Cl6zcuVKw2q1GsuXLzc+++wz4/777zeio6ON4uJiwzAM4+677zZmzJjhXP8f//iHERoaavzP//yP8fnnnxtPPvmkERYWZhQWFvrqKZjK3ddn9uzZxjvvvGN89dVXxs6dO41x48YZ4eHhxqeffuqrpwC4hf7xP+gfG0f/CMMwjG+//dbYvXu3MXv2bMNmsxm7d+82du/ebZw4ccK5zmWXXWasWbPGeX/u3LlGdHS08cYbbxiffPKJceuttxo9e/Y0Tp8+7Yun0Gxtsd+kf6QPNAzDOHHihPP/vCTjueeeM3bv3m18++23hmEYxowZM4y7777buf7XX39ttGvXzvj1r39tfP7550ZOTo4REhJirF+/3u19E7j9XGlpqXH77bcbNpvNiIyMNO69916XD4aDBw8akoz333/fMAzDOHTokHHDDTcYHTt2NKxWq9GrVy/j17/+tVFeXu6jZ+Bdv//9741u3boZFovFuOaaa4yCggLnYzfeeKMxfvx4l/VXrVpl/OhHPzIsFouRlJRkvP322y1cccty5/VJT093rhsXF2f89Kc/NXbt2uWDqgHP0D+6on9sHP0jxo8fb0g671bXRxjG2csF5ebmOu/X1tYajz/+uBEXF2dYrVbjxz/+sbFv376WL95L2mq/Sf9IH/j+++/X+/+/7nmPHz/euPHGG89rc8UVVxgWi8W45JJLXPoGdwQZhmF4eqgdAAAAAADUjzHcAAAAAACYgMANAAAAAIAJCNwAAAAAAJiAwA0AAAAAgAkI3AAAAAAAmIDADQAAAACACQjcAAAAAACYgMANAAAAAIAJCNwAAAAAAJiAwA0AAAAAgAkI3AAAAAAAmOD/A+XjAu1l2CfWAAAAAElFTkSuQmCC\n" 359 | }, 360 | "metadata": {} 361 | } 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "source": [ 367 | "### Explicit Checks for Available Devices and Set to Cuda\n", 368 | "\n", 369 | "No need to run these cells\n", 370 | "\n", 371 | "\n", 372 | "Check below to find the cells to run" 373 | ], 374 | "metadata": { 375 | "id": "QA6dStOjGCcy" 376 | } 377 | }, 378 | { 379 | "cell_type": "code", 380 | "source": [ 381 | "def to_device(data, device):\n", 382 | " \"\"\"Move tensor(s) to chosen device\"\"\"\n", 383 | " if isinstance(data, (list, tuple)):\n", 384 | " return [to_device(x, device) for x in data]\n", 385 | " if isinstance(data, dict):\n", 386 | " return {k: to_device(v, device) for k, v in data.items()}\n", 387 | " return data.to(device, non_blocking=True)\n", 388 | "\n", 389 | "# Select the device\n", 390 | "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", 391 | "print ('check available device: ', device)\n", 392 | "\n", 393 | "# Move the entire dataset to the GPU\n", 394 | "dataset = to_device(dataset, device)" 395 | ], 396 | "metadata": { 397 | "id": "VquYoTjV6nDJ" 398 | }, 399 | "execution_count": null, 400 | "outputs": [] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "source": [ 405 | "model = KAN(width=[2, 1, 1], grid=10, k=3)\n", 406 | "model = model.to(device)" 407 | ], 408 | "metadata": { 409 
| "id": "QfH8EMATjUBY" 410 | }, 411 | "execution_count": null, 412 | "outputs": [] 413 | }, 414 | { 415 | "cell_type": "code", 416 | "source": [ 417 | "def check_device(model, dataset):\n", 418 | " # Check the model's device\n", 419 | " print(\"Model is on:\", next(model.parameters()).device)\n", 420 | "\n", 421 | " # Check the dataset's device\n", 422 | " for name, tensor in dataset.items():\n", 423 | " print(f\"{name} is on {tensor.device}\")\n", 424 | "\n", 425 | "# Usage\n", 426 | "check_device(model, dataset)" 427 | ], 428 | "metadata": { 429 | "colab": { 430 | "base_uri": "https://localhost:8080/" 431 | }, 432 | "id": "aKRVfj99756C", 433 | "outputId": "4d4e4060-65da-43a2-b488-b0922f5e09b2" 434 | }, 435 | "execution_count": null, 436 | "outputs": [ 437 | { 438 | "output_type": "stream", 439 | "name": "stdout", 440 | "text": [ 441 | "Model is on: cuda:0\n", 442 | "train_input is on cuda:0\n", 443 | "test_input is on cuda:0\n", 444 | "train_label is on cuda:0\n", 445 | "test_label is on cuda:0\n" 446 | ] 447 | } 448 | ] 449 | }, 450 | { 451 | "cell_type": "markdown", 452 | "source": [ 453 | "### Run from here" 454 | ], 455 | "metadata": { 456 | "id": "jkxd5GPjF-ay" 457 | } 458 | }, 459 | { 460 | "cell_type": "code", 461 | "source": [ 462 | "### let's try this explicitly anyway\n", 463 | "\n", 464 | "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n", 465 | "model = KAN(width=[2, 1, 1], grid=10, k=3, seed=0, device=device)\n", 466 | "dataset = create_dataset(f, n_var=2,device=device)\n" 467 | ], 468 | "metadata": { 469 | "id": "_hN7K117_DmJ" 470 | }, 471 | "execution_count": null, 472 | "outputs": [] 473 | }, 474 | { 475 | "cell_type": "code", 476 | "source": [ 477 | "model.train(dataset, opt=\"LBFGS\", steps=25, device=device)\n", 478 | "\n", 479 | "model.plot(beta=10)" 480 | ], 481 | "metadata": { 482 | "colab": { 483 | "base_uri": "https://localhost:8080/", 484 | "height": 362 485 | }, 486 | "id": "KuK803Wf2gEe", 487 | "outputId": "41e4567a-c3cc-413a-a816-234e95f44885" 488 | }, 489 | "execution_count": null, 490 | "outputs": [ 491 | { 492 | "output_type": "stream", 493 | "name": "stderr", 494 | "text": [ 495 | "train loss: 1.48e-02 | test loss: 1.58e-02 | reg: 2.78e+00 : 100%|██| 25/25 [00:08<00:00, 2.86it/s]\n" 496 | ] 497 | }, 498 | { 499 | "output_type": "display_data", 500 | "data": { 501 | "text/plain": [ 502 | "
" 503 | ], 504 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZcAAAFICAYAAACcDrP3AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAqU0lEQVR4nO3daWwc530G8Gd2l8cuueRyuctzeYmiDsqiSB1UbclyZB10Zdcu6iRO7DRCjKJFm0hBGxQIUiBfCgcp0LRS3S/9EANSWiQOKqeVS1mMI0u2nOhkdPCQKN43KV7La0kud+ftB3kmHImUSHHI2eP5AYI9s7vS3/K+fOY95h1JCCFARESkI5PRBRARUeRhuBARke4YLkREpDuGCxER6Y7hQkREumO4EBGR7hguRESkO4YLERHpjuFCRES6Y7gQEZHuGC5ERKQ7hgsREemO4UJERLpjuBARke4YLkREpDuL0QUQhQMhBIaGhjAxMYHExESkpqZCkiSjyyIKWey5ED2G1+vF8ePHUVRUBLfbjYKCArjdbhQVFeH48ePwer1Gl0gUkiQ+iZJoflVVVXj99dfh8/kAPOi9KJRei81mw6lTp1BRUWFIjUShiuFCNI+qqiq8/PLLEEJAluUF32cymSBJEiorKxkwRHMwXIge4vV64fF4MDU19dhgUZhMJlitVnR1dcHhcKx8gURhgHMuRA85ceIEfD7fooIFAGRZhs/nw8mTJ1e4MqLwwZ4L0RxCCBQVFaGlpQVLaRqSJGHNmjVobGzkKjIiMFyINAYHB+F2u5f1+dTUVB0rIgpPHBYjmmNiYmJZnx8fH9epEqLwxnAhmiMxMXFZn7fb7TpVQhTeGC5Ec6SmpqKwsHDJ8yaSJKGwsBBOp3OFKiMKLwwXojkkScKRI0ee6rNHjx7lZD7RFzihT/QQ3udCtHzsuRA9xOFw4NSpU5AkCSbT45uIcof+Bx98wGAhmoPhQjSPiooKVFZWwmq1QpKkR4a7lHNWqxVnzpzBwYMHDaqUKDQxXIgWUFFRga6uLhw7dgxr1qzRvLZmzRocO3YM3d3dDBaieXDOhWgRhBA4f/489u3bh3PnzmHv3r2cvCd6DPZciBZBkiR1TsXhcDBYiJ6A4UJERLpjuBARke4YLkREpDuGCxER6Y7hQkREumO4EBGR7hguRESkO4YLERHpjuFCRES6Y7gQEZHuGC5ERKQ7hgsREemO4UJERLrjlvtEiySEgBBi3oeHEZEWey5ES8BQIVoci9EFEIULBgvR4rHnQkREumPPhQw3OTmJ5uZmcPpPHyaTCYWFhbDZbEaXQlGM4UKGq6mpwcsvv4zk5GQOPS2TEAJjY2OoqqrCtm3bjC6HohjDhQwnyzLS0tJQWVmJpKQko8sJK5OTk+jp6UFhYSFMJhO8Xi8OHToEWZaNLo2iHMOFQoLJZEJqaiqSk5ONLiVsCCFw6tQp/PCHP8TXvvY1HDlyBCkpKTCbzUaXRsQJfaJwNTAwgHfffVf95/79+1FfX290WUQAGC5EYSkYDOKnP/0p7ty5o55bu3YtCgoKDKyK6A84LEYURoQQmJ6exvHjx/GjH/1IXWEXHx+P733ve1whRiGD4UIUAoQQCAQCuH//Ppqbm9HS0oLR0VEkJCQgLy8PhYWFSEtLw+TkJH7wgx/gZz/7GQKBgPr5gwcPYu/evfD5fAb+VxD9AcOFyCBCCMzMzKCpqQlVVVX46KOPUFtbi+HhYU1wmM1mJCYmIjMzE7Ozs2hpadHcE1RaWop33nkHcXFxDBcKGQwXolWiBMLo6Cjq6upw7tw5/PrXv8bt27cxMTGx4OeCwSBGR0cxOjqqOW8ymfAnf/InOH78OHJycniPEIUUhgvRChNCIBgM4vbt2zh58iTOnj2LtrY2+P3+x35OkqQFdy2Ij4/HkSNH8A//8A+w2+0MFgo5DBeiFSSEQF9fH9555x3853/+J8bGxhZ8r9Vqxbp161BeXo7S0lK43W6Mjo6ivr4et27dQmdnJ6anp+F0OvHd734Xb731FiwWC4OFQhLDhWiFCCFQX1+Pt99+G9euXZv3PQkJCdi0aRP++I//GIcOHUJxcbG64mtuzyUYDGJqagqBQABxcXGwWq0MFQppDBeiFSCEQFNTE958803U1NRoXnM6ndi1axcOHjyIPXv2YO3atYiPj583LJRzFosFdrt9VWon0gPDhWgFjI6O4jvf+Y4mWGw2G95++218+9vfRmFhIcxmM3sfFLEYLkQ6k2UZ//7v/45z586p55KTk3H8+HG8+eabDBWKCgwXIh0JIVBXV4d3331X3Zk4NjYWP/rRj/CNb3wDJhN3XKLowG86kY6CwSD+9V//FQMDA+q5L3/5y/jWt77FYKGowm87kU6EEKipqcGvfvUr9VxaWhp+8IMfIC4uzsDKiFYfw4VIJ0IIvPfee5o76Q8fPowNGzZwjoWiDsOFSCddXV2aXovb7cbbb7/NYKGoxHAh0oEQAqdPn0Zvb6967tVXX8XatWsZLhSVGC5EOpiamsL777+v3lFvtVrx53/+55zEp6jFbz7RMgkhcPv2bdy4cUM9V1ZWhu3bt7PXQlGL4UKkg1/96lfqs1QkScJXvvIVWK1Wg6siMg7DhWiZxsbGcObMGfXY6XTi0KFD7LVQVGO4EC2DEALV1dVobGxUzz333HPIz883riiiEMBwIVqmyspK9cFfkiThtddeg8XCnZUoujFciJZhbGwMH3/8sXrscrmwd+9eDolR1GO4ED0lIQRqa2vR1NSkntu5cyc8Ho+BVRGFBoYL0TL8+te/xvT0NIAHQ2KHDh3ikBgRGC5ET216elozJJacnIwvfelLHBIjAsOF6Km1tLSgvr5ePd68eTNXiRF9geFC9BSEEPj0008xNjamntu/fz+31if6AsOF6CkEg0H85je/UY/j4+Oxb98+DokRfYHhQvQUBgYGcO3aNfU4Pz8fxcXFBlZEFFoYLkRLJITA73//e/T396vndu/ejaSkJAOrIgotDBeip/DJJ58gEAgAAEwmEw4cOMAhMaI5GC5ESzQ1NYWLFy+qx6mpqdixYwfDhWgOhgvRErW1teHevXvq8ebNm5GVlWVgRUShh+FCtARCCPzud7/D+Pi4eu6FF15ATEyMgVURhR6GC9ESyLKM8+fPq48zjo2NxQsvvMAhMaKHMFyIlsDr9eLq1avqscfjwaZNmwysiCg0MVyIlqCurg6dnZ3q8c6dO+FwOIwriChEMVyIFknZ8mXug8H27t0Lk4nNiOhhbBVEizQ7O4tPP/1UPbbb7Xj22Wc530I0D4YL0SL19PSgpqZGPV63bh0KCgoMrIgodDFciBZBCIHr169jaGhIPff888/DarUaWBVR6GK4EC3S+fPnIcsyAMBsNmPv3r0GV0QUuhguRIswMTGBzz//XD1OS0tDWVkZ51uIFsBwIVqExsZGtLS0qMelpaVIS0szsCKi0MZwIXoCIQQ+//xzTE5Oquf27t0Li8ViYFVEoY3hQvQEwWAQn3zy
iXpstVqxZ88eDokRPQbDhegJ7t+/j+rqavU4Pz8f69evN7AiotDHcCF6jIWeOmm32w2siij0MVyInuDcuXOap07u27ePQ2JET8BwIXqMyclJXLhwQT12uVzYuXMnw4XoCRguRI/R0NCAxsZG9bisrIxPnSRaBIYL0QKEEPjNb34Dn8+nnjt48CCXIBMtAsOFaAF+vx9VVVXqcWJiIvbu3cshMaJFYLgQLaC1tRU3b95Uj4uLi7Fu3TrjCiIKIwwXonkoQ2Kjo6PquYqKCu6CTLRIDBeiefj9fpw+fRpCCAAP7sp/6aWXDK6KKHwwXIjm0dTUhGvXrqnHGzduxObNmznfQrRIDBeihwghcPr0ac2Q2CuvvIKEhAQDqyIKLwwXoodMTEzggw8+UI/tdjtee+019lqIloDhQjSHEAK//e1vUVNTo57bsWMHNm7caGBVROGH4UI0RyAQwHvvvQe/3w/gwV5ib775JuLi4gyujCi8MFyIviCEQHV1tebGyfz8fBw6dIhDYkRLxHAh+oLf78e//Mu/YHx8XD33jW98A+np6QZWRRSeGC5E+MMKsQ8//FA95/F4cPjwYfZaiJ4Cw4WinhACdXV1+P73v4+ZmRkAgCRJ+Ou//mvk5eUZXB1ReGK4UFQTQqC2thaHDx9Ga2uren7Hjh34y7/8S5hMbCJET4Mth6KSEALDw8P4j//4D7zyyiu4ceOG+prL5cI///M/w+l0GlghUXjjgykoLAkhIMsyJElSfy3mM4FAAHfv3sWpU6fwy1/+Evfu3YMsy+p7EhMT8ZOf/ATPPfcc51qIloHhQiFHCIGRkRHExcVptlwRQmBmZgYXL17EhQsXcPv2bSQkJKC4uBgHDhxAaWkp4uPjHwkFIQTGx8dx4cIFvPfee7hw4QLGxsYe+XNdLhd+8pOf4M033+RwGNEyMVwoZASDQTQ0NODnP/85fvGLX+Db3/42vvOd70CSJAgh0N3djb/7u7/D6dOn1ZscFT/+8Y9RWlqKr33ta3jppZeQnp6OQCCA1tZWnD17Fv/93/+N+vp6BAKBR/5ci8WCPXv24J133sGOHTsYLEQ6YLhQyGhsbMRXvvIVdHV1AQB+9rOf4fDhw/D7/fjtb3+Lf/qnf8Lly5fn/ez09DQuX76MK1eu4Ic//CFcLhdmZ2cxMDCA6enpeT9jt9uxa9cu/MVf/AUqKipgs9k4FEakE4YLhYy8vDysXbtWDZdbt27hrbfeQn19PTo6OhAMBtX3SpIEq9UKIQSmpqbU80IIjI2NzTvsBTzYzqWgoABvvPEGvvrVr2LDhg2IiYlhqBDpjOFCIcNqteKb3/wmPvvsM8iyjNnZWVRWVj7yvvj4ePz93/893njjDQQCAZw7dw6//OUvcfPmTfU+lYclJCSgvLwc3/zmN3Ho0CG4XC4GCtEKYrhQSDl06BAKCgrQ3Nz8yGsmkwlZWVn43ve+h7/5m79BTEwMAGDz5s34q7/6K1RXV+Ojjz7ClStXMDQ0hJiYGOTm5uLZZ5/F/v37sXHjRsTGxjJUiFYBw4VCitvtxltvvYV//Md/hBACkiTB7XZjz549+LM/+zO88MILSE9P10y6S5IEm82G559/Hrt370YwGMTs7CxMJpM65MVAIVpdDBcKKZIk4bvf/S4AoKWlBS+++CJefPFFZGdnw2QyPTEkJEmCxWKBxcKvNpGR2AIpJMiyjJGREXXS/ujRo2rPBYDmkcO0sLl/h0RGYriQ4SRJQn9/P/bt28d7TJZJlmV4vV7+PZLhJCGEMLoIim4TExNobGwEv4r6kCQJ69at0+xuQLTaGC5ERKQ79p2JiEh3DBciItIdJ/SJFmnuCDLvmyF6PPZciBbpxo0bMJlMmgeLEdH8GC5ERKQ7hgsREemO4UJERLpjuBARke4YLkREpDuGCxER6Y7hQkREumO4EBGR7hguRESkO4YLERHpjuFCRES6Y7gQEZHuGC5ERKQ7hgsREemO4UK0CEIIjIyMAABGRkbAp4MTPR7DhegxvF4vjh8/jqKiIuzfvx8AsH//fhQVFeH48ePwer3GFkgUoiTBSzCieVVVVeH111+Hz+cDMP+TKG02G06dOoWKigpDaiQKVQwXonlUVVXh5ZdfhhACsiwv+D6TyQRJklBZWcmAIZqD4UL0EK/XC4/Hg6mpqccGi8JkMsFqtaKrqwsOh2PlCyQKA5xzIXrIiRMn4PP5FhUsACDLMnw+H06ePLnClRGFD/ZciOYQQqCoqAgtLS1LWhEmSRLWrFmDxsZGdT6GKJoxXIjmGBwchNvtXtbnU1NTdayIKDxxWIxojrGxsWV9fnx8XKdKiMKbxegCiIwUCATQ09ODjo4OdHZ2oqGhYVm/32effYaRkRHk5+fD4XBwiIyiFofFKKrMzMygq6sLnZ2d6OjoQG9vL4LBIOLj4+HxeJCTk4Ovf/3raG9vX/LvnZGRge9///u4f/8+ACA5ORn5+fnqL7fbzbChqMFwoYjm8/nUIOns7ER/fz+EEEhISEBubi5ycnKQm5ur+cF//Phx/O3f/u2SJ/SPHTuGo0ePYmpqCu3t7Whra0Nrayt6enogyzJsNpsmbDIzM2EycWSaIhPDhSLK2NiYGiQdHR0YGhoCADgcDjVIcnJy4HQ6F/w99L7Pxe/3o7OzE62trWhra0NnZycCgQDi4uKQm5uL/Px8FBQUIDs7GxYLR6opMjBcKKwNDQ1peiajo6MAAJfLpemZ2O32Jf2+S71D/8yZMzh48OCifu9AIIDu7m60tbWhra0N7e3tmJmZgcVigcfjQUFBAfLy8pCbm4u4uLgl1U0UKhguFDaEELh//74aJJ2dnZicnIQkScjIyFCDxOPxwGazLfvPW+zeYh988MGig2U+siyjr69PDZu2tjZMTk7CZDIhKysLeXl5auDo8d9FtBoYLhSygsEgent71Z5JV1cXZmZmYDabkZWVhdzcXOTm5iI7OxuxsbErUoPX68XJkyfxb//2b2hublbPFxYW4ujRozh8+DCSk5N1/TOFEBgcHNSEjbL7clpaGgoKCtR5m6SkJF3/bCK9MFwoZMzOzqK7u1vtmXR3dyMQCCA2NhbZ2dlqmGRmZq763IQQAsPDwxgfH4fdbofT6VzVlV9er1cNmtbWVgwODgIAnE6nZpHAatdFtBCGCxlmenpaHd7q6OhAX18fZFmG1WpFTk6OOsyVnp7OVVUPmZiYQHt7O1pbW9He3o7e3l4IIWC329WgKSgoQFpaGsOGDMFwoVUzMTGhmXxX7gex2+3q5HtOTg5cLhd/IC7R9PQ02tvb1cDp7u5GMBiE1WpFXl6eGjhZWVkwm81Gl0tRgOFCK8br9aph0tHRoT4m2Ol0apYFc5t6/c3OzqKrq0td/tzR0YHZ2VnExMRolj97PB7ExMQYXS5FIIYL6UKZhJ47zKXss5WWlqbpmSQmJhpcbfQJBoPo6enRLBKYnp6G2WxGdna2uhotLy8P8fHxRpdLEYDhQk9FlmX09/drlgVPTU3BZDIhMzNTsyyYP6xCj7KsW+nZtLW1YXx8HJIkITM
zU13+nJ+fj4SEBKPLpTDEcKFFUTZ4VHol3d3d8Pv9sFgsyM7OVsMkOzubwyxhSFkNN7dnMzw8DODBDalzlz9zGJMWg+FC8/L7/ejq6lJ7Jj09PQgGg4iLi4PH41GXBWdkZHCCOEKNjo5q9khTFmA4HA7N8mcuwKD5MFwIwB82eFR6JnM3eJy7LJhLW6OXz+fTLH9WNuRMSEjQhE1GRgaXjhPDJVqNjY1plgUrN+UlJydrVnLxqYq0kJmZGXR0dKjDaF1dXeqGnHOXP3NDzujEcIkSw8PDmmXBczd4nNsz4XYi9LQCgQC6uro0G3Iq83I5OTnq8uecnJwV266HQgfDJQIJITAwMKDZel7Z4DE9PV3tlXg8Hq4EohUjyzJ6e3s1iwR8Pp+6Iaey/Dk/Px9Wq9XocklnDJcIEAwG0dfXpxnmUjZ4zMzM1GzwyC3cySjKRc/csFF60Onp6WrPJi8vjz3oCMBwCUPKBo/KBLwy1h0TE6M+qjc3NxdZWVkc66aQJYR4ZENO5eFuTqdTDZqCggKkpKRwIUmYYbiEiebmZrS3t6OzsxO9vb2QZRnx8fGayXeu0qFwNz4+rlmR1tfXByEEkpKS1KDZuHEjezZhgOESJq5fvw6/3w+Hw4Hk5GQ4HA7YbDZezVFEm52dxejoqPprfHwcmzZtgsvlMro0egKGS5hQNh0kimbBYBCSJLGHHgYYLkREpDvGPxER6S7qlxL5/X54vV6wA6cPSZKQkpLCIbww4/f7MTw8zHagE0mS4HQ6o/pm0agPl4GBAbz//vu8/0MnMzMz+PrXv47MzEyjS6El6Ovrw8mTJxEXF8dFIsskhMDMzAy+9a1vITs72+hyDBP14SKEgM1mwxtvvMGAWaJAIACz2az+MJqensb777/Pq98wpGxSevjwYbaDJZqdnYXZbFYXGUxPT+PEiRNR3w6iPlyAB11Yq9XKh1otQSAQwNmzZzE1NQWHw4HMzEysW7eOV71hTJIk2Gw2toMlCAQC+PDDDzExMQGXywWPx8N28AWGCz0Vv9+Pnp4ejI2Nob29HQMDA1i3bp3RZRGtKr/fj87OTni9XrS2tqK7uxtFRUVGlxUSuFqMnorP58P09LR6nJyczHsPKOpMTEzA5/Opx6mpqdxy6Qv8aUBPZXx8HIFAQD3m3k8UjYaHhzE7O6se86mcf8BwoSUTQmBkZASyLKvnnE6ngRURrT4hBAYHBzUT926328CKQgvDhZ6KsnstAJhMJjgcDuOKITJIf3+/+u8Wi4VPbp2D4UJLJoTA8PCwehwbGwu73W5gRUSrLxAIYGBgQD22Wq1ITk42sKLQwnChJZudnYXX61WPExISYLPZjCuIyABTU1MYGRlRjx0OB5+oOQfDhZZscnISExMT6jG3e6FoNDw8jKmpKfU4PT0dZrPZwIpCC8OFlmxkZAR+v1895goZijZCCPT29iIYDKrnonmrl/kwXGhJhBC4f/++ZoVMenq6gRURrT4hBDo7O9Vji8WCzMxMXmTNwXChJVGu2BQxMTHsuVDUUXaoUCQmJnI5/kMYLrQkfr9fs0ImISGBzzOnqDM8PKxZ1JKRkcE92R7CcKElGR0dxdjYmHrsdru5iy5FFSEE2traNHfmFxQUcPujh/BvgxZNCIGenh5No/J4PBwSo6giyzLu3bunHlssFuTl5bEdPIThQoumXLEpzGYzw4WizsjICLq7u9Vjl8uFtLQ0AysKTQwXWjSfz6dpVElJSXC5XAZWRLS6hBC4e/euZifkoqKiqH6c8UIYLrQoQgh0dHRgfHxcPZebm8tJTIoq09PTuHnzpnocExOD4uJi4woKYQwXWhRZllFXV6fe32IymfhQJIoqQgjU19ejr69PPZeTk8P7WxbAcKEnEkKgr68P7e3t6rmUlBTOt1DUEEJgdHQUn332meYCa8eOHXw42AIYLvREwWAQly9f1mz5UlxczE36KCoIIeD3+3H27FkMDg6q53Nzc7F+/XpeYC2A4UKPJYTA7du30dTUpJ6z2+3YvHkzGxVFPCEEZmdncfbsWdTW1qrn4+LisG/fPk7kPwb7c7QgIQTu3buHCxcuqBv0SZKE7du387kVFPGEEJiZmcFHH32E6upqdThMkiTs3r0b+fn5vMB6DIYLzUsIgcbGRpw5cwbT09Pq+by8PJSVlbFRUUQTQmBiYgIffvgh6uvrNcGydetW7N69m3fkPwHDhR4hyzLu3r2LqqoqzfMqUlNTcfDgQW73QhFNWcDyv//7v5qdjyVJQmlpKQ4dOsTnFy0Cw4U0gsEgbt26hU8++UQzgZ+SkoJXX30Vqamp7LVQRBJCIBgMora2FlVVVZo99EwmE7Zv346KigrExcWxDSwCw4UA/GHi8tKlS7hy5QoCgYD6mtPpxGuvvYaMjAw2KopIylLjTz75BDdv3tQ8BMxisWDPnj3Ys2cPLBYL28AiMVxIHV8+d+4c7ty5o3kQWFpaGl599VW43W42Koo4ykVVXV0dzp8/j6GhIc3riYmJeOmll1BSUsJHGC8RwyXKKU/U+/jjj9Hf3695zePx4JVXXkFKSgqDhSKKEAKyLKOjowMXLlxAS0sLZFnWvCcnJwevvPIKsrOz+f1/CgyXKCWEwPT0NK5fv46rV69iZmZGfU2SJGzYsAEHDhxAQkICGxZFDCVUenp68Lvf/Q537tzRPEICeHAPS3l5Ofbs2QOr1crv/1NiuEQZZdKypaUFn3/+uWafJODBRnzl5eV49tlnERMTw4ZFEUH53nd1deHKlStoaGjQXFABDy6qcnJycODAAfUeFn7/nx7DJUooV2xK42ptbdVMWgKAw+HAiy++iHXr1nENP0UEZeuWlpYWXL16Fa2trY/0VIAHqyF37dqFsrIyrgbTCcMlws29Yrt27RpaW1s1K8GABw/9Wr9+PV544QU4HA42LAprQggIITA+Po47d+7g97//PXp7ex+ZUwEebGW0bds2lJeXIykpid99HTFcIpQQAoFAAG1tbbh+/To6OzsfCRXgwWqwXbt2oaioCGazmY2LwpZyIdXb24ubN2+ivr5ec6/KXElJSSgtLcWOHTvgcDjYU18BDJcIo+yH1NzcjOrqavT09Mx7xZacnIxt27ahpKSEk5YU1oQQmJycRGNjI27cuIGOjo55h74kSYLT6cTWrVtRWlqKpKQkhsoKYrhECKWB3b17Fzdv3sTAwIDmfhVFcnIySktLUVJSgsTERIYKhSWll9LX14fbt2+jrq4Oo6Oj837nzWYzsrOzsX37dmzYsAE2m43f+1XAcAljytjyyMgIamtrUVtbi9HR0UfeJ0kSUlJSUFpaik2bNjFUKCzNnUu5d+8ebt++jc7Oznl7KQBgs9lQVFSEbdu2IScnh6sfVxnDJQwp8yk9PT3qs1bmbjCpkCQJbrcbZWVlvGKjsKWs+Oru7satW7fQ0NCA8fHxed9rMpngdrtRUlKCZ555Bk6nk0uKDcJwCRNKd39ychJNTU2oqalBb2/vvJP0ZrMZmZmZ2LZtGwoLC7m0ksKOsnTe6/Wivr4et2/fRn9//y
PL5xVWqxVr1qxBWVkZCgoK+J0PAQyXEKc0soGBAdTW1qKhoQFjY2Pzji3HxMQgPz8fW7duRW5uLjfZo7Cj7BzR2tqKmzdvorW1FT6fb973mkwmpKWlYfPmzdi0aROcTidMJhO/8yGC4RKi5jayJ40tJyYmYv369diyZQvcbjcbGIUV5QJqaGgINTU1qKmpwdDQ0LyrHIEH3/eioiKUlpYiJycHsbGx/L6HIIZLCFEmLIeGhlBfX487d+5gZGRk3l6KMra8efNmbNiwAXa7HQDYyChsKHMp7e3tqK6uRnNz87xzh8CDbe+zs7OxZcsWrF+/Xr3hkd/30MVwCQHKtt+dnZ24desWWltbH9n3SBEfH4/8/Hxs2bIFHo+HV20UdpRHPNTX16O6unrBu+clSUJycjKKi4tRUlKCjIwMDvWGEYaLgZR7U+7du4dbt26hv79/wUbmdDpRXFyMjRs3cgUMhR2lVz48PIwbN27g1q1bGBkZmfe9sbGxyMvLQ2lpKdauXcuducMUw2WVzb03paamRr35az6xsbHIyclBSUkJ8vPzER8fz0ZGYUWZT+nr68PVq1dRX1+/4AS9w+HApk2bUFpairS0NG5HFOYYLqtECZX+/n7cuHEDDQ0NC44vJycnY/369di8eTNcLhcn6CnszH1uyqVLl3D37t15h3rnLpvfuHGj2kvh9z38MVxWmNLIent7cf36dTQ1NcHv9z/yPrPZjPT0dGzZsgVFRUUcCqCwJcsyBgcHcfHiRdTV1c0bKnFxcSgsLER5eTny8vJ493wEYrisEKWn0tvbiytXrqC5uXnepcSxsbEoKChAWVkZPB4PGxmFLSEEpqamcPnyZVy+fBmTk5OPvCchIQGbNm3C9u3bkZGRwV55BGO4rAAhBAYGBnDlyhXcvXt33lCx2WzYsGEDSktLeW8KhT1ZltHa2oqzZ8+ip6fnkdftdjvKysqwbds2LkiJEgwXHSmrv65fv44bN27MO6dit9vxzDPPYMuWLUhJSQHAe1MofCmPeLh48SIuXbr0yBCYzWbD1q1bsXPnTj43JcowXHSgbP997949XLx4EUNDQ4+8x263o6SkRH2OBMBQofCm3PD7f//3f2hqatLc7Gs2m1FcXIwvfelLSEtLY6hEIYbLMgkh4PV68emnn6KhoeGRjfWsViu2bNmCrVu3Ijk5mYFCEUGWZbS3t+N//ud/MDg4qHnN5XLhwIED2LBhA5cTRzGGyzLIsozGxkacO3cOXq9X85rFYsGGDRvw7LPPwuVyAWBPhSKDLMuoq6vDhx9+qJm0N5lMKCkpwYEDB3ghRQyXp6Fs13Lp0iVcvXpVM2EvSRIyMzOxZ88e5OXlcaKeIkowGER1dTXOnj2rmV+xWq3Yt28ftm/fzi1aCADDZcmEEPD5fPj4449x584dzThzXFwcysvLsX37dt5NTxFHlmVcu3YNZ8+e1VxQORwO/Omf/ikKCws5t0IqhssSKI9YraysRGtrq+a19PR0HDhwADk5OQwVijhCCNTW1qKqqkoTLBkZGfjyl7+MjIwMfu9Jg+GySMpOrqdPn0ZHR4d6XpIkbNy4Efv27eOz6SkiKfdtffTRR5rdJTweD7761a+q960QzcVwWQTlzuPKykpNsJjNZuzcuRPPPfcc76yniBUMBnH+/HmMjY2p57KysvDGG28gJSWF33uaF8NlEQKBAM6dO4eWlhb1nMViwfPPP4/y8nKYzWYDqyNaOUIIdHZ24s6dO+q5hIQEvPbaawwWeizOvj2BEALV1dWoq6tTz5nNZuzevZvBQhFPmcSfO8/yR3/0R8jKymKw0GMxXB5D2Xjy0qVL6kO8JEnCjh07sHPnTgYLRTyv14umpib12OFwYNu2bVwVRk/Eb8hjBAIBfP7555o9woqKirBr1y4GC0U8IQQaGxs1N0oWFxfDbrcbWBWFC4bLAoQQaGtr0yw5TkpKwt69exEbG2tgZUSrQ5ZlNDQ0qMcWiwXPPPMMh8NoURguC1DuRFb2CpMkCeXl5Vx2SVFjfHxcs32+y+Xi/Sy0aAyXeSiPI+7s7FTPpaam8qqNooYy3zh3SKygoIC9dlo0hssC6uvrNStkSkpKYLVaDayIaHW1t7er2xtJkoSCggKDK6JwwnCZx9TUlGaFjM1mw/r169lroagRDAbR1dWlHsfHxyMzM5NtgBaN4fIQZThgdHRUPZebm4vk5GQDqyJaXT6fT/OcFqfTyVVitCQMl3k0Nzdr7mthr4WizcjICHw+n3qcmZkJi4UbetDiMVweMjs7q9k/zGq1wuPxMFwoaigLWuY+VTUrK8vAiigcMVwe4vV6MTIyoh5nZGQgMTHRwIqIVl9fX5/672azGenp6bzAoiVhuMwhhEBPT49mlVhubi63uqCoIssyBgYG1OP4+Hg4HA7jCqKwxJ+aD5l7b4vZbOaQGEUdv9+v6b3b7XbYbDYDK6JwxHCZY3Z2Fv39/epxQkICUlNTDayIaPVNTExoJvOdTicn82nJGC5zTExMaJYgu1wuxMfHG1gR0eobHR3VPHHS7Xaz905LxnCZY3BwUNOoMjIyON9CUWdoaEi9Mx94cJHFcKGl4k/OOfr7+zXbXXCTPoo2QggMDQ2pxyaTCSkpKQZWROGK4fIFZW2/IiYmhvMtFHWEEBgeHlaPY2NjkZSUZGBFFK4YLl+YnZ3VXLElJiZyuwuKOsFgUDPvaLVauVKMngqXgHzBZDJh586d6O7uxsDAAJxOJ7cXp6gjSRLKysqQnp6OkZER2O12xMTEGF0WhSGGCx4MBQQCAaxfvx7r1q2DLMsIBoOYmZkxurSwMj09rZkIpvAihMDs7CxKS0tRWlqqtgO/369Z6EKPNzU1xXYAhgsAYHJyEv/1X//FyftlEkJgZmaGf49hSJIkTExM4Kc//Sn//y2TEALT09NR//coiSiPWL/fr5nApOXjkGL4mZmZeWQJMj09SZLgcrmiuh1EfbgQEZH+uFqMiIh0x3AhIiLdMVzCRDAYxPj4uOYBTkTRhu0gfDBcwsTAwADeffddzXM2iKJNf38/fvzjH2t206DQxHAhIiLdMVyIiEh3DBciItIdw4WIiHTHcCEiIt0xXIiISHcMFyIi0h3DhYiIdMdwISIi3TFciIhIdwwXIiLSHcOFiIh0x3AhIiLdMVyIiEh3DJcwIITA0NAQRkZG+JxzilpCCAwPD8Pr9WJ4eJjtIMRJgv+HQpbX68WJEyfw7rvvorm5WT1fWFiII0eO4PDhw3A4HMYVSLQK2A7CE8MlRFVVVeH111+Hz+cDAM1VmiRJAACbzYZTp06hoqLCkBqJVhrbQfhiuISgqqoqvPzyyxBCQJblBd9nMpkgSRIqKyvZsCjisB2EN4ZLiPF6vfB4PJiamnpsg1KYTCZYrVZ0dXVxaIAiBttB+OOEfog5ceIEfD7fohoUAMiyDJ/Ph5MnT65wZUSrh+0g/LHnEkKEECgqKkJLS8uSVsJIkoQ1a9agsbFRHYcmCldsB5GB4RJCBgcH4Xa7l/X51NRUHSsiWn1sB5GBw2IhZGJiYlmfH
x8f16kSIuOwHUQGhksISUxMXNbn7Xa7TpUQGYftIDIwXEJIamoqCgsLlzxeLEkSCgsL4XQ6V6gyotXDdhAZGC4hRJIkHDly5Kk+e/ToUU5iUkRgO4gMnNAPMVzfT8R2EAnYcwkxDocDp06dgiRJMJke/79HuTP5gw8+YIOiiMJ2EP4YLiGooqIClZWVsFqtkCTpkW6+cs5qteLMmTM4ePCgQZUSrRy2g/DGcAlRFRUV6OrqwrFjx7BmzRrNa2vWrMGxY8fQ3d3NBkURje0gfHHOJQwoz7EYHx+H3W6H0+nkpCVFHbaD8MJwISIi3XFYjIiIdMdwISIi3TFciIhIdwwXIiLSHcOFiIh0x3AhIiLdMVyIiEh3DBciItIdw4WIiHTHcCEiIt0xXIiISHcMFyIi0h3DhYiIdMdwISIi3f0/yLnz94ls8iwAAAAASUVORK5CYII=\n" 505 | }, 506 | "metadata": {} 507 | } 508 | ] 509 | }, 510 | { 511 | "cell_type": "markdown", 512 | "source": [ 513 | "the idea here is that we see both the input ('activation') functions look the same; so we can lock them together and retrain the model again" 514 | ], 515 | "metadata": { 516 | "id": "s0t4m68TA5Ac" 517 | } 518 | }, 519 | { 520 | "cell_type": "code", 521 | "source": [ 522 | "model.lock(0,[[0,0],[1,0]])\n", 523 | "#model.plot(beta=10)" 524 | ], 525 | "metadata": { 526 | "id": "csChbSuN9Ae3" 527 | }, 528 | "execution_count": null, 529 | "outputs": [] 530 | }, 531 | { 532 | "cell_type": "code", 533 | "source": [ 534 | "model.train(dataset, opt=\"LBFGS\", steps=25, device=device);" 535 | ], 536 | "metadata": { 537 | "colab": { 538 | "base_uri": "https://localhost:8080/" 539 | }, 540 | "id": "CXBPx8RQBXK9", 541 | "outputId": "b38575c7-342a-4db9-a0f6-2f4210805904" 542 | }, 543 | "execution_count": null, 544 | "outputs": [ 545 | { 546 | "output_type": "stream", 547 | "name": "stderr", 548 | "text": [ 549 | "train loss: 1.40e-02 | test loss: 1.84e-02 | reg: 2.76e+00 : 100%|██| 25/25 [00:08<00:00, 2.81it/s]\n" 550 | ] 551 | } 552 | ] 553 | }, 554 | { 555 | "cell_type": "markdown", 556 | "source": [ 557 | "the loss kinda remains the same; and now we can ask KAN for suggestion, what kinda function this could be:" 558 | ], 559 | "metadata": { 560 | "id": "fdIKMx2EBwcR" 561 | } 562 | }, 563 | { 564 | "cell_type": "code", 565 | "source": [ 566 | "model.suggest_symbolic(0, 1, 0)" 567 | ], 568 | "metadata": { 569 | "colab": { 570 | "base_uri": "https://localhost:8080/" 571 | }, 572 | "id": "BfU3DLTGBq--", 573 | "outputId": "ff748595-bb91-4fd8-c812-2899a343fe95" 574 | }, 575 | "execution_count": null, 576 | "outputs": [ 577 | { 578 | "output_type": "stream", 579 | "name": "stdout", 580 | "text": [ 581 | "function , r2\n", 582 | "arctanh , 0.9986623525619507\n", 583 | "tan , 0.9961022138595581\n", 584 | "arcsin , 0.968244731426239\n", 585 | "x^3 , 0.9537016749382019\n", 586 | "sqrt , 0.9368619918823242\n" 587 | ] 588 | }, 589 | { 590 | "output_type": "execute_result", 591 | "data": { 592 | "text/plain": [ 593 | "('arctanh',\n", 594 | " ((x)>, (x)>),\n", 595 | " 0.9986623525619507)" 596 | ] 597 | }, 598 | "metadata": {}, 599 | "execution_count": 16 600 | } 601 | ] 602 | }, 603 | { 604 | "cell_type": "markdown", 605 | "source": [ 606 | "This is really cool that the activation on the first layer were automatically suggested as `arctanh` ! We want to retrain now the model with this as our prior knowledge.\n", 607 | "\n", 608 | "Sadly this step turned out to be notoriously difficult as suggested in this [GitHub issue](https://github.com/KindXiaoming/pykan/issues/89) since optimization with complex symbolic function like $\\text{tanh}^{-1}$ that goes to infinity are problematic. 
\n", 609 | "\n", 610 | "Let's see if KAN recognizes the activation function at the second layer or not" 611 | ], 612 | "metadata": { 613 | "id": "xuUUDs3gEG7F" 614 | } 615 | }, 616 | { 617 | "cell_type": "code", 618 | "source": [ 619 | "# model.fix_symbolic(0,0,0,'arctanh')\n", 620 | "# model.fix_symbolic(0,1,0,'arctanh')\n", 621 | "\n", 622 | "#model.train(dataset, opt=\"Adam\", steps=100, update_grid=False, device=device);\n" 623 | ], 624 | "metadata": { 625 | "colab": { 626 | "base_uri": "https://localhost:8080/" 627 | }, 628 | "id": "qQ3GBd7nCGyz", 629 | "outputId": "320f7058-a0bd-44e9-e216-f68a398b7555" 630 | }, 631 | "execution_count": null, 632 | "outputs": [ 633 | { 634 | "output_type": "stream", 635 | "name": "stderr", 636 | "text": [ 637 | "train loss: 6.80e-01 | test loss: 6.86e-01 | reg: 3.20e+00 : 100%|█| 100/100 [00:02<00:00, 44.50it/s\n" 638 | ] 639 | } 640 | ] 641 | }, 642 | { 643 | "cell_type": "code", 644 | "source": [ 645 | "model.suggest_symbolic(1, 0, 0) # suggest the function at the final layer; expectation tanh\n" 646 | ], 647 | "metadata": { 648 | "colab": { 649 | "base_uri": "https://localhost:8080/" 650 | }, 651 | "id": "yvZuWaJ5Empa", 652 | "outputId": "13de4dcc-fea3-4ee3-8022-8a4e7065b231" 653 | }, 654 | "execution_count": null, 655 | "outputs": [ 656 | { 657 | "output_type": "stream", 658 | "name": "stdout", 659 | "text": [ 660 | "function , r2\n", 661 | "tanh , 0.9995558857917786\n", 662 | "arctan , 0.995667040348053\n", 663 | "gaussian , 0.9793974757194519\n", 664 | "sin , 0.9784255027770996\n", 665 | "sigmoid , 0.9764178991317749\n" 666 | ] 667 | }, 668 | { 669 | "output_type": "execute_result", 670 | "data": { 671 | "text/plain": [ 672 | "('tanh',\n", 673 | " ((x)>, (x)>),\n", 674 | " 0.9995558857917786)" 675 | ] 676 | }, 677 | "metadata": {}, 678 | "execution_count": 17 679 | } 680 | ] 681 | }, 682 | { 683 | "cell_type": "markdown", 684 | "source": [ 685 | "That's fantastic! 
Now we can retrain the model with this activation function in the final layer;\n", 686 | "\n", 687 | "`tanh` is well behaved compared to `arctanh` so here we can get a better loss by fixing the activation to this symbolic formula; Let's see" 688 | ], 689 | "metadata": { 690 | "id": "KGOwYC34Ma8P" 691 | } 692 | }, 693 | { 694 | "cell_type": "code", 695 | "source": [ 696 | "model.fix_symbolic(1,0,0,'tanh')\n", 697 | "\n", 698 | "model.train(dataset, opt=\"LBFGS\", steps=25, device=device);\n", 699 | "\n" 700 | ], 701 | "metadata": { 702 | "id": "H2w-VDh7EuTp", 703 | "colab": { 704 | "base_uri": "https://localhost:8080/" 705 | }, 706 | "outputId": "47b20278-dc4f-471d-81d4-6a7bee31aa24" 707 | }, 708 | "execution_count": null, 709 | "outputs": [ 710 | { 711 | "output_type": "stream", 712 | "name": "stdout", 713 | "text": [ 714 | "r2 is 0.9995558857917786\n" 715 | ] 716 | }, 717 | { 718 | "output_type": "stream", 719 | "name": "stderr", 720 | "text": [ 721 | "train loss: 1.07e-02 | test loss: 8.61e-03 | reg: 2.77e+00 : 100%|██| 25/25 [00:06<00:00, 4.03it/s]\n" 722 | ] 723 | } 724 | ] 725 | }, 726 | { 727 | "cell_type": "code", 728 | "source": [ 729 | "###let's view the discovered formula\n", 730 | "\n", 731 | "model.fix_symbolic(0,0,0,'arctanh')\n", 732 | "model.fix_symbolic(0,1,0,'arctanh')\n", 733 | "model.symbolic_formula()[0][0]" 734 | ], 735 | "metadata": { 736 | "colab": { 737 | "base_uri": "https://localhost:8080/", 738 | "height": 73 739 | }, 740 | "id": "CyA5SBoyMv9L", 741 | "outputId": "23e1a04b-d501-4af8-b527-f98dfdbc28ba" 742 | }, 743 | "execution_count": null, 744 | "outputs": [ 745 | { 746 | "output_type": "stream", 747 | "name": "stdout", 748 | "text": [ 749 | "r2 is 0.9991527199745178\n", 750 | "r2 is 0.998881459236145\n" 751 | ] 752 | }, 753 | { 754 | "output_type": "execute_result", 755 | "data": { 756 | "text/plain": [ 757 | "1.0*tanh(1.03*atanh(0.99*x_1) + 1.03*atanh(0.99*x_2))" 758 | ], 759 | "text/latex": "$\\displaystyle 1.0 \\tanh{\\left(1.03 \\operatorname{atanh}{\\left(0.99 x_{1} \\right)} + 1.03 \\operatorname{atanh}{\\left(0.99 x_{2} \\right)} \\right)}$" 760 | }, 761 | "metadata": {}, 762 | "execution_count": 21 763 | } 764 | ] 765 | }, 766 | { 767 | "cell_type": "markdown", 768 | "source": [ 769 | "Pretty Damn cool!\n", 770 | "\n", 771 | "\n", 772 | "#### To Try:\n", 773 | "\n", 774 | "1. Pruning (From deep to shallow)\n", 775 | "\n", 776 | "2. Discovering Phase Transition" 777 | ], 778 | "metadata": { 779 | "id": "Yrih3oRbNik9" 780 | } 781 | }, 782 | { 783 | "cell_type": "code", 784 | "source": [], 785 | "metadata": { 786 | "id": "4yg3l5fTNM5T" 787 | }, 788 | "execution_count": null, 789 | "outputs": [] 790 | } 791 | ] 792 | } 793 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | ## Repo License 2 | 3 | 4 | Apache License 5 | Version 2.0, January 2004 6 | http://www.apache.org/licenses/ 7 | 8 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 9 | 10 | 1. Definitions. 11 | 12 | "License" shall mean the terms and conditions for use, reproduction, 13 | and distribution as defined by Sections 1 through 9 of this document. 14 | 15 | "Licensor" shall mean the copyright owner or entity authorized by 16 | the copyright owner that is granting the License. 17 | 18 | "Legal Entity" shall mean the union of the acting entity and all 19 | other entities that control, are controlled by, or are under common 20 | control with that entity. 
For the purposes of this definition, 21 | "control" means (i) the power, direct or indirect, to cause the 22 | direction or management of such entity, whether by contract or 23 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 24 | outstanding shares, or (iii) beneficial ownership of such entity. 25 | 26 | "You" (or "Your") shall mean an individual or Legal Entity 27 | exercising permissions granted by this License. 28 | 29 | "Source" form shall mean the preferred form for making modifications, 30 | including but not limited to software source code, documentation 31 | source, and configuration files. 32 | 33 | "Object" form shall mean any form resulting from mechanical 34 | transformation or translation of a Source form, including but 35 | not limited to compiled object code, generated documentation, 36 | and conversions to other media types. 37 | 38 | "Work" shall mean the work of authorship, whether in Source or 39 | Object form, made available under the License, as indicated by a 40 | copyright notice that is included in or attached to the work 41 | (an example is provided in the Appendix below). 42 | 43 | "Derivative Works" shall mean any work, whether in Source or Object 44 | form, that is based on (or derived from) the Work and for which the 45 | editorial revisions, annotations, elaborations, or other modifications 46 | represent, as a whole, an original work of authorship. For the purposes 47 | of this License, Derivative Works shall not include works that remain 48 | separable from, or merely link (or bind by name) to the interfaces of, 49 | the Work and Derivative Works thereof. 50 | 51 | "Contribution" shall mean any work of authorship, including 52 | the original version of the Work and any modifications or additions 53 | to that Work or Derivative Works thereof, that is intentionally 54 | submitted to Licensor for inclusion in the Work by the copyright owner 55 | or by an individual or Legal Entity authorized to submit on behalf of 56 | the copyright owner. For the purposes of this definition, "submitted" 57 | means any form of electronic, verbal, or written communication sent 58 | to the Licensor or its representatives, including but not limited to 59 | communication on electronic mailing lists, source code control systems, 60 | and issue tracking systems that are managed by, or on behalf of, the 61 | Licensor for the purpose of discussing and improving the Work, but 62 | excluding communication that is conspicuously marked or otherwise 63 | designated in writing by the copyright owner as "Not a Contribution." 64 | 65 | "Contributor" shall mean Licensor and any individual or Legal Entity 66 | on behalf of whom a Contribution has been received by Licensor and 67 | subsequently incorporated within the Work. 68 | 69 | 2. Grant of Copyright License. Subject to the terms and conditions of 70 | this License, each Contributor hereby grants to You a perpetual, 71 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 72 | copyright license to reproduce, prepare Derivative Works of, 73 | publicly display, publicly perform, sublicense, and distribute the 74 | Work and such Derivative Works in Source or Object form. 75 | 76 | 3. Grant of Patent License. 
Subject to the terms and conditions of 77 | this License, each Contributor hereby grants to You a perpetual, 78 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 79 | (except as stated in this section) patent license to make, have made, 80 | use, offer to sell, sell, import, and otherwise transfer the Work, 81 | where such license applies only to those patent claims licensable 82 | by such Contributor that are necessarily infringed by their 83 | Contribution(s) alone or by combination of their Contribution(s) 84 | with the Work to which such Contribution(s) was submitted. If You 85 | institute patent litigation against any entity (including a 86 | cross-claim or counterclaim in a lawsuit) alleging that the Work 87 | or a Contribution incorporated within the Work constitutes direct 88 | or contributory patent infringement, then any patent licenses 89 | granted to You under this License for that Work shall terminate 90 | as of the date such litigation is filed. 91 | 92 | 4. Redistribution. You may reproduce and distribute copies of the 93 | Work or Derivative Works thereof in any medium, with or without 94 | modifications, and in Source or Object form, provided that You 95 | meet the following conditions: 96 | 97 | (a) You must give any other recipients of the Work or 98 | Derivative Works a copy of this License; and 99 | 100 | (b) You must cause any modified files to carry prominent notices 101 | stating that You changed the files; and 102 | 103 | (c) You must retain, in the Source form of any Derivative Works 104 | that You distribute, all copyright, patent, trademark, and 105 | attribution notices from the Source form of the Work, 106 | excluding those notices that do not pertain to any part of 107 | the Derivative Works; and 108 | 109 | (d) If the Work includes a "NOTICE" text file as part of its 110 | distribution, then any Derivative Works that You distribute must 111 | include a readable copy of the attribution notices contained 112 | within such NOTICE file, excluding those notices that do not 113 | pertain to any part of the Derivative Works, in at least one 114 | of the following places: within a NOTICE text file distributed 115 | as part of the Derivative Works; within the Source form or 116 | documentation, if provided along with the Derivative Works; or, 117 | within a display generated by the Derivative Works, if and 118 | wherever such third-party notices normally appear. The contents 119 | of the NOTICE file are for informational purposes only and 120 | do not modify the License. You may add Your own attribution 121 | notices within Derivative Works that You distribute, alongside 122 | or as an addendum to the NOTICE text from the Work, provided 123 | that such additional attribution notices cannot be construed 124 | as modifying the License. 125 | 126 | You may add Your own copyright statement to Your modifications and 127 | may provide additional or different license terms and conditions 128 | for use, reproduction, or distribution of Your modifications, or 129 | for any such Derivative Works as a whole, provided Your use, 130 | reproduction, and distribution of the Work otherwise complies with 131 | the conditions stated in this License. 132 | 133 | 5. Submission of Contributions. Unless You explicitly state otherwise, 134 | any Contribution intentionally submitted for inclusion in the Work 135 | by You to the Licensor shall be under the terms and conditions of 136 | this License, without any additional terms or conditions. 
137 | Notwithstanding the above, nothing herein shall supersede or modify 138 | the terms of any separate license agreement you may have executed 139 | with Licensor regarding such Contributions. 140 | 141 | 6. Trademarks. This License does not grant permission to use the trade 142 | names, trademarks, service marks, or product names of the Licensor, 143 | except as required for reasonable and customary use in describing the 144 | origin of the Work and reproducing the content of the NOTICE file. 145 | 146 | 7. Disclaimer of Warranty. Unless required by applicable law or 147 | agreed to in writing, Licensor provides the Work (and each 148 | Contributor provides its Contributions) on an "AS IS" BASIS, 149 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 150 | implied, including, without limitation, any warranties or conditions 151 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 152 | PARTICULAR PURPOSE. You are solely responsible for determining the 153 | appropriateness of using or redistributing the Work and assume any 154 | risks associated with Your exercise of permissions under this License. 155 | 156 | 8. Limitation of Liability. In no event and under no legal theory, 157 | whether in tort (including negligence), contract, or otherwise, 158 | unless required by applicable law (such as deliberate and grossly 159 | negligent acts) or agreed to in writing, shall any Contributor be 160 | liable to You for damages, including any direct, indirect, special, 161 | incidental, or consequential damages of any character arising as a 162 | result of this License or out of the use or inability to use the 163 | Work (including but not limited to damages for loss of goodwill, 164 | work stoppage, computer failure or malfunction, or any and all 165 | other commercial damages or losses), even if such Contributor 166 | has been advised of the possibility of such damages. 167 | 168 | 9. Accepting Warranty or Additional Liability. While redistributing 169 | the Work or Derivative Works thereof, You may choose to offer, 170 | and charge a fee for, acceptance of support, warranty, indemnity, 171 | or other liability obligations and/or rights consistent with this 172 | License. However, in accepting such obligations, You may act only 173 | on Your own behalf and on Your sole responsibility, not on behalf 174 | of any other Contributor, and only if You agree to indemnify, 175 | defend, and hold each Contributor harmless for any liability 176 | incurred by, or claims asserted against, such Contributor by reason 177 | of your accepting any such warranty or additional liability. 178 | 179 | END OF TERMS AND CONDITIONS 180 | 181 | APPENDIX: How to apply the Apache License to your work. 182 | 183 | To apply the Apache License to your work, attach the following 184 | boilerplate notice, with the fields enclosed by brackets "[]" 185 | replaced with your own identifying information. (Don't include 186 | the brackets!) The text should be enclosed in the appropriate 187 | comment syntax for the file format. We also recommend that a 188 | file or class name and description of purpose be included on the 189 | same "printed page" as the copyright notice for easier 190 | identification within third-party archives. 191 | 192 | Copyright 2019 Saptashwa 193 | 194 | Licensed under the Apache License, Version 2.0 (the "License"); 195 | you may not use this file except in compliance with the License. 
196 | You may obtain a copy of the License at 197 | 198 | http://www.apache.org/licenses/LICENSE-2.0 199 | 200 | Unless required by applicable law or agreed to in writing, software 201 | distributed under the License is distributed on an "AS IS" BASIS, 202 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 203 | See the License for the specific language governing permissions and 204 | limitations under the License. 205 | -------------------------------------------------------------------------------- /LassoandLin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import math 4 | import matplotlib.pyplot as plt 5 | import pandas as pd 6 | import numpy as np 7 | 8 | 9 | # difference of lasso and ridge regression is that some of the coefficients can be zero i.e. some of the features are 10 | # completely neglected 11 | 12 | from sklearn.linear_model import Lasso 13 | from sklearn.linear_model import LinearRegression 14 | from sklearn.datasets import load_breast_cancer 15 | from sklearn.cross_validation import train_test_split 16 | 17 | cancer = load_breast_cancer() 18 | #print cancer.keys() 19 | 20 | cancer_df = pd.DataFrame(cancer.data, columns=cancer.feature_names) 21 | 22 | #print cancer_df.head(3) 23 | 24 | X = cancer.data 25 | Y = cancer.target 26 | 27 | 28 | X_train,X_test,y_train,y_test=train_test_split(X,Y, test_size=0.3, random_state=31) 29 | 30 | 31 | lasso = Lasso() 32 | lasso.fit(X_train,y_train) 33 | train_score=lasso.score(X_train,y_train) 34 | test_score=lasso.score(X_test,y_test) 35 | coeff_used = np.sum(lasso.coef_!=0) 36 | 37 | 38 | 39 | print "training score:", train_score 40 | print "test score: ", test_score 41 | print "number of features used: ", coeff_used 42 | 43 | 44 | 45 | lasso001 = Lasso(alpha=0.01, max_iter=10e5) 46 | lasso001.fit(X_train,y_train) 47 | 48 | train_score001=lasso001.score(X_train,y_train) 49 | test_score001=lasso001.score(X_test,y_test) 50 | coeff_used001 = np.sum(lasso001.coef_!=0) 51 | 52 | 53 | print "training score for alpha=0.01:", train_score001 54 | print "test score for alpha =0.01: ", test_score001 55 | print "number of features used: for alpha =0.01:", coeff_used001 56 | 57 | 58 | 59 | 60 | 61 | lasso00001 = Lasso(alpha=0.0001, max_iter=10e5) 62 | lasso00001.fit(X_train,y_train) 63 | 64 | train_score00001=lasso00001.score(X_train,y_train) 65 | test_score00001=lasso00001.score(X_test,y_test) 66 | coeff_used00001 = np.sum(lasso00001.coef_!=0) 67 | 68 | 69 | 70 | 71 | print "training score for alpha=0.0001:", train_score00001 72 | print "test score for alpha =0.0001: ", test_score00001 73 | print "number of features used: for alpha =0.0001:", coeff_used00001 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | lr = LinearRegression() 83 | lr.fit(X_train,y_train) 84 | lr_train_score=lr.score(X_train,y_train) 85 | lr_test_score=lr.score(X_test,y_test) 86 | 87 | 88 | 89 | print "LR training score:", lr_train_score 90 | print "LR test score: ", lr_test_score 91 | #print "number of features used: ", coeff_used 92 | 93 | 94 | 95 | 96 | 97 | 98 | plt.subplot(1,2,1) 99 | plt.plot(lasso.coef_,alpha=0.7,linestyle='none',marker='*',markersize=5,color='red',label=r'Lasso; $\alpha = 1$',zorder=7) # alpha here is for transparency 100 | plt.plot(lasso001.coef_,alpha=0.5,linestyle='none',marker='d',markersize=6,color='blue',label=r'Lasso; $\alpha = 0.01$') # alpha here is for transparency 101 | 
#plt.plot(lr.coef_,alpha=0.7,linestyle='none',marker='o',markersize=5,color='green',label='Linear Regression',zorder=2) 102 | plt.xlabel('Coefficient Index',fontsize=16) 103 | plt.ylabel('Coefficient Magnitude',fontsize=16) 104 | plt.legend(fontsize=13,loc=4) 105 | plt.subplot(1,2,2) 106 | 107 | plt.plot(lasso.coef_,alpha=0.7,linestyle='none',marker='*',markersize=5,color='red',label=r'Lasso; $\alpha = 1$',zorder=7) # alpha here is for transparency 108 | plt.plot(lasso001.coef_,alpha=0.5,linestyle='none',marker='d',markersize=6,color='blue',label=r'Lasso; $\alpha = 0.01$') # alpha here is for transparency 109 | plt.plot(lasso00001.coef_,alpha=0.8,linestyle='none',marker='v',markersize=6,color='black',label=r'Lasso; $\alpha = 0.0001$') # alpha here is for transparency 110 | plt.plot(lr.coef_,alpha=0.7,linestyle='none',marker='o',markersize=5,color='green',label='Linear Regression',zorder=2) 111 | 112 | 113 | #plt.plot(lr.coef_,alpha=0.4,linestyle='none',marker='o',markersize=7,color='green',label='Linear Regression') 114 | plt.xlabel('Coefficient Index',fontsize=16) 115 | plt.ylabel('Coefficient Magnitude',fontsize=16) 116 | plt.legend(fontsize=13,loc=4) 117 | plt.tight_layout() 118 | plt.show() 119 | -------------------------------------------------------------------------------- /PCA_Muller.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # this program check the effect of pca (dimensionality reduction) on cancer data 4 | 5 | 6 | # check the histograms at the beginning. Cancer Data set has 30 features 7 | 8 | from sklearn.datasets import load_breast_cancer 9 | from sklearn.cross_validation import train_test_split 10 | from matplotlib.pyplot import matplotlib 11 | from sklearn.preprocessing import MinMaxScaler 12 | from sklearn.preprocessing import StandardScaler 13 | from sklearn.decomposition import PCA 14 | 15 | 16 | 17 | 18 | import matplotlib.pyplot as plt 19 | import numpy as np 20 | import seaborn as sns 21 | import pandas as pd 22 | 23 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 24 | #+ effect of features on target 25 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 26 | cancer=load_breast_cancer() 27 | cancer_df=pd.DataFrame(cancer.data,columns=cancer.feature_names) 28 | 29 | #print cancer_df[feature_mean].corr() 30 | 31 | 32 | print cancer_df.info() 33 | #print cancer_df.head(4) 34 | 35 | feature_mean=list(cancer_df.columns[0:10]) 36 | feature_worst=list(cancer_df.columns[20:31]) 37 | 38 | #print len(cancer.target) 39 | 40 | #print cancer.DESCR # it's better to know about the data-set 41 | #print len(cancer.data[cancer.target==1]) # it's confusing because benign is listed as 1 42 | 43 | 44 | 45 | 46 | #print cancer.feature_names 47 | 48 | #plt.subplot(1,2,1) 49 | #plt.scatter(cancer_df['worst symmetry'],cancer_df['worst texture'],s=cancer_df['worst area']*0.05,color='magenta',label='check',alpha=0.3) 50 | #plt.xlabel('Worst Symmetry',fontsize=12) 51 | #plt.ylabel('Worst Texture',fontsize=12) 52 | #plt.subplot(1,2,2) 53 | #plt.scatter(cancer_df['mean radius'],cancer_df['mean concave points'],s=cancer_df['mean area']*0.05,color='purple',label='check',alpha=0.3) 54 | #plt.xlabel('Mean Radius',fontsize=12) 55 | #plt.ylabel('Mean Concave Points',fontsize=12) 56 | #plt.tight_layout() 57 | #plt.show() 58 | 59 | ''' 60 | 61 | fig,axes =plt.subplots(10,3, figsize=(12, 9)) 62 | 63 | malignant=cancer.data[cancer.target==0] 64 | benign=cancer.data[cancer.target==1] 65 | 66 | 67 | 68 | 
ax=axes.ravel() 69 | 70 | for i in range(30): 71 | _,bins=np.histogram(cancer.data[:,i],bins=40) 72 | ax[i].hist(malignant[:,i],bins=bins,color='r',alpha=.5) 73 | ax[i].hist(benign[:,i],bins=bins,color='g',alpha=0.3) 74 | ax[i].set_title(cancer.feature_names[i],fontsize=9) 75 | ax[i].axes.get_xaxis().set_visible(False) 76 | ax[i].set_yticks(()) 77 | 78 | ax[0].legend(['malignant','benign'],loc='best',fontsize=8) 79 | plt.tight_layout() 80 | #fig.subplots_adjust(lspace=2) 81 | #plt.suptitle("Cancer Histograms", fontsize=4) 82 | 83 | plt.show() # not given in Muller book but it's necessary to see the plots 84 | 85 | # from the plots 1>worst smoothness and 2> worst perimeter produces well separated histograms # important features to distinguish between 86 | # malignant and benign tumors 87 | 88 | # before aplying pca it's necessary to process the features so that they lie within similar range (StandardScaler or MinMaxScaler) 89 | 90 | ''' 91 | #+++++++++++++++++++++++++++++++ 92 | #+ before PCA scale the data 93 | #+++++++++++++++++++++++++++++++ 94 | 95 | scaler = StandardScaler() # standardized feature by removing mean and scaled to unit variance 96 | scaler.fit(cancer.data) 97 | 98 | X_scaled = scaler.transform(cancer.data) 99 | 100 | 101 | print "after scaling", X_scaled.min(axis=0) 102 | 103 | 104 | 105 | #+++++++++++++++++++++++++++++++++++++++++++ 106 | #+ PCA (Principal component analysis) 107 | #+++++++++++++++++++++++++++++++++++++++++++ 108 | pca = PCA(n_components=3) # instantiate the PCA and keep the first n components 109 | pca.fit(X_scaled) 110 | # now transform 111 | 112 | x_pca=pca.transform(X_scaled) 113 | 114 | # check the shape 115 | 116 | print x_pca.shape 117 | #print pca.explained_variance 118 | 119 | ex_variance=np.var(x_pca,axis=0) 120 | 121 | ex_variance_rat=ex_variance/np.sum(ex_variance) 122 | 123 | print ex_variance_rat 124 | 125 | ''' 126 | #+++++++++++++++++++++++++++++ 127 | #+ plot the pcs 128 | #+++++++++++++++++++++++++++++ 129 | 130 | #fig=plt.figure(figsize=(6,4)) 131 | 132 | 133 | #p=plt.scatter(x_pca[:,2],x_pca[:,0],s=40,c=cancer.target) # c =cancer.target tells that minimum/maximum values of c corresponds to bottom/up of the plots 134 | 135 | 136 | 137 | Xax=x_pca[:,0] 138 | Yax=x_pca[:,1] 139 | labels=cancer.target 140 | #labels=['Malignant','Benign'] 141 | cdict={0:'red',1:'green'} 142 | labl={0:'Malignant',1:'Benign'} 143 | marker={0:'*',1:'o'} 144 | alpha={0:.3, 1:.5} 145 | fig,ax=plt.subplots(figsize=(7,5)) 146 | fig.patch.set_facecolor('white') 147 | for l in np.unique(labels): 148 | ix=np.where(labels==l) 149 | ax.scatter(Xax[ix],Yax[ix],c=cdict[l],label=labl[l],s=40,marker=marker[l],alpha=alpha[l]) 150 | 151 | 152 | 153 | 154 | plt.xlabel("First Principal Component",fontsize=14) 155 | plt.ylabel("Second Principal Component",fontsize=14) 156 | 157 | plt.legend() 158 | 159 | 160 | plt.show() # Malignant data are way more spreadout than benign data 161 | 162 | 163 | 164 | 165 | #print pca.components_.shape 166 | 167 | ''' 168 | 169 | #+++++++++++++++++++++++++++++++++++++++ 170 | #+ visualize the effect of PCA 171 | #+++++++++++++++++++++++++++++++++++++++ 172 | 173 | 174 | #plt.matshow(pca.components_,cmap='viridis') 175 | #plt.yticks([0,1,2],['1st Comp','2nd Comp','3rd Comp'],fontsize=10) 176 | #plt.colorbar() 177 | #plt.xticks(range(len(cancer.feature_names)),cancer.feature_names,rotation=65,ha='left') 178 | #plt.tight_layout() 179 | #plt.show()# since the direction of the arrow doesn't matter in pca plot, it can be concluded that the features 
have a strong correlation 180 | 181 | 182 | 183 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++ 184 | #+ finally check the correlation of the features 185 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++ 186 | 187 | #correlation = cancer.feature_.corr() 188 | 189 | s=sns.heatmap(cancer_df[feature_worst].corr(),cmap='coolwarm') # fantastic tool to study the features 190 | s.set_yticklabels(s.get_yticklabels(),rotation=30,fontsize=7) 191 | s.set_xticklabels(s.get_xticklabels(),rotation=30,fontsize=7) 192 | plt.show()# super happy to complete this project 193 | 194 | 195 | # finished 196 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Machine Learning and Data Analysis 2 | 3 | 4 | 5 | ### This repo contains introduction and examples of some of the most important machine learning and data-analysis techniques. 6 | #### Filenames are preceded by DDMMYY. For descriptions and more check the Wiki Page. 7 | #### Dedicated _Deep Learning Repository_ similar to this is [here](https://github.com/suvoooo/Learn-TensorFlow). 8 | ---------------------------------------------------------------------------------------------------------------------------- 9 | 10 | #### Libraries 11 | ![Python](https://img.shields.io/badge/python-3670A0?style=for-the-badge&logo=python&logoColor=ffdd54) ![NumPy](https://img.shields.io/badge/numpy-%23013243.svg?style=for-the-badge&logo=numpy&logoColor=white) ![Pandas](https://img.shields.io/badge/pandas-%23150458.svg?style=for-the-badge&logo=pandas&logoColor=white) ![scikit-learn](https://img.shields.io/badge/scikit--learn-%23F7931E.svg?style=for-the-badge&logo=scikit-learn&logoColor=white) ![TensorFlow](https://img.shields.io/badge/TensorFlow-%23FF6F00.svg?style=for-the-badge&logo=TensorFlow&logoColor=white) ![SciPy](https://img.shields.io/badge/SciPy-%230C55A5.svg?style=for-the-badge&logo=scipy&logoColor=%white) ![pymc3](https://drive.google.com/uc?export=view&id=1oi-5--D8kcgJdVV_GAI-pZq-ZKr0STOX) 12 | 13 | 14 | ----------------------------------------------------------------------------------------------------------------------------------------- 15 | 16 | *PCA_Muller.py 190818:* Principal component analysis example with breast cancer data-set. 17 | 18 | *270918: RidgeandLin.py, LassoandLin.py:* Lasso and Ridge regression examples. 19 | 20 | *081018: bank.csv*, data set of selling products of a portuguese company to random customers over phone call(s). Data-set description is available [here](http://archive.ics.uci.edu/ml/datasets/Bank+Marketing). 21 | 22 | *161018: gender_purchase.csv*, data-set of two columns describing customers buying a product depending on gender. 23 | 24 | *111118: winequality-red.csv*, red wine data set, where the output is the quality column which ranges from 0 to 10. 25 | 26 | *121118: pipelineWine.py*, A simple example of applying pipeline and gridsearchCV together using the red wine data. 27 | 28 | *24112018: lagmult.py*, This program just demonstrate a simple constrained optimization problem using figures. 29 | 30 | *11122018: Consumer_Complaints_short.csv*, 3 columns describing the complaints, product_label and category. Complete file can be obtained from [Govt.data](https://catalog.data.gov/dataset/consumer-complaint-database/resource/2f297213-7198-4be1-af1e-2d2623e7f6e9). 
31 | 32 | *13122018: Text-classification_compain_suvo.py*, Classify the consumer complaints data, which is already described above. 33 | 34 | 1912018: SVMdemo.py*, this program shows the effect of using RBF kernel to map from 2d space to 3d space. Animation requires ffmpeg in unix system. 35 | 36 | *05032019: IBM_Python_Web_Scrapping.ipynb*, Deals with basic web scrapping, string handling, image manipulation. 37 | 38 | *06042019: datacleaning*, Folder containing files and images related to data cleaning with pandas. 39 | 40 | *08062010: DBSCAN_Complete*, Folder containing files and images related to application of DBSCAN algorithm to cluster Weather Stations in Canada. 41 | 42 | *13072019: SVM_Decision_Boundary*, Pipeline + GridSearchCV were performed to find best-fit parameters for SVM and then decision function contours of SVM classifier for binary classification are plotted. 43 | 44 | *28122019: DecsTree*, Folder contains notebook using a decision tree classifier on the [Bank Marketing Data-Set](http://archive.ics.uci.edu/ml/datasets/Bank+Marketing). 45 | 46 | *07032020: Conjugate Prior*, Folder contains a notebook where concept of conjugate prior is discussed including an introduction to [PyMC3](https://docs.pymc.io/). 47 | 48 | *29052020: ExMax_Algo*, Folder contains a notebook completely explaining the Expectation Maximization algorithm. 49 | 50 | *11092020: AdaptiveLoss.ipynb*, File contains description and a simple implemetation of robust and adaptive loss function. [Original Paper by J. Barron](https://arxiv.org/pdf/1701.03077.pdf). More details on [TDS](https://medium.com/@saptashwa/the-most-awesome-loss-function-172ffc106c99). 51 | 52 | *31092020: pima_diabetes.ipynb*, file contains description of data preparation and choosing best machine learning algorithm for binary classification task. 53 | Little more details on [kaggle kernel](https://www.kaggle.com/suvoooo/eda-and-choosing-best-classifier-on-pima-diabetes). 54 | 55 | 56 | *15112020: terrorism_kaggle.ipynb*, Notebook contains elaborate examples on how to think about problems and interpret large scale data using [Global Terrorism Database](https://www.kaggle.com/START-UMD/gtd). Apart from Pandas Groupby, Crosstab methods I have also used Folium, Basemap libraries for visualizing Leaflet map and 2D data on maps respectively. More on [The Startup](https://medium.com/swlh/practical-data-analysis-using-pandas-global-terrorism-database-20b29009adad). 57 | 58 | *15022021: FocalLoss_Ex.ipynb*, Notebook contains explanation on detail of how Focal Loss works. Please read the original [Focal Loss paper](https://arxiv.org/abs/1708.02002). Example of implementing Focal Loss using Tensorflow is also shown. For more detail check the post on [TDS](https://towardsdatascience.com/a-loss-function-suitable-for-class-imbalanced-data-focal-loss-af1702d75d75). 59 | 60 | 61 | *19062021: Augly_Try.ipynb*, Notebook contains examples of image augmentation using [Facebook's Augly](https://ai.facebook.com/blog/augly-a-new-data-augmentation-library-to-help-build-more-robust-ai-models/) Library. For more detail check the notebook and [TDS](https://towardsdatascience.com/facebook-just-launched-the-coolest-augmentation-library-augly-3910c05db505) post. 62 | 63 | *24122021: NB_LogisticReg.ipynb*, Notebook clearly explains connection between Gaussian Naive Bayes and Logistic Regression and determine parameters of Logistic Regression starting from GNB. 
The notebook is self-explanatory but you can also check the [TDS post](https://towardsdatascience.com/connecting-naive-bayes-and-logistic-regression-binary-classification-ce69e527157f). 64 | 65 | 66 | 67 | 68 | ------------------------ 69 | 70 | ## License 71 | 72 | Distributed under Apache License. Read `LICENSE.md` for details. 73 | 74 | ----------------------------- 75 | ## Contacts 76 | 77 | [Saptashwa](https://www.linkedin.com/in/saptashwa/). 78 | 79 | 80 | 81 | 82 | -------------------------------------------------------------------------------- /RidgeandLin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import math 4 | import matplotlib.pyplot as plt 5 | import seaborn as sns 6 | import numpy as np 7 | import pandas as pd 8 | import matplotlib 9 | matplotlib.rcParams.update({'font.size': 12}) 10 | 11 | 12 | from sklearn.datasets import load_boston 13 | from sklearn.datasets import load_breast_cancer 14 | from sklearn.cross_validation import train_test_split 15 | from sklearn.linear_model import LinearRegression 16 | from sklearn.linear_model import Ridge 17 | 18 | boston=load_boston() 19 | boston_df=pd.DataFrame(boston.data,columns=boston.feature_names) 20 | #print boston_df.info() 21 | 22 | X=boston_df 23 | #print type(X) # X is in pandas data-frame format 24 | #print X[0:10] check 25 | Y=boston.target 26 | #print Y[0:10] 27 | #print "target variable type:", type(Y) 28 | 29 | # add another column that contains the house prices, which in scikit-learn datasets are considered as the target 30 | boston_df['Price']=boston.target 31 | #print boston_df.head(3) 32 | 33 | 34 | newX = boston_df 35 | #print newX[0:5] 36 | 37 | newX=boston_df.drop('Price',axis=1) 38 | print newX[0:3] 39 | newY=boston_df['Price'] 40 | 41 | #print type(newY)# pandas core frame 42 | 43 | X_train,X_test,y_train,y_test=train_test_split(newX,newY,test_size=0.3,random_state=3) 44 | print len(X_test), len(y_test) 45 | 46 | 47 | lr = LinearRegression() 48 | lr.fit(X_train, y_train) 49 | 50 | 51 | rr = Ridge(alpha=0.01) # the higher the alpha value, the more the coefficients are restricted; with low alpha the coefficients are barely 52 | # restricted, there is more generalization, and in this case ridge regression resembles linear regression 53 | rr.fit(X_train, y_train) 54 | 55 | 56 | 57 | rr100 = Ridge(alpha=100) 58 | rr100.fit(X_train, y_train) 59 | 60 | train_score=lr.score(X_train, y_train) 61 | test_score=lr.score(X_test, y_test) 62 | 63 | 64 | Ridge_train_score = rr.score(X_train,y_train) 65 | Ridge_test_score = rr.score(X_test, y_test) 66 | 67 | 68 | Ridge_train_score100 = rr100.score(X_train,y_train) 69 | Ridge_test_score100 = rr100.score(X_test, y_test) 70 | 71 | 72 | 73 | 74 | print "linear regression train score:", train_score 75 | print "linear regression test score:", test_score 76 | print "ridge regression train score low alpha:", Ridge_train_score 77 | print "ridge regression test score low alpha:", Ridge_test_score 78 | print "ridge regression train score high alpha:", Ridge_train_score100 79 | print "ridge regression test score high alpha:", Ridge_test_score100 80 | 81 | 82 | 83 | 84 | # for some reason the test scores are higher than the training scores, possibly because of low sample numbers; this can be checked by changing the test_size fraction from 0.2 to 0.3, where the test score eventually falls below the training score 85 | 86 | 87 | plt.plot(rr.coef_,alpha=0.7,linestyle='none',marker='*',markersize=5,color='red',label=r'Ridge; $\alpha = 0.01$',zorder=7) # alpha here is for
transparency 88 | plt.plot(rr100.coef_,alpha=0.5,linestyle='none',marker='d',markersize=6,color='blue',label=r'Ridge; $\alpha = 100$') # alpha here is for transparency 89 | 90 | plt.plot(lr.coef_,alpha=0.4,linestyle='none',marker='o',markersize=7,color='green',label='Linear Regression') 91 | plt.xlabel('Coefficient Index',fontsize=16) 92 | plt.ylabel('Coefficient Magnitude',fontsize=16) 93 | plt.legend(fontsize=13,loc=4) 94 | plt.show() 95 | -------------------------------------------------------------------------------- /SVM_Decision_Boundary/Cancer_labels_PCAs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/SVM_Decision_Boundary/Cancer_labels_PCAs.png -------------------------------------------------------------------------------- /SVM_Decision_Boundary/ClassifyMalignant_Benign2D_Decs_Funct.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/SVM_Decision_Boundary/ClassifyMalignant_Benign2D_Decs_Funct.png -------------------------------------------------------------------------------- /SVM_Decision_Boundary/ClassifyMalignant_Benign2D_Decs_FunctG0d01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/SVM_Decision_Boundary/ClassifyMalignant_Benign2D_Decs_FunctG0d01.png -------------------------------------------------------------------------------- /SVM_Decision_Boundary/ClassifyMalignant_Benign2D_Decs_FunctG10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/SVM_Decision_Boundary/ClassifyMalignant_Benign2D_Decs_FunctG10.png -------------------------------------------------------------------------------- /SVM_Decision_Boundary/Pairplots_Area_Texture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/SVM_Decision_Boundary/Pairplots_Area_Texture.png -------------------------------------------------------------------------------- /SVM_Decision_Boundary/cancer_jointplots1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/SVM_Decision_Boundary/cancer_jointplots1.png -------------------------------------------------------------------------------- /SVMdemo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import numpy as np 4 | import math 5 | import sklearn 6 | import matplotlib.pyplot as plt 7 | from mpl_toolkits import mplot3d 8 | from mpl_toolkits.mplot3d import Axes3D 9 | from matplotlib import animation 10 | from sklearn.datasets.samples_generator import make_circles 11 | 12 | X,y = make_circles(90, factor=0.2, noise=0.1) 13 | #print type(X) 14 | 15 | 16 | #plt.scatter(X[:,0],X[:,1], c=y, s=50, cmap='seismic') 17 | 18 | r = np.exp(-(X**2).sum(1)) 19 | zaxis = [0.2,0.4,0.6,0.8, 1.0] 20 | zaxislabel = [r'0.2',r'0.4', r'0.6', r'0.8', r'1.0'] 21 | 22 | fig = plt.figure() 23 | ax = Axes3D(fig) 24 | 25 | 26 | def plot3dim(): 27 | 
#ax=plt.subplot(111, projection='3d') 28 | ax.scatter(X[:,0], X[:,1], r, c=y, s=50, cmap='seismic') 29 | #ax.view_init(elev=elev,azim=azim) 30 | ax.set_xlabel('X') 31 | ax.set_ylabel('y') 32 | ax.set_zlabel('!! SHAKE !!', fontsize=15, labelpad=-1, color='lime') 33 | ax.set_zticklabels(zaxislabel, fontsize=7) 34 | ax.set_zticks(zaxis) 35 | ax.grid(False) 36 | return fig, 37 | 38 | def animate(k): 39 | ax.view_init(elev=k,azim=30) 40 | #return fig, 41 | 42 | ani = animation.FuncAnimation(fig, animate, init_func=plot3dim, frames=360, interval=30, blit=False) 43 | 44 | #ani.save('SVManim.mp4', fps=30, dpi=200, extra_args=['-vcodec', 'libx264']) 45 | 46 | #plot3dim(elev=10, azim=-20, X=X, y=y) 47 | 48 | plt.show() 49 | -------------------------------------------------------------------------------- /SklearnV1d2/old_Sklearn.py: -------------------------------------------------------------------------------- 1 | import time, psutil 2 | import matplotlib.pyplot as plt 3 | import sklearn 4 | print ('sklearn version: ', sklearn.__version__) # 0.24 5 | 6 | from sklearn.datasets import load_wine 7 | from sklearn.datasets import fetch_openml 8 | from sklearn.model_selection import train_test_split 9 | from sklearn.preprocessing import OrdinalEncoder 10 | from sklearn.pipeline import make_pipeline 11 | from sklearn.ensemble import GradientBoostingRegressor 12 | from sklearn.compose import ColumnTransformer 13 | 14 | ######################## 15 | X, y = load_wine(as_frame=True, return_X_y=True) # available from version >=0.23 16 | ######################## 17 | 18 | X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, 19 | random_state=0) 20 | X_train.head(3) 21 | 22 | 23 | ### standardizing transformation that leads to a numpy array (dataframe in newer release) 24 | 25 | from sklearn.preprocessing import StandardScaler 26 | 27 | scaler = StandardScaler() #.set_output(transform='pandas') #change here, doesn't exist in versions below 1.2 28 | # AttributeError: 'StandardScaler' object has no attribute 'set_output' 29 | 30 | scaler.fit(X_train) 31 | X_test_scaled = scaler.transform(X_test) 32 | print ('type after scaling: ', type(X_test_scaled)) 33 | # X_test_scaled.head(3) # throws error 34 | 35 | 36 | ###fetch openml (new addition in ver 1.2 is parser='pandas', memory efficient) 37 | start_t = time.time() 38 | X, y = fetch_openml("titanic", version=1, as_frame=True, return_X_y=True,) # parser="pandas") # parser as a keyword in the 1.2 version 39 | X = X.select_dtypes(["number", "category"]).drop(columns=["body"]) 40 | print ('check types: ', type(X), '\n', X.head(3)) 41 | print ('check shapes: ', X.shape) 42 | end_t = time.time() 43 | print ('time taken: ', end_t-start_t) 44 | 45 | process_names = [proc.name() for proc in psutil.process_iter()] 46 | print (process_names) 47 | print('cpu percent: ', psutil.cpu_percent()) 48 | 49 | ######################################### 50 | ###### for simplicity ignore the nans 51 | ########################################## 52 | print ('check for nans in columns: ', '\n', X.isna().sum()) 53 | # drop all the nans to make the data suitable for GradientBoostingRegressor 54 | X_NotNan = X.dropna(how='any', inplace=False) 55 | print ('check shapes after dropping nans: ', X_NotNan.shape) 56 | 57 | nonan_indices = X_NotNan.index.to_list() 58 | y_NotNan = y[y.index.isin(nonan_indices)] 59 | print ('check shape y: ', y_NotNan.shape) 60 | print ('check for indices: ', X_NotNan.index.to_list()) 61 | 62 | 63 | #### pipeline for encoder + gradient boosting 64 | from
sklearn.preprocessing import OrdinalEncoder 65 | from sklearn.pipeline import make_pipeline 66 | from sklearn.ensemble import GradientBoostingRegressor #HistGradientBoostingRegressor 67 | from sklearn.compose import ColumnTransformer 68 | 69 | categorical_features = ["pclass", "sex", "embarked"] 70 | model = make_pipeline(ColumnTransformer(transformers=[("cat", OrdinalEncoder(), categorical_features)], 71 | remainder="passthrough",), 72 | GradientBoostingRegressor(random_state=0),).fit(X_NotNan, y_NotNan) 73 | 74 | # gradientboosting doesn't work with nan entries 75 | 76 | 77 | ########################## 78 | # partial dependence 79 | ########################## 80 | 81 | from sklearn.inspection import PartialDependenceDisplay 82 | from sklearn.inspection import plot_partial_dependence 83 | 84 | #fig, ax = plt.subplots(figsize=(14, 4), constrained_layout=True) 85 | #disp = PartialDependenceDisplay.from_estimator(model, 86 | # X_NotNan, features=["age", "sex", ("pclass", "sex")], 87 | # categorical_features=categorical_features, ax=ax,) 88 | # from_estimator method is non-existent in older versions, but what about categorical features 89 | #fig.savefig('./part_disp_old.png', dpi=200) 90 | 91 | fig, ax = plt.subplots(figsize=(12, 6)) 92 | ax.set_title('GradientBoostingRegressor') 93 | GBR_disp = plot_partial_dependence(model, X_NotNan, ['age', 'fare', ('age', 'fare')], ax=ax) 94 | fig.savefig('./part_disp_old_NotCat.png', dpi=200) 95 | 96 | #fig, ax = plt.subplots(figsize=(12, 6)) 97 | #ax.set_title('GradientBoostingRegressor') 98 | #GBR_disp = plot_partial_dependence(model, X_NotNan, ['age', 'sex', ('age', 'sex')], ax=ax) 99 | #fig.savefig('./part_disp_old_wCat.png', dpi=200) 100 | # valueerror 101 | -------------------------------------------------------------------------------- /Text-classification_Complain_Suvo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # checking and learning text classification, following an amazing post by Susan Li on Towards Data Science 4 | 5 | # the problem here is to assign a category when a new complaint comes in 6 | 7 | import math 8 | import matplotlib.pyplot as plt 9 | import pandas as pd 10 | import numpy as np 11 | 12 | 13 | 14 | from io import StringIO 15 | from sklearn.feature_extraction.text import TfidfVectorizer 16 | from sklearn.feature_selection import chi2 17 | from sklearn.model_selection import train_test_split, cross_val_score 18 | from sklearn.feature_extraction.text import CountVectorizer 19 | from sklearn.feature_extraction.text import TfidfTransformer 20 | from sklearn.naive_bayes import MultinomialNB 21 | from sklearn.linear_model import LogisticRegression 22 | from sklearn.ensemble import RandomForestClassifier 23 | from sklearn.svm import LinearSVC 24 | #from sklearn.model 25 | 26 | 27 | 28 | 29 | 30 | 31 | complain_df = pd.read_csv('Consumer_Complaints.csv') 32 | 33 | #print complain_df.head(3) 34 | 35 | # the columns required for text classification are product (which will be the label) and consumer complaint narrative (which will be the feature(s)) 36 | #print complain_df.columns.values 37 | df_columns = complain_df.columns.values 38 | #print df_columns 39 | #print type(df_columns) 40 | df_columns[1] = 'product_label' 41 | 42 | complain_df.columns = df_columns # 'product' has been changed to 'product_label' 43 | 44 | #print complain_df.columns.values 45 | 46 | 47 | #Product_df=complain_df[['Consumer complaint narrative']] 48 | #print Product_df.head(5) 49 | 50 |
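# Aside: renaming by mutating the array returned by complain_df.columns.values
# works, but DataFrame.rename is the more idiomatic way to do the same thing.
# A minimal sketch, assuming the original column is named 'Product':
# complain_df = complain_df.rename(columns={'Product': 'product_label'})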
#++++++++++++++++++++++++++++++++++++++++++++++ 51 | # input: consumer_complaint_narrative 52 | # 53 | # example: "I have outdated information on my credit 54 | # report that I have previously disputed that...." 55 | # 56 | # output: product 57 | # example: Credit reporting 58 | #++++++++++++++++++++++++++++++++++++++++++++++ 59 | 60 | 61 | columns = ['product_label', 'Consumer complaint narrative'] 62 | 63 | # we choose a new data-frame with only these columns, as 'Consumer complaint narrative' 64 | # represents the feature and 'product_label' is indeed what we want to predict, i.e. the label 65 | 66 | new_df = complain_df[columns] 67 | #print new_df.head(3) 68 | new_df_columns = new_df.columns.values 69 | #print new_df_columns 70 | new_df_columns[1]='consumer_complaint_narrative' 71 | new_df.columns = new_df_columns 72 | #print new_df.shape # (1144848,2) 73 | #print new_df['product_label'].unique() # check the unique product labels 74 | # drop rows that have NaN values in Consumer complaint narrative 75 | 76 | new_df = new_df.dropna(axis=0, how='any') # drop the rows which contain missing values; any NA 77 | #print new_df.shape #(332361, 2) 78 | 79 | # create a new column where product_label is categorized. 80 | 81 | new_df['category'] = new_df['product_label'].factorize()[0] # to quote pandas, 'factorize is useful for obtaining a numeric representation 82 | # of an array when all that matters is identifying distinct values'; available as Series.factorize(). new_df['product_label'] is the series here. 83 | 84 | #print new_df.head(4) 85 | 86 | 87 | 88 | category_id_df=new_df[['product_label','category']].drop_duplicates().sort_values('category') # drop duplicates matching the columns product_label and category and then sort according to category; this will 89 | # be used later 90 | 91 | 92 | #print category_id_df.shape # (18,2) 93 | #print category_id_df.head(18) 94 | 95 | 96 | category_to_id=dict(category_id_df.values) 97 | #print category_to_id 98 | 99 | 100 | #for k, d in sorted(category_to_id.iteritems()): # we will use this later 101 | # print k, 'correspond to', d # sweeeeet 102 | 103 | 104 | 105 | 106 | #__________________________________________________________________ 107 | #+ plot to see that product label is biased towards 108 | # credit complaints 109 | #__________________________________________________________________ 110 | #fig = plt.figure(figsize=(11.,10.)) 111 | #fig = plt.figure() 112 | #fig.patch.set_facecolor('white') 113 | 114 | #new_df.groupby('product_label').consumer_complaint_narrative.count().plot.bar(ylim=0, rot=75, fontsize=7) 115 | #plt.show() # the plot shows that a few of the product labels totally dominate the number of complaints, and we need to keep our model from 116 | # being biased towards the majority classes. It could be a problem when handling data-sets for fraud detection or cancer prediction, but here 117 | # it helps since the classifier may give high prediction accuracy for the majority of the labels.
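# A minimal illustration of what factorize() returns -- integer codes plus the
# unique values -- on a toy list (not the complaints data):
#
# codes, uniques = pd.factorize(['debt', 'mortgage', 'debt', 'credit'])
# codes   -> array([0, 1, 0, 2])
# uniques -> array(['debt', 'mortgage', 'credit'], dtype=object)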
118 | #________________________________________________________________________________ 119 | # 120 | 121 | 122 | 123 | 124 | 125 | 126 | # +++++++++++++++++++++++++++++++++++++++++++++++++ 127 | #+ How to represent text 128 | # +++++++++++++++++++++++++++++++++++++++++++++++++ 129 | print "shape of new_df", new_df.shape 130 | print new_df.head(4) 131 | new_df=new_df[100:25000] # we select a smaller data-set, otherwise the tfidf method will cause a segmentation fault (memory error) 132 | new_df.to_csv("Consumer_Complaints_short.csv", sep='\t', encoding='utf-8') 133 | print "after selecting few rows", new_df.shape 134 | print len(new_df['category'].unique()) # check that even selecting a smaller sample won't reduce the number of unique categories 135 | # learning algorithms and classifiers cannot directly process text in its original form, as most of them depend on 136 | # numerical feature vectors of fixed size rather than text of variable length, so the texts need to be converted 137 | # into a more manageable representation. 138 | 139 | # the usual method is the bag-of-words model, where occurrences of words are counted but their ordering is ignored. 140 | 141 | # we will use TfidfVectorizer, which converts a collection of raw documents to a matrix of tf-idf features. 142 | 143 | 144 | # sublinear_tf is set to True to use a logarithmic form for frequency. 145 | # min_df is the minimum number of documents a word must be present in to be kept. 146 | # norm is set to l2, to ensure all our feature vectors have a euclidean norm of 1. 147 | # stop_words removes words like "a", "the" from the files (here the consumer complaints). 148 | 149 | 150 | tfidf = TfidfVectorizer(sublinear_tf=True, min_df=5, norm='l2', ngram_range=(1,2), stop_words = 'english') 151 | 152 | 153 | 154 | 155 | features_text = tfidf.fit_transform(new_df.consumer_complaint_narrative).toarray() 156 | print features_text.shape # so here we see 24900 complaints which are represented by 63530 features 157 | labels = new_df.category 158 | 159 | 160 | 161 | 162 | #for Product, category in sorted(category_to_id.items()): 163 | # features_chi2=chi2(features_text, labels==category) 164 | # indices = np.argsort(features_chi2[0]) 165 | # feature_names = np.array(tfidf.get_feature_names())[indices] 166 | # unigrams = [v for v in feature_names if len(v.split(' '))==1] 167 | # bigrams = [v for v in feature_names if len(v.split(' '))==2] 168 | # print "# '{}': ".format(Product) 169 | # print "Most correlated unigrams:\n. {}".format('\n.'.join(unigrams[-2:])) 170 | # print "Most correlated bigrams: \n. {}".format('\n.'.join(bigrams[-2:])) 171 | 172 | 173 | 174 | 175 | 176 | 177 | X_train, X_test, y_train, y_test = train_test_split(new_df['consumer_complaint_narrative'], new_df['product_label'], test_size=0.3, random_state=30) 178 | 179 | count_vect = CountVectorizer() 180 | print "train and test length", len(X_train), len(X_test) 181 | 182 | 183 | 184 | X_train_count = count_vect.fit_transform(X_train) 185 | #print "train_count", len(X_train_count) 186 | 187 | 188 | X_test_count = count_vect.transform(X_test) # do not apply fit method on test data.
Only transform it to a matrix of token counts using CountVectorizer 189 | 190 | tfidf_transform = TfidfTransformer() 191 | 192 | X_train_tfidf = tfidf_transform.fit_transform(X_train_count) 193 | X_test_tfidf = tfidf_transform.transform(X_test_count) # transform only; the transformer is already fitted on the training counts 194 | 195 | 196 | clf = MultinomialNB().fit(X_train_tfidf, y_train) 197 | 198 | 199 | #++++++++++++++++++++++++++++++++++++ 200 | #+ prediction time 201 | #++++++++++++++++++++++++++++++++++++ 202 | print clf.predict(count_vect.transform(["This company refuses to provide me verification and validation of debt per my right under the FDCPA. I do not believe this debt is mine."])) 203 | 204 | print(clf.predict(count_vect.transform(["I am disputing the inaccurate information the Chex-Systems has on my credit report. I initially submitted a police report on XXXX/XXXX/16 and Chex Systems only deleted the items that I mentioned in the letter and not all the items that were actually listed on the police report. In other words they wanted me to say word for word to them what items were fraudulent. The total disregard of the police report and what accounts that it states that are fraudulent. If they just had paid a little closer attention to the police report I would not been in this position now and they would n't have to research once again. I would like the reported information to be removed : XXXX XXXX XXXX"]))) 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 215 | #+ Final part : Selecting which algorithm works best 216 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 217 | 218 | # create a list of models 219 | 220 | test_ML_Models = [MultinomialNB(), LinearSVC(), LogisticRegression(), RandomForestClassifier(n_estimators=1000, max_depth=3, random_state=0)] #n_estimators: number of trees in the forest; max_depth: maximum depth of each tree 221 | 222 | test_ML_Models_columns = [] 223 | 224 | test_ML_df = pd.DataFrame(columns=test_ML_Models_columns) 225 | 226 | print test_ML_df.head(3) 227 | 228 | 229 | row_index = 0 230 | 231 | for algorithm in test_ML_Models: 232 | predicted = algorithm.fit(X_train_tfidf, y_train)#.predict(X_test) 233 | test_ML_Models_name = algorithm.__class__.__name__ 234 | test_ML_df.loc[row_index,'test_ML_Models_name'] = test_ML_Models_name 235 | test_ML_df.loc[row_index, 'Train Accuracy'] = round(algorithm.score(X_train_tfidf,y_train),3) 236 | test_ML_df.loc[row_index, 'Test Accuracy'] = round(algorithm.score(X_test_tfidf,y_test),3) 237 | row_index = row_index + 1 238 | 239 | 240 | test_ML_df.sort_values(by=['Train Accuracy'], ascending=False, inplace=True) 241 | 242 | print test_ML_df.head(4)# support vector machine has the highest accuracy on train (93%) and test (71%) data.
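# The comparison above relies on a single train/test split; cross_val_score
# (already imported at the top) gives a more robust estimate. A minimal
# sketch, assuming we reuse the tf-idf features and labels computed earlier:
#
# for algorithm in test_ML_Models:
#     scores = cross_val_score(algorithm, features_text, labels, cv=5)
#     print algorithm.__class__.__name__, scores.mean(), scores.std()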
243 | 244 | 245 | 246 | 247 | -------------------------------------------------------------------------------- /VotingClassifier/EnsembleClassifier.py: -------------------------------------------------------------------------------- 1 | ########################## 2 | # import Libraries 3 | ######################### 4 | 5 | from itertools import combinations 6 | 7 | import pandas as pd 8 | import numpy as np 9 | from scipy import stats 10 | 11 | import matplotlib.pyplot as plt 12 | import seaborn as sns 13 | from tqdm.notebook import tqdm 14 | 15 | from sklearn.preprocessing import StandardScaler 16 | from sklearn.model_selection import train_test_split 17 | 18 | 19 | from sklearn.svm import SVC 20 | from sklearn.linear_model import LogisticRegression 21 | from sklearn.ensemble import AdaBoostClassifier, VotingClassifier 22 | from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score 23 | from sklearn.inspection import DecisionBoundaryDisplay 24 | from sklearn.model_selection import GridSearchCV 25 | 26 | 27 | 28 | #################### 29 | # Load Data 30 | # Original (https://www.kaggle.com/datasets/fedesoriano/heart-failure-prediction) 31 | ################### 32 | 33 | heart_df = pd.read_csv('./heart.csv', ) 34 | # heart_df.head(4) 35 | 36 | ########### 37 | print ('check label counts: ', heart_df['HeartDisease'].value_counts()) 38 | 39 | label_class = {0:'Healthy', 1:'Ill'} 40 | heart_df['ClassLabel'] = heart_df['HeartDisease'].map(label_class) 41 | print (heart_df.head(5)) 42 | 43 | ##################################### 44 | # Plots for Checking Some Features 45 | ##################################### 46 | 47 | #countplot 48 | fig, axs = plt.subplots(figsize=(8, 6)) 49 | sns.countplot(data = heart_df, x=heart_df['ClassLabel'], hue='Sex', ax=axs) 50 | # plt.show 51 | 52 | #violinplots 53 | fig, axs = plt.subplots(1, 2, figsize=(9, 5)) 54 | axs=axs.flatten() 55 | sns.violinplot(data = heart_df, x='Sex', y='Cholesterol', 56 | hue='ClassLabel', palette='cubehelix', 57 | split=True, ax=axs[0]) 58 | sns.violinplot(data = heart_df, x='Sex', y='RestingBP', 59 | hue='ClassLabel', palette='cubehelix', 60 | split=True, ax=axs[1]) 61 | plt.tight_layout() 62 | # plt.show 63 | 64 | #boxplots 65 | ### Check Few Parameter Distributions: 66 | 67 | fig, axes = plt.subplots(2, 2, figsize=(9, 7)) 68 | axes = axes.flatten() 69 | sns.boxplot(x=heart_df['Age'], color='orange', ax=axes[0]) 70 | sns.boxplot(x=heart_df['Cholesterol'], color='orange', ax=axes[1]) 71 | sns.boxplot(x=heart_df['RestingBP'], color='orange', ax=axes[2]) 72 | sns.boxplot(x=heart_df['MaxHR'], color='orange', ax=axes[3]) 73 | plt.tight_layout() 74 | # plt.show 75 | 76 | ######## 77 | # Dummies for categorical variables 78 | ######## 79 | 80 | selected_rows_heart = pd.get_dummies(heart_df, drop_first=True) 81 | # selected_rows_heart.head(3) 82 | 83 | ################### 84 | # Training Data Preparation 85 | ################### 86 | 87 | X = selected_rows_heart.drop(['HeartDisease', 'ClassLabel_Ill'], axis=1) 88 | y = selected_rows_heart['HeartDisease'] 89 | 90 | 91 | X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, test_size = 0.25, random_state = 42) 92 | 93 | print ('train data shape: ', X_train.shape, y_train.shape) 94 | print ('test data shape: ', X_test.shape, y_test.shape) 95 | 96 | ### standardize the dataframe (training and test data should be separated) 97 | ### we only standardize the numerical columns 98 | 99 | numerical_cols = ['Age', 'RestingBP', 'Cholesterol', 'MaxHR', 
'Oldpeak'] 100 | 101 | scaler = StandardScaler() 102 | X_train[numerical_cols] = scaler.fit_transform(X_train[numerical_cols]) 103 | X_test[numerical_cols] = scaler.transform(X_test[numerical_cols]) # transform only: the scaler is fitted on the training data 104 | 105 | # X_train.head(3) 106 | 107 | ######################### 108 | # Check Model Performance Separately 109 | ######################### 110 | 111 | # Initialize the classifiers 112 | svc_classifier = SVC() 113 | logreg_classifier = LogisticRegression() 114 | adaboost_classifier = AdaBoostClassifier() 115 | 116 | # Train the models on the training data 117 | svc_classifier.fit(X_train, y_train) 118 | logreg_classifier.fit(X_train, y_train) 119 | adaboost_classifier.fit(X_train, y_train) 120 | 121 | # Step 3: Evaluate the models 122 | # Make predictions on the test data 123 | svc_predictions = svc_classifier.predict(X_test) 124 | logreg_predictions = logreg_classifier.predict(X_test) 125 | adaboost_predictions = adaboost_classifier.predict(X_test) 126 | 127 | # Evaluate the performance of each classifier using various metrics 128 | 129 | svc_precision = precision_score(y_test, svc_predictions, average='weighted') 130 | logreg_precision = precision_score(y_test, logreg_predictions, average='weighted') 131 | adaboost_precision = precision_score(y_test, adaboost_predictions, average='weighted') 132 | 133 | svc_recall = recall_score(y_test, svc_predictions, average='weighted') 134 | logreg_recall = recall_score(y_test, logreg_predictions, average='weighted') 135 | adaboost_recall = recall_score(y_test, adaboost_predictions, average='weighted') 136 | 137 | svc_f1 = f1_score(y_test, svc_predictions, average='weighted') 138 | logreg_f1 = f1_score(y_test, logreg_predictions, average='weighted') 139 | adaboost_f1 = f1_score(y_test, adaboost_predictions, average='weighted') 140 | 141 | # Print the evaluation metrics 142 | print("SVC, LogReg, AdaBoost Precisions:", svc_precision, logreg_precision, adaboost_precision) 143 | print("SVC, LogReg, AdaBoost Recalls:", svc_recall, logreg_recall, adaboost_recall) 144 | print("SVC, LogReg, AdaBoost F1-scores:", svc_f1, logreg_f1, adaboost_f1) 145 | 146 | 147 | ################# 148 | # Plot AdaBoost Feature Importances 149 | ################# 150 | 151 | n_features = len(X_train.columns) 152 | 153 | sns.set(style="whitegrid") 154 | 155 | fig = plt.figure(figsize=(15, 11)) 156 | fig.tight_layout() 157 | plt.bar(range(n_features), adaboost_classifier.feature_importances_, color="magenta", align="center", alpha=0.6) 158 | plt.xticks(np.arange(n_features), X_train.columns.to_list(), rotation=80, fontsize=11) 159 | plt.xlabel("Features", fontsize=14) 160 | plt.ylabel("Feature Importance", fontsize=14) 161 | # plt.savefig("./Feature_Importance_AdaBoost.png", dpi=300, bbox_inches='tight') 162 | # xticks are not clipped with 'bbox' 163 | # plt.show 164 | 165 | ############################ 166 | # Check Performance with VC 167 | ############################ 168 | 169 | 170 | 171 | # Create the Voting Classifier 172 | # One can choose either 'hard' voting or 'soft' voting.
173 | # 'hard' voting - Majority vote over the predicted class labels (the class predicted by most classifiers is chosen) 174 | # 'soft' voting - Weighted average of probabilities (combine all; we can also specify the weight array; 175 | # usually all models have weight 1) 176 | 177 | svc_classifier = SVC(probability=True) 178 | voting_classifier_hard = VotingClassifier(estimators=[('svm', svc_classifier), ('logreg', logreg_classifier), ('adaboost', adaboost_classifier)], 179 | voting='hard') 180 | 181 | voting_classifier_soft = VotingClassifier(estimators=[('svm', svc_classifier), ('logreg', logreg_classifier), ('adaboost', adaboost_classifier)], 182 | voting='soft') 183 | 184 | # Step 4: Train the Voting Classifier on the training data 185 | voting_classifier_hard.fit(X_train, y_train) 186 | voting_classifier_soft.fit(X_train, y_train) 187 | 188 | # Step 5: Make predictions on the test data 189 | voting_predictions_hard = voting_classifier_hard.predict(X_test) 190 | voting_predictions_soft = voting_classifier_soft.predict(X_test) 191 | 192 | 193 | vc_precision_soft = precision_score(y_test, voting_predictions_soft) 194 | vc_precision_hard = precision_score(y_test, voting_predictions_hard,) 195 | 196 | vc_recall_soft = recall_score(y_test, voting_predictions_soft) 197 | vc_recall_hard = recall_score(y_test, voting_predictions_hard) 198 | 199 | vc_f1_soft = f1_score(y_test, voting_predictions_soft) 200 | vc_f1_hard = f1_score(y_test, voting_predictions_hard) 201 | 202 | 203 | # Print the evaluation metrics 204 | print("VC_soft, VC_hard Precisions:", vc_precision_soft, vc_precision_hard) 205 | print("VC_soft, VC_hard Recalls:", vc_recall_soft, vc_recall_hard) 206 | print("VC_soft, VC_hard F1-scores:", vc_f1_soft, vc_f1_hard) 207 | 208 | ###################### 209 | # Define the Generic Confusion Matrix Performance 210 | ###################### 211 | 212 | class_types = list(heart_df['ClassLabel'].unique()) 213 | 214 | from sklearn.metrics import confusion_matrix, classification_report 215 | 216 | def conf_matrix(predictions, plot_title:str, save_path:str): 217 | ''' Plots conf.
matrix and classification report ''' 218 | cm=confusion_matrix(y_test, predictions) 219 | print("Classification Report:\n") 220 | cr=classification_report(y_test, predictions, 221 | target_names=[class_types[i] for i in range(len(class_types))]) 222 | print(cr) 223 | plt.figure(figsize=(6, 6)) 224 | plt.title('Confusion Matrix: %s'%(plot_title)) 225 | sns_hmp = sns.heatmap(cm, annot=True, xticklabels = [class_types[i] for i in range(len(class_types))], 226 | yticklabels = [class_types[i] for i in range(len(class_types))], fmt="d") 227 | fig = sns_hmp.get_figure() 228 | if save_path: fig.savefig(save_path, dpi=150) # save only when a path is given; savefig('') would raise an error 229 | 230 | conf_matrix(voting_predictions_hard, 'Voting Classifier (Hard)', save_path='') 231 | conf_matrix(adaboost_predictions, 'AdaBoost Classifier', save_path='') 232 | 233 | ############################### 234 | # Decision Boundaries for Pairs of Features (Numerical) 235 | ############################### 236 | 237 | ### Check the Effect of Voting Classifier 238 | 239 | ### select only the numerical cols 240 | 241 | X_test_numeric = X_test[numerical_cols] 242 | 243 | # Generating all pairs of the column indices 0 to 4 244 | comb = combinations(np.arange(0, 5), 2) 245 | 246 | # Using a set to obtain all the unique index pairs 247 | unique_combinations = set(comb) 248 | labels = ['Healthy', 'Ill'] 249 | 250 | 251 | color_palette = plt.cm.cividis 252 | plot_colors = ['g', 'r'] 253 | markers = ['*', 'o'] 254 | n_classes = len(y_train.unique()) 255 | plt.figure(figsize=(15, 10)) 256 | 257 | for pair_idx, pair in enumerate(sorted(unique_combinations)): 258 | # Only two corresponding features are taken each time 259 | X_test_cols = X_test_numeric.iloc[:, [pair[0], pair[1]]] 260 | 261 | # Refitting the classifier on the two selected feature columns (here the test data, for display purposes) 262 | classifier = voting_classifier_hard.fit(X_test_cols, y_test) 263 | 264 | # Defining a grid of 5 columns and 2 rows 265 | ax = plt.subplot(2, 5, pair_idx + 1) 266 | # Plotting the pairs decision boundaries 267 | DecisionBoundaryDisplay.from_estimator(classifier, 268 | X_test_cols, 269 | cmap=color_palette, 270 | response_method="predict", 271 | ax=ax, 272 | xlabel=X_test_numeric.columns.to_list()[pair[0]], 273 | ylabel=X_test_numeric.columns.to_list()[pair[1]], 274 | alpha = 0.5) 275 | 276 | # Plotting the test points according to their class colors 277 | for i, color in zip(range(n_classes), plot_colors): 278 | idx = np.where(y_test == i) 279 | plt.scatter(X_test_cols.iloc[idx[:][0], 0], 280 | X_test_cols.iloc[idx[:][0], 1], 281 | c=color, 282 | label=labels[i], marker=markers[i], 283 | s=20, alpha=0.3) 284 | 285 | plt.suptitle("Decision surface of Voting Classifier (Hard): Pairs of Numerical Features", fontsize=12) 286 | plt.legend(loc="upper right", fontsize=9) 287 | plt.tight_layout() 288 | # plt.show 289 | 290 | 291 | ############### 292 | # Check GridSearchCV within Voting Classifier 293 | ############## 294 | 295 | svc_classifier = SVC(probability=True) #kernel = rbf 296 | 297 | voting_classifier_hard = VotingClassifier(estimators=[('svm', svc_classifier), ('logreg', logreg_classifier), 298 | ('adaboost', adaboost_classifier)], voting='hard') 299 | 300 | 301 | # 0.1, 0.5, 302 | params = {'svm__C':[0.1, 0.5, 1, 30, 75, 100], 303 | 'svm__gamma':[0.005, 0.01, 0.05, 1, 10, 100], 304 | 'logreg__C':[0.05, 0.1, 0.5, 1, 30, 75, 100], 'adaboost__n_estimators':[20, 50, 70]} 305 | 306 | 307 | for cv in tqdm(range(3, 6)): 308 | create_grid = GridSearchCV(estimator=voting_classifier_hard, param_grid=params, cv=cv) 309 | create_grid.fit(X_train,
y_train) 310 | print ('score for %d fold CV := %3.2f'%(cv, create_grid.score(X_test, y_test))) 311 | print ('!!!!!!!! Best Params from Grid Search CV !!!!!!!!') 312 | print (create_grid.best_params_) 313 | 314 | print ('Out of the Loop') 315 | 316 | 317 | print ('grid CV best params: ', create_grid.best_params_) 318 | grid_CV_predictions = create_grid.predict(X_test) 319 | 320 | conf_matrix(grid_CV_predictions, 'VC (Hard) with GridSearch (CV=5)', save_path='./GridCV_VC.png') 321 | 322 | ################ 323 | # libraries Used 324 | ################ 325 | ''' 326 | Matplotlib: 3.7.1 327 | Numpy: 1.24.3 328 | Scipy: 1.10.1 329 | Pandas: 1.5.3 330 | Seaborn: 0.12.2 331 | sklearn: 1.3.0 332 | ''' 333 | -------------------------------------------------------------------------------- /VotingClassifier/heart.csv: -------------------------------------------------------------------------------- 1 | Age,Sex,ChestPainType,RestingBP,Cholesterol,FastingBS,RestingECG,MaxHR,ExerciseAngina,Oldpeak,ST_Slope,HeartDisease 2 | 40,M,ATA,140,289,0,Normal,172,N,0,Up,0 3 | 49,F,NAP,160,180,0,Normal,156,N,1,Flat,1 4 | 37,M,ATA,130,283,0,ST,98,N,0,Up,0 5 | 48,F,ASY,138,214,0,Normal,108,Y,1.5,Flat,1 6 | 54,M,NAP,150,195,0,Normal,122,N,0,Up,0 7 | 39,M,NAP,120,339,0,Normal,170,N,0,Up,0 8 | 45,F,ATA,130,237,0,Normal,170,N,0,Up,0 9 | 54,M,ATA,110,208,0,Normal,142,N,0,Up,0 10 | 37,M,ASY,140,207,0,Normal,130,Y,1.5,Flat,1 11 | 48,F,ATA,120,284,0,Normal,120,N,0,Up,0 12 | 37,F,NAP,130,211,0,Normal,142,N,0,Up,0 13 | 58,M,ATA,136,164,0,ST,99,Y,2,Flat,1 14 | 39,M,ATA,120,204,0,Normal,145,N,0,Up,0 15 | 49,M,ASY,140,234,0,Normal,140,Y,1,Flat,1 16 | 42,F,NAP,115,211,0,ST,137,N,0,Up,0 17 | 54,F,ATA,120,273,0,Normal,150,N,1.5,Flat,0 18 | 38,M,ASY,110,196,0,Normal,166,N,0,Flat,1 19 | 43,F,ATA,120,201,0,Normal,165,N,0,Up,0 20 | 60,M,ASY,100,248,0,Normal,125,N,1,Flat,1 21 | 36,M,ATA,120,267,0,Normal,160,N,3,Flat,1 22 | 43,F,TA,100,223,0,Normal,142,N,0,Up,0 23 | 44,M,ATA,120,184,0,Normal,142,N,1,Flat,0 24 | 49,F,ATA,124,201,0,Normal,164,N,0,Up,0 25 | 44,M,ATA,150,288,0,Normal,150,Y,3,Flat,1 26 | 40,M,NAP,130,215,0,Normal,138,N,0,Up,0 27 | 36,M,NAP,130,209,0,Normal,178,N,0,Up,0 28 | 53,M,ASY,124,260,0,ST,112,Y,3,Flat,0 29 | 52,M,ATA,120,284,0,Normal,118,N,0,Up,0 30 | 53,F,ATA,113,468,0,Normal,127,N,0,Up,0 31 | 51,M,ATA,125,188,0,Normal,145,N,0,Up,0 32 | 53,M,NAP,145,518,0,Normal,130,N,0,Flat,1 33 | 56,M,NAP,130,167,0,Normal,114,N,0,Up,0 34 | 54,M,ASY,125,224,0,Normal,122,N,2,Flat,1 35 | 41,M,ASY,130,172,0,ST,130,N,2,Flat,1 36 | 43,F,ATA,150,186,0,Normal,154,N,0,Up,0 37 | 32,M,ATA,125,254,0,Normal,155,N,0,Up,0 38 | 65,M,ASY,140,306,1,Normal,87,Y,1.5,Flat,1 39 | 41,F,ATA,110,250,0,ST,142,N,0,Up,0 40 | 48,F,ATA,120,177,1,ST,148,N,0,Up,0 41 | 48,F,ASY,150,227,0,Normal,130,Y,1,Flat,0 42 | 54,F,ATA,150,230,0,Normal,130,N,0,Up,0 43 | 54,F,NAP,130,294,0,ST,100,Y,0,Flat,1 44 | 35,M,ATA,150,264,0,Normal,168,N,0,Up,0 45 | 52,M,NAP,140,259,0,ST,170,N,0,Up,0 46 | 43,M,ASY,120,175,0,Normal,120,Y,1,Flat,1 47 | 59,M,NAP,130,318,0,Normal,120,Y,1,Flat,0 48 | 37,M,ASY,120,223,0,Normal,168,N,0,Up,0 49 | 50,M,ATA,140,216,0,Normal,170,N,0,Up,0 50 | 36,M,NAP,112,340,0,Normal,184,N,1,Flat,0 51 | 41,M,ASY,110,289,0,Normal,170,N,0,Flat,1 52 | 50,M,ASY,130,233,0,Normal,121,Y,2,Flat,1 53 | 47,F,ASY,120,205,0,Normal,98,Y,2,Flat,1 54 | 45,M,ATA,140,224,1,Normal,122,N,0,Up,0 55 | 41,F,ATA,130,245,0,Normal,150,N,0,Up,0 56 | 52,F,ASY,130,180,0,Normal,140,Y,1.5,Flat,0 57 | 51,F,ATA,160,194,0,Normal,170,N,0,Up,0 58 | 31,M,ASY,120,270,0,Normal,153,Y,1.5,Flat,1 59 | 
58,M,NAP,130,213,0,ST,140,N,0,Flat,1 60 | 54,M,ASY,150,365,0,ST,134,N,1,Up,0 61 | 52,M,ASY,112,342,0,ST,96,Y,1,Flat,1 62 | 49,M,ATA,100,253,0,Normal,174,N,0,Up,0 63 | 43,F,NAP,150,254,0,Normal,175,N,0,Up,0 64 | 45,M,ASY,140,224,0,Normal,144,N,0,Up,0 65 | 46,M,ASY,120,277,0,Normal,125,Y,1,Flat,1 66 | 50,F,ATA,110,202,0,Normal,145,N,0,Up,0 67 | 37,F,ATA,120,260,0,Normal,130,N,0,Up,0 68 | 45,F,ASY,132,297,0,Normal,144,N,0,Up,0 69 | 32,M,ATA,110,225,0,Normal,184,N,0,Up,0 70 | 52,M,ASY,160,246,0,ST,82,Y,4,Flat,1 71 | 44,M,ASY,150,412,0,Normal,170,N,0,Up,0 72 | 57,M,ATA,140,265,0,ST,145,Y,1,Flat,1 73 | 44,M,ATA,130,215,0,Normal,135,N,0,Up,0 74 | 52,M,ASY,120,182,0,Normal,150,N,0,Flat,1 75 | 44,F,ASY,120,218,0,ST,115,N,0,Up,0 76 | 55,M,ASY,140,268,0,Normal,128,Y,1.5,Flat,1 77 | 46,M,NAP,150,163,0,Normal,116,N,0,Up,0 78 | 32,M,ASY,118,529,0,Normal,130,N,0,Flat,1 79 | 35,F,ASY,140,167,0,Normal,150,N,0,Up,0 80 | 52,M,ATA,140,100,0,Normal,138,Y,0,Up,0 81 | 49,M,ASY,130,206,0,Normal,170,N,0,Flat,1 82 | 55,M,NAP,110,277,0,Normal,160,N,0,Up,0 83 | 54,M,ATA,120,238,0,Normal,154,N,0,Up,0 84 | 63,M,ASY,150,223,0,Normal,115,N,0,Flat,1 85 | 52,M,ATA,160,196,0,Normal,165,N,0,Up,0 86 | 56,M,ASY,150,213,1,Normal,125,Y,1,Flat,1 87 | 66,M,ASY,140,139,0,Normal,94,Y,1,Flat,1 88 | 65,M,ASY,170,263,1,Normal,112,Y,2,Flat,1 89 | 53,F,ATA,140,216,0,Normal,142,Y,2,Flat,0 90 | 43,M,TA,120,291,0,ST,155,N,0,Flat,1 91 | 55,M,ASY,140,229,0,Normal,110,Y,0.5,Flat,0 92 | 49,F,ATA,110,208,0,Normal,160,N,0,Up,0 93 | 39,M,ASY,130,307,0,Normal,140,N,0,Up,0 94 | 52,F,ATA,120,210,0,Normal,148,N,0,Up,0 95 | 48,M,ASY,160,329,0,Normal,92,Y,1.5,Flat,1 96 | 39,F,NAP,110,182,0,ST,180,N,0,Up,0 97 | 58,M,ASY,130,263,0,Normal,140,Y,2,Flat,1 98 | 43,M,ATA,142,207,0,Normal,138,N,0,Up,0 99 | 39,M,NAP,160,147,1,Normal,160,N,0,Up,0 100 | 56,M,ASY,120,85,0,Normal,140,N,0,Up,0 101 | 41,M,ATA,125,269,0,Normal,144,N,0,Up,0 102 | 65,M,ASY,130,275,0,ST,115,Y,1,Flat,1 103 | 51,M,ASY,130,179,0,Normal,100,N,0,Up,0 104 | 40,F,ASY,150,392,0,Normal,130,N,2,Flat,1 105 | 40,M,ASY,120,466,1,Normal,152,Y,1,Flat,1 106 | 46,M,ASY,118,186,0,Normal,124,N,0,Flat,1 107 | 57,M,ATA,140,260,1,Normal,140,N,0,Up,0 108 | 48,F,ASY,120,254,0,ST,110,N,0,Up,0 109 | 34,M,ATA,150,214,0,ST,168,N,0,Up,0 110 | 50,M,ASY,140,129,0,Normal,135,N,0,Up,0 111 | 39,M,ATA,190,241,0,Normal,106,N,0,Up,0 112 | 59,F,ATA,130,188,0,Normal,124,N,1,Flat,0 113 | 57,M,ASY,150,255,0,Normal,92,Y,3,Flat,1 114 | 47,M,ASY,140,276,1,Normal,125,Y,0,Up,0 115 | 38,M,ATA,140,297,0,Normal,150,N,0,Up,0 116 | 49,F,NAP,130,207,0,ST,135,N,0,Up,0 117 | 33,F,ASY,100,246,0,Normal,150,Y,1,Flat,1 118 | 38,M,ASY,120,282,0,Normal,170,N,0,Flat,1 119 | 59,F,ASY,130,338,1,ST,130,Y,1.5,Flat,1 120 | 35,F,TA,120,160,0,ST,185,N,0,Up,0 121 | 34,M,TA,140,156,0,Normal,180,N,0,Flat,1 122 | 47,F,NAP,135,248,1,Normal,170,N,0,Flat,1 123 | 52,F,NAP,125,272,0,Normal,139,N,0,Up,0 124 | 46,M,ASY,110,240,0,ST,140,N,0,Up,0 125 | 58,F,ATA,180,393,0,Normal,110,Y,1,Flat,1 126 | 58,M,ATA,130,230,0,Normal,150,N,0,Up,0 127 | 54,M,ATA,120,246,0,Normal,110,N,0,Up,0 128 | 34,F,ATA,130,161,0,Normal,190,N,0,Up,0 129 | 48,F,ASY,108,163,0,Normal,175,N,2,Up,0 130 | 54,F,ATA,120,230,1,Normal,140,N,0,Up,0 131 | 42,M,NAP,120,228,0,Normal,152,Y,1.5,Flat,0 132 | 38,M,NAP,145,292,0,Normal,130,N,0,Up,0 133 | 46,M,ASY,110,202,0,Normal,150,Y,0,Flat,1 134 | 56,M,ASY,170,388,0,ST,122,Y,2,Flat,1 135 | 56,M,ASY,150,230,0,ST,124,Y,1.5,Flat,1 136 | 61,F,ASY,130,294,0,ST,120,Y,1,Flat,0 137 | 49,M,NAP,115,265,0,Normal,175,N,0,Flat,1 138 | 43,F,ATA,120,215,0,ST,175,N,0,Up,0 
139 | 39,M,ATA,120,241,0,ST,146,N,2,Up,0 140 | 54,M,ASY,140,166,0,Normal,118,Y,0,Flat,1 141 | 43,M,ASY,150,247,0,Normal,130,Y,2,Flat,1 142 | 52,M,ASY,160,331,0,Normal,94,Y,2.5,Flat,1 143 | 50,M,ASY,140,341,0,ST,125,Y,2.5,Flat,1 144 | 47,M,ASY,160,291,0,ST,158,Y,3,Flat,1 145 | 53,M,ASY,140,243,0,Normal,155,N,0,Up,0 146 | 56,F,ATA,120,279,0,Normal,150,N,1,Flat,1 147 | 39,M,ASY,110,273,0,Normal,132,N,0,Up,0 148 | 42,M,ATA,120,198,0,Normal,155,N,0,Up,0 149 | 43,F,ATA,120,249,0,ST,176,N,0,Up,0 150 | 50,M,ATA,120,168,0,Normal,160,N,0,Up,0 151 | 54,M,ASY,130,603,1,Normal,125,Y,1,Flat,1 152 | 39,M,ATA,130,215,0,Normal,120,N,0,Up,0 153 | 48,M,ATA,100,159,0,Normal,100,N,0,Up,0 154 | 40,M,ATA,130,275,0,Normal,150,N,0,Up,0 155 | 55,M,ASY,120,270,0,Normal,140,N,0,Up,0 156 | 41,M,ATA,120,291,0,ST,160,N,0,Up,0 157 | 56,M,ASY,155,342,1,Normal,150,Y,3,Flat,1 158 | 38,M,ASY,110,190,0,Normal,150,Y,1,Flat,1 159 | 49,M,ASY,140,185,0,Normal,130,N,0,Up,0 160 | 44,M,ASY,130,290,0,Normal,100,Y,2,Flat,1 161 | 54,M,ATA,160,195,0,ST,130,N,1,Up,0 162 | 59,M,ASY,140,264,1,LVH,119,Y,0,Flat,1 163 | 49,M,ASY,128,212,0,Normal,96,Y,0,Flat,1 164 | 47,M,ATA,160,263,0,Normal,174,N,0,Up,0 165 | 42,M,ATA,120,196,0,Normal,150,N,0,Up,0 166 | 52,F,ATA,140,225,0,Normal,140,N,0,Up,0 167 | 46,M,TA,140,272,1,Normal,175,N,2,Flat,1 168 | 50,M,ASY,140,231,0,ST,140,Y,5,Flat,1 169 | 48,M,ATA,140,238,0,Normal,118,N,0,Up,0 170 | 58,M,ASY,135,222,0,Normal,100,N,0,Up,0 171 | 58,M,NAP,140,179,0,Normal,160,N,0,Up,0 172 | 29,M,ATA,120,243,0,Normal,160,N,0,Up,0 173 | 40,M,NAP,140,235,0,Normal,188,N,0,Up,0 174 | 53,M,ATA,140,320,0,Normal,162,N,0,Up,0 175 | 49,M,NAP,140,187,0,Normal,172,N,0,Up,0 176 | 52,M,ASY,140,266,0,Normal,134,Y,2,Flat,1 177 | 43,M,ASY,140,288,0,Normal,135,Y,2,Flat,1 178 | 54,M,ASY,140,216,0,Normal,105,N,1.5,Flat,1 179 | 59,M,ATA,140,287,0,Normal,150,N,0,Up,0 180 | 37,M,NAP,130,194,0,Normal,150,N,0,Up,0 181 | 46,F,ASY,130,238,0,Normal,90,N,0,Up,0 182 | 52,M,ASY,130,225,0,Normal,120,Y,2,Flat,1 183 | 51,M,ATA,130,224,0,Normal,150,N,0,Up,0 184 | 52,M,ASY,140,404,0,Normal,124,Y,2,Flat,1 185 | 46,M,ASY,110,238,0,ST,140,Y,1,Flat,0 186 | 54,F,ATA,160,312,0,Normal,130,N,0,Up,0 187 | 58,M,NAP,160,211,1,ST,92,N,0,Flat,1 188 | 58,M,ATA,130,251,0,Normal,110,N,0,Up,0 189 | 41,M,ASY,120,237,1,Normal,138,Y,1,Flat,1 190 | 50,F,ASY,120,328,0,Normal,110,Y,1,Flat,0 191 | 53,M,ASY,180,285,0,ST,120,Y,1.5,Flat,1 192 | 46,M,ASY,180,280,0,ST,120,N,0,Up,0 193 | 50,M,ATA,170,209,0,ST,116,N,0,Up,0 194 | 48,M,ATA,130,245,0,Normal,160,N,0,Up,0 195 | 45,M,NAP,135,192,0,Normal,110,N,0,Up,0 196 | 41,F,ATA,125,184,0,Normal,180,N,0,Up,0 197 | 62,F,TA,160,193,0,Normal,116,N,0,Up,0 198 | 49,M,ASY,120,297,0,Normal,132,N,1,Flat,0 199 | 42,M,ATA,150,268,0,Normal,136,N,0,Up,0 200 | 53,M,ASY,120,246,0,Normal,116,Y,0,Flat,1 201 | 57,F,TA,130,308,0,Normal,98,N,1,Flat,0 202 | 47,M,TA,110,249,0,Normal,150,N,0,Up,0 203 | 46,M,NAP,120,230,0,Normal,150,N,0,Up,0 204 | 42,M,NAP,160,147,0,Normal,146,N,0,Up,0 205 | 31,F,ATA,100,219,0,ST,150,N,0,Up,0 206 | 56,M,ATA,130,184,0,Normal,100,N,0,Up,0 207 | 50,M,ASY,150,215,0,Normal,140,Y,0,Up,0 208 | 35,M,ATA,120,308,0,LVH,180,N,0,Up,0 209 | 35,M,ATA,110,257,0,Normal,140,N,0,Flat,1 210 | 28,M,ATA,130,132,0,LVH,185,N,0,Up,0 211 | 54,M,ASY,125,216,0,Normal,140,N,0,Flat,1 212 | 48,M,ASY,106,263,1,Normal,110,N,0,Flat,1 213 | 50,F,NAP,140,288,0,Normal,140,Y,0,Flat,1 214 | 56,M,NAP,130,276,0,Normal,128,Y,1,Up,0 215 | 56,F,NAP,130,219,0,ST,164,N,0,Up,0 216 | 47,M,ASY,150,226,0,Normal,98,Y,1.5,Flat,1 217 | 30,F,TA,170,237,0,ST,170,N,0,Up,0 218 | 
39,M,ASY,110,280,0,Normal,150,N,0,Flat,1 219 | 54,M,NAP,120,217,0,Normal,137,N,0,Up,0 220 | 55,M,ATA,140,196,0,Normal,150,N,0,Up,0 221 | 29,M,ATA,140,263,0,Normal,170,N,0,Up,0 222 | 46,M,ASY,130,222,0,Normal,112,N,0,Flat,1 223 | 51,F,ASY,160,303,0,Normal,150,Y,1,Flat,1 224 | 48,F,NAP,120,195,0,Normal,125,N,0,Up,0 225 | 33,M,NAP,120,298,0,Normal,185,N,0,Up,0 226 | 55,M,ATA,120,256,1,Normal,137,N,0,Up,0 227 | 50,M,ASY,145,264,0,Normal,150,N,0,Flat,1 228 | 53,M,NAP,120,195,0,Normal,140,N,0,Up,0 229 | 38,M,ASY,92,117,0,Normal,134,Y,2.5,Flat,1 230 | 41,M,ATA,120,295,0,Normal,170,N,0,Up,0 231 | 37,F,ASY,130,173,0,ST,184,N,0,Up,0 232 | 37,M,ASY,130,315,0,Normal,158,N,0,Up,0 233 | 40,M,NAP,130,281,0,Normal,167,N,0,Up,0 234 | 38,F,ATA,120,275,0,Normal,129,N,0,Up,0 235 | 41,M,ASY,112,250,0,Normal,142,N,0,Up,0 236 | 54,F,ATA,140,309,0,ST,140,N,0,Up,0 237 | 39,M,ATA,120,200,0,Normal,160,Y,1,Flat,0 238 | 41,M,ASY,120,336,0,Normal,118,Y,3,Flat,1 239 | 55,M,TA,140,295,0,Normal,136,N,0,Flat,1 240 | 48,M,ASY,160,355,0,Normal,99,Y,2,Flat,1 241 | 48,M,ASY,160,193,0,Normal,102,Y,3,Flat,1 242 | 55,M,ATA,145,326,0,Normal,155,N,0,Up,0 243 | 54,M,ASY,200,198,0,Normal,142,Y,2,Flat,1 244 | 55,M,ATA,160,292,1,Normal,143,Y,2,Flat,1 245 | 43,F,ATA,120,266,0,Normal,118,N,0,Up,0 246 | 48,M,ASY,160,268,0,Normal,103,Y,1,Flat,1 247 | 54,M,TA,120,171,0,Normal,137,N,2,Up,0 248 | 54,M,NAP,120,237,0,Normal,150,Y,1.5,Flat,1 249 | 48,M,ASY,122,275,1,ST,150,Y,2,Down,1 250 | 45,M,ASY,130,219,0,ST,130,Y,1,Flat,1 251 | 49,M,ASY,130,341,0,Normal,120,Y,1,Flat,1 252 | 44,M,ASY,135,491,0,Normal,135,N,0,Flat,1 253 | 48,M,ASY,120,260,0,Normal,115,N,2,Flat,1 254 | 61,M,ASY,125,292,0,ST,115,Y,0,Up,0 255 | 62,M,ATA,140,271,0,Normal,152,N,1,Up,0 256 | 55,M,ASY,145,248,0,Normal,96,Y,2,Flat,1 257 | 53,F,NAP,120,274,0,Normal,130,N,0,Up,0 258 | 55,F,ATA,130,394,0,LVH,150,N,0,Up,0 259 | 36,M,NAP,150,160,0,Normal,172,N,0,Up,0 260 | 51,F,NAP,150,200,0,Normal,120,N,0.5,Up,0 261 | 55,F,ATA,122,320,0,Normal,155,N,0,Up,0 262 | 46,M,ATA,140,275,0,Normal,165,Y,0,Up,0 263 | 54,F,ATA,120,221,0,Normal,138,N,1,Up,0 264 | 46,M,ASY,120,231,0,Normal,115,Y,0,Flat,1 265 | 59,M,ASY,130,126,0,Normal,125,N,0,Flat,1 266 | 47,M,NAP,140,193,0,Normal,145,Y,1,Flat,1 267 | 54,M,ATA,160,305,0,Normal,175,N,0,Up,0 268 | 52,M,ASY,130,298,0,Normal,110,Y,1,Flat,1 269 | 34,M,ATA,98,220,0,Normal,150,N,0,Up,0 270 | 54,M,ASY,130,242,0,Normal,91,Y,1,Flat,1 271 | 47,F,NAP,130,235,0,Normal,145,N,2,Flat,0 272 | 45,M,ASY,120,225,0,Normal,140,N,0,Up,0 273 | 32,F,ATA,105,198,0,Normal,165,N,0,Up,0 274 | 55,M,ASY,140,201,0,Normal,130,Y,3,Flat,1 275 | 55,M,NAP,120,220,0,LVH,134,N,0,Up,0 276 | 45,F,ATA,180,295,0,Normal,180,N,0,Up,0 277 | 59,M,NAP,180,213,0,Normal,100,N,0,Up,0 278 | 51,M,NAP,135,160,0,Normal,150,N,2,Flat,1 279 | 52,M,ASY,170,223,0,Normal,126,Y,1.5,Flat,1 280 | 57,F,ASY,180,347,0,ST,126,Y,0.8,Flat,0 281 | 54,F,ATA,130,253,0,ST,155,N,0,Up,0 282 | 60,M,NAP,120,246,0,LVH,135,N,0,Up,0 283 | 49,M,ASY,150,222,0,Normal,122,N,2,Flat,1 284 | 51,F,NAP,130,220,0,Normal,160,Y,2,Up,0 285 | 55,F,ATA,110,344,0,ST,160,N,0,Up,0 286 | 42,M,ASY,140,358,0,Normal,170,N,0,Up,0 287 | 51,F,NAP,110,190,0,Normal,120,N,0,Up,0 288 | 59,M,ASY,140,169,0,Normal,140,N,0,Up,0 289 | 53,M,ATA,120,181,0,Normal,132,N,0,Up,0 290 | 48,F,ATA,133,308,0,ST,156,N,2,Up,0 291 | 36,M,ATA,120,166,0,Normal,180,N,0,Up,0 292 | 48,M,NAP,110,211,0,Normal,138,N,0,Up,0 293 | 47,F,ATA,140,257,0,Normal,135,N,1,Up,0 294 | 53,M,ASY,130,182,0,Normal,148,N,0,Up,0 295 | 65,M,ASY,115,0,0,Normal,93,Y,0,Flat,1 296 | 
32,M,TA,95,0,1,Normal,127,N,0.7,Up,1 297 | 61,M,ASY,105,0,1,Normal,110,Y,1.5,Up,1 298 | 50,M,ASY,145,0,1,Normal,139,Y,0.7,Flat,1 299 | 57,M,ASY,110,0,1,ST,131,Y,1.4,Up,1 300 | 51,M,ASY,110,0,1,Normal,92,N,0,Flat,1 301 | 47,M,ASY,110,0,1,ST,149,N,2.1,Up,1 302 | 60,M,ASY,160,0,1,Normal,149,N,0.4,Flat,1 303 | 55,M,ATA,140,0,0,ST,150,N,0.2,Up,0 304 | 53,M,ASY,125,0,1,Normal,120,N,1.5,Up,1 305 | 62,F,ASY,120,0,1,ST,123,Y,1.7,Down,1 306 | 51,M,ASY,95,0,1,Normal,126,N,2.2,Flat,1 307 | 51,F,ASY,120,0,1,Normal,127,Y,1.5,Up,1 308 | 55,M,ASY,115,0,1,Normal,155,N,0.1,Flat,1 309 | 53,M,ATA,130,0,0,ST,120,N,0.7,Down,0 310 | 58,M,ASY,115,0,1,Normal,138,N,0.5,Up,1 311 | 57,M,ASY,95,0,1,Normal,182,N,0.7,Down,1 312 | 65,M,ASY,155,0,0,Normal,154,N,1,Up,0 313 | 60,M,ASY,125,0,1,Normal,110,N,0.1,Up,1 314 | 41,M,ASY,125,0,1,Normal,176,N,1.6,Up,1 315 | 34,M,ASY,115,0,1,Normal,154,N,0.2,Up,1 316 | 53,M,ASY,80,0,0,Normal,141,Y,2,Down,0 317 | 74,M,ATA,145,0,1,ST,123,N,1.3,Up,1 318 | 57,M,NAP,105,0,1,Normal,148,N,0.3,Flat,1 319 | 56,M,ASY,140,0,1,Normal,121,Y,1.8,Up,1 320 | 61,M,ASY,130,0,1,Normal,77,N,2.5,Flat,1 321 | 68,M,ASY,145,0,1,Normal,136,N,1.8,Up,1 322 | 59,M,NAP,125,0,1,Normal,175,N,2.6,Flat,1 323 | 63,M,ASY,100,0,1,Normal,109,N,-0.9,Flat,1 324 | 38,F,ASY,105,0,1,Normal,166,N,2.8,Up,1 325 | 62,M,ASY,115,0,1,Normal,128,Y,2.5,Down,1 326 | 46,M,ASY,100,0,1,ST,133,N,-2.6,Flat,1 327 | 42,M,ASY,105,0,1,Normal,128,Y,-1.5,Down,1 328 | 45,M,NAP,110,0,0,Normal,138,N,-0.1,Up,0 329 | 59,M,ASY,125,0,1,Normal,119,Y,0.9,Up,1 330 | 52,M,ASY,95,0,1,Normal,82,Y,0.8,Flat,1 331 | 60,M,ASY,130,0,1,ST,130,Y,1.1,Down,1 332 | 60,M,NAP,115,0,1,Normal,143,N,2.4,Up,1 333 | 56,M,ASY,115,0,1,ST,82,N,-1,Up,1 334 | 38,M,NAP,100,0,0,Normal,179,N,-1.1,Up,0 335 | 40,M,ASY,95,0,1,ST,144,N,0,Up,1 336 | 51,M,ASY,130,0,1,Normal,170,N,-0.7,Up,1 337 | 62,M,TA,120,0,1,LVH,134,N,-0.8,Flat,1 338 | 72,M,NAP,160,0,0,LVH,114,N,1.6,Flat,0 339 | 63,M,ASY,150,0,1,ST,154,N,3.7,Up,1 340 | 63,M,ASY,140,0,1,LVH,149,N,2,Up,1 341 | 64,F,ASY,95,0,1,Normal,145,N,1.1,Down,1 342 | 43,M,ASY,100,0,1,Normal,122,N,1.5,Down,1 343 | 64,M,ASY,110,0,1,Normal,114,Y,1.3,Down,1 344 | 61,M,ASY,110,0,1,Normal,113,N,1.4,Flat,1 345 | 52,M,ASY,130,0,1,Normal,120,N,0,Flat,1 346 | 51,M,ASY,120,0,1,Normal,104,N,0,Flat,1 347 | 69,M,ASY,135,0,0,Normal,130,N,0,Flat,1 348 | 59,M,ASY,120,0,0,Normal,115,N,0,Flat,1 349 | 48,M,ASY,115,0,1,Normal,128,N,0,Flat,1 350 | 69,M,ASY,137,0,0,ST,104,Y,1.6,Flat,1 351 | 36,M,ASY,110,0,1,Normal,125,Y,1,Flat,1 352 | 53,M,ASY,120,0,1,Normal,120,N,0,Flat,1 353 | 43,M,ASY,140,0,0,ST,140,Y,0.5,Up,1 354 | 56,M,ASY,120,0,0,ST,100,Y,-1,Down,1 355 | 58,M,ASY,130,0,0,ST,100,Y,1,Flat,1 356 | 55,M,ASY,120,0,0,ST,92,N,0.3,Up,1 357 | 67,M,TA,145,0,0,LVH,125,N,0,Flat,1 358 | 46,M,ASY,115,0,0,Normal,113,Y,1.5,Flat,1 359 | 53,M,ATA,120,0,0,Normal,95,N,0,Flat,1 360 | 38,M,NAP,115,0,0,Normal,128,Y,0,Flat,1 361 | 53,M,NAP,105,0,0,Normal,115,N,0,Flat,1 362 | 62,M,NAP,160,0,0,Normal,72,Y,0,Flat,1 363 | 47,M,ASY,160,0,0,Normal,124,Y,0,Flat,1 364 | 56,M,NAP,155,0,0,ST,99,N,0,Flat,1 365 | 56,M,ASY,120,0,0,ST,148,N,0,Flat,1 366 | 56,M,NAP,120,0,0,Normal,97,N,0,Flat,0 367 | 64,F,ASY,200,0,0,Normal,140,Y,1,Flat,1 368 | 61,M,ASY,150,0,0,Normal,117,Y,2,Flat,1 369 | 68,M,ASY,135,0,0,ST,120,Y,0,Up,1 370 | 57,M,ASY,140,0,0,Normal,120,Y,2,Flat,1 371 | 63,M,ASY,150,0,0,Normal,86,Y,2,Flat,1 372 | 60,M,ASY,135,0,0,Normal,63,Y,0.5,Up,1 373 | 66,M,ASY,150,0,0,Normal,108,Y,2,Flat,1 374 | 63,M,ASY,185,0,0,Normal,98,Y,0,Up,1 375 | 59,M,ASY,135,0,0,Normal,115,Y,1,Flat,1 376 | 
61,M,ASY,125,0,0,Normal,105,Y,0,Down,1 377 | 73,F,NAP,160,0,0,ST,121,N,0,Up,1 378 | 47,M,NAP,155,0,0,Normal,118,Y,1,Flat,1 379 | 65,M,ASY,160,0,1,ST,122,N,1.2,Flat,1 380 | 70,M,ASY,140,0,1,Normal,157,Y,2,Flat,1 381 | 50,M,ASY,120,0,0,ST,156,Y,0,Up,1 382 | 60,M,ASY,160,0,0,ST,99,Y,0.5,Flat,1 383 | 50,M,ASY,115,0,0,Normal,120,Y,0.5,Flat,1 384 | 43,M,ASY,115,0,0,Normal,145,Y,2,Flat,1 385 | 38,F,ASY,110,0,0,Normal,156,N,0,Flat,1 386 | 54,M,ASY,120,0,0,Normal,155,N,0,Flat,1 387 | 61,M,ASY,150,0,0,Normal,105,Y,0,Flat,1 388 | 42,M,ASY,145,0,0,Normal,99,Y,0,Flat,1 389 | 53,M,ASY,130,0,0,LVH,135,Y,1,Flat,1 390 | 55,M,ASY,140,0,0,Normal,83,N,0,Flat,1 391 | 61,M,ASY,160,0,1,ST,145,N,1,Flat,1 392 | 51,M,ASY,140,0,0,Normal,60,N,0,Flat,1 393 | 70,M,ASY,115,0,0,ST,92,Y,0,Flat,1 394 | 61,M,ASY,130,0,0,LVH,115,N,0,Flat,1 395 | 38,M,ASY,150,0,1,Normal,120,Y,0.7,Flat,1 396 | 57,M,ASY,160,0,1,Normal,98,Y,2,Flat,1 397 | 38,M,ASY,135,0,1,Normal,150,N,0,Flat,1 398 | 62,F,TA,140,0,1,Normal,143,N,0,Flat,1 399 | 58,M,ASY,170,0,1,ST,105,Y,0,Flat,1 400 | 52,M,ASY,165,0,1,Normal,122,Y,1,Up,1 401 | 61,M,NAP,200,0,1,ST,70,N,0,Flat,1 402 | 50,F,ASY,160,0,1,Normal,110,N,0,Flat,1 403 | 51,M,ASY,130,0,1,ST,163,N,0,Flat,1 404 | 65,M,ASY,145,0,1,ST,67,N,0.7,Flat,1 405 | 52,M,ASY,135,0,1,Normal,128,Y,2,Flat,1 406 | 47,M,NAP,110,0,1,Normal,120,Y,0,Flat,1 407 | 35,M,ASY,120,0,1,Normal,130,Y,1.2,Flat,1 408 | 57,M,ASY,140,0,1,Normal,100,Y,0,Flat,1 409 | 62,M,ASY,115,0,1,Normal,72,Y,-0.5,Flat,1 410 | 59,M,ASY,110,0,1,Normal,94,N,0,Flat,1 411 | 53,M,NAP,160,0,1,LVH,122,Y,0,Flat,1 412 | 62,M,ASY,150,0,1,ST,78,N,2,Flat,1 413 | 54,M,ASY,180,0,1,Normal,150,N,1.5,Flat,1 414 | 56,M,ASY,125,0,1,Normal,103,Y,1,Flat,1 415 | 56,M,NAP,125,0,1,Normal,98,N,-2,Flat,1 416 | 54,M,ASY,130,0,1,Normal,110,Y,3,Flat,1 417 | 66,F,ASY,155,0,1,Normal,90,N,0,Flat,1 418 | 63,M,ASY,140,260,0,ST,112,Y,3,Flat,1 419 | 44,M,ASY,130,209,0,ST,127,N,0,Up,0 420 | 60,M,ASY,132,218,0,ST,140,Y,1.5,Down,1 421 | 55,M,ASY,142,228,0,ST,149,Y,2.5,Up,1 422 | 66,M,NAP,110,213,1,LVH,99,Y,1.3,Flat,0 423 | 66,M,NAP,120,0,0,ST,120,N,-0.5,Up,0 424 | 65,M,ASY,150,236,1,ST,105,Y,0,Flat,1 425 | 60,M,NAP,180,0,0,ST,140,Y,1.5,Flat,0 426 | 60,M,NAP,120,0,1,Normal,141,Y,2,Up,1 427 | 60,M,ATA,160,267,1,ST,157,N,0.5,Flat,1 428 | 56,M,ATA,126,166,0,ST,140,N,0,Up,0 429 | 59,M,ASY,140,0,0,ST,117,Y,1,Flat,1 430 | 62,M,ASY,110,0,0,Normal,120,Y,0.5,Flat,1 431 | 63,M,NAP,133,0,0,LVH,120,Y,1,Flat,1 432 | 57,M,ASY,128,0,1,ST,148,Y,1,Flat,1 433 | 62,M,ASY,120,220,0,ST,86,N,0,Up,0 434 | 63,M,ASY,170,177,0,Normal,84,Y,2.5,Down,1 435 | 46,M,ASY,110,236,0,Normal,125,Y,2,Flat,1 436 | 63,M,ASY,126,0,0,ST,120,N,1.5,Down,0 437 | 60,M,ASY,152,0,0,ST,118,Y,0,Up,0 438 | 58,M,ASY,116,0,0,Normal,124,N,1,Up,1 439 | 64,M,ASY,120,0,1,ST,106,N,2,Flat,1 440 | 63,M,NAP,130,0,0,ST,111,Y,0,Flat,1 441 | 74,M,NAP,138,0,0,Normal,116,N,0.2,Up,0 442 | 52,M,NAP,128,0,0,ST,180,N,3,Up,1 443 | 69,M,ASY,130,0,1,ST,129,N,1,Flat,1 444 | 51,M,ASY,128,0,1,ST,125,Y,1.2,Flat,1 445 | 60,M,ASY,130,186,1,ST,140,Y,0.5,Flat,1 446 | 56,M,ASY,120,100,0,Normal,120,Y,1.5,Flat,1 447 | 55,M,NAP,136,228,0,ST,124,Y,1.6,Flat,1 448 | 54,M,ASY,130,0,0,ST,117,Y,1.4,Flat,1 449 | 77,M,ASY,124,171,0,ST,110,Y,2,Up,1 450 | 63,M,ASY,160,230,1,Normal,105,Y,1,Flat,1 451 | 55,M,NAP,0,0,0,Normal,155,N,1.5,Flat,1 452 | 52,M,NAP,122,0,0,Normal,110,Y,2,Down,1 453 | 64,M,ASY,144,0,0,ST,122,Y,1,Flat,1 454 | 60,M,ASY,140,281,0,ST,118,Y,1.5,Flat,1 455 | 60,M,ASY,120,0,0,Normal,133,Y,2,Up,0 456 | 58,M,ASY,136,203,1,Normal,123,Y,1.2,Flat,1 457 | 
59,M,ASY,154,0,0,ST,131,Y,1.5,Up,0 458 | 61,M,NAP,120,0,0,Normal,80,Y,0,Flat,1 459 | 40,M,ASY,125,0,1,Normal,165,N,0,Flat,1 460 | 61,M,ASY,134,0,1,ST,86,N,1.5,Flat,1 461 | 41,M,ASY,104,0,0,ST,111,N,0,Up,0 462 | 57,M,ASY,139,277,1,ST,118,Y,1.9,Flat,1 463 | 63,M,ASY,136,0,0,Normal,84,Y,0,Flat,1 464 | 59,M,ASY,122,233,0,Normal,117,Y,1.3,Down,1 465 | 51,M,ASY,128,0,0,Normal,107,N,0,Up,0 466 | 59,M,NAP,131,0,0,Normal,128,Y,2,Down,1 467 | 42,M,NAP,134,240,0,Normal,160,N,0,Up,0 468 | 55,M,NAP,120,0,0,ST,125,Y,2.5,Flat,1 469 | 63,F,ATA,132,0,0,Normal,130,N,0.1,Up,0 470 | 62,M,ASY,152,153,0,ST,97,Y,1.6,Up,1 471 | 56,M,ATA,124,224,1,Normal,161,N,2,Flat,0 472 | 53,M,ASY,126,0,0,Normal,106,N,0,Flat,1 473 | 68,M,ASY,138,0,0,Normal,130,Y,3,Flat,1 474 | 53,M,ASY,154,0,1,ST,140,Y,1.5,Flat,1 475 | 60,M,NAP,141,316,1,ST,122,Y,1.7,Flat,1 476 | 62,M,ATA,131,0,0,Normal,130,N,0.1,Up,0 477 | 59,M,ASY,178,0,1,LVH,120,Y,0,Flat,1 478 | 51,M,ASY,132,218,1,LVH,139,N,0.1,Up,0 479 | 61,M,ASY,110,0,1,Normal,108,Y,2,Down,1 480 | 57,M,ASY,130,311,1,ST,148,Y,2,Flat,1 481 | 56,M,NAP,170,0,0,LVH,123,Y,2.5,Flat,1 482 | 58,M,ATA,126,0,1,Normal,110,Y,2,Flat,1 483 | 69,M,NAP,140,0,1,ST,118,N,2.5,Down,1 484 | 67,M,TA,142,270,1,Normal,125,N,2.5,Up,1 485 | 58,M,ASY,120,0,0,LVH,106,Y,1.5,Down,1 486 | 65,M,ASY,134,0,0,Normal,112,Y,1.1,Flat,1 487 | 63,M,ATA,139,217,1,ST,128,Y,1.2,Flat,1 488 | 55,M,ATA,110,214,1,ST,180,N,0.4,Up,0 489 | 57,M,ASY,140,214,0,ST,144,Y,2,Flat,1 490 | 65,M,TA,140,252,0,Normal,135,N,0.3,Up,0 491 | 54,M,ASY,136,220,0,Normal,140,Y,3,Flat,1 492 | 72,M,NAP,120,214,0,Normal,102,Y,1,Flat,1 493 | 75,M,ASY,170,203,1,ST,108,N,0,Flat,1 494 | 49,M,TA,130,0,0,ST,145,N,3,Flat,1 495 | 51,M,NAP,137,339,0,Normal,127,Y,1.7,Flat,1 496 | 60,M,ASY,142,216,0,Normal,110,Y,2.5,Flat,1 497 | 64,F,ASY,142,276,0,Normal,140,Y,1,Flat,1 498 | 58,M,ASY,132,458,1,Normal,69,N,1,Down,0 499 | 61,M,ASY,146,241,0,Normal,148,Y,3,Down,1 500 | 67,M,ASY,160,384,1,ST,130,Y,0,Flat,1 501 | 62,M,ASY,135,297,0,Normal,130,Y,1,Flat,1 502 | 65,M,ASY,136,248,0,Normal,140,Y,4,Down,1 503 | 63,M,ASY,130,308,0,Normal,138,Y,2,Flat,1 504 | 69,M,ASY,140,208,0,ST,140,Y,2,Flat,1 505 | 51,M,ASY,132,227,1,ST,138,N,0.2,Up,0 506 | 62,M,ASY,158,210,1,Normal,112,Y,3,Down,1 507 | 55,M,NAP,136,245,1,ST,131,Y,1.2,Flat,1 508 | 75,M,ASY,136,225,0,Normal,112,Y,3,Flat,1 509 | 40,M,NAP,106,240,0,Normal,80,Y,0,Up,0 510 | 67,M,ASY,120,0,1,Normal,150,N,1.5,Down,1 511 | 58,M,ASY,110,198,0,Normal,110,N,0,Flat,1 512 | 60,M,ASY,136,195,0,Normal,126,N,0.3,Up,0 513 | 63,M,ASY,160,267,1,ST,88,Y,2,Flat,1 514 | 35,M,NAP,123,161,0,ST,153,N,-0.1,Up,0 515 | 62,M,TA,112,258,0,ST,150,Y,1.3,Flat,1 516 | 43,M,ASY,122,0,0,Normal,120,N,0.5,Up,1 517 | 63,M,NAP,130,0,1,ST,160,N,3,Flat,0 518 | 68,M,NAP,150,195,1,Normal,132,N,0,Flat,1 519 | 65,M,ASY,150,235,0,Normal,120,Y,1.5,Flat,1 520 | 48,M,NAP,102,0,1,ST,110,Y,1,Down,1 521 | 63,M,ASY,96,305,0,ST,121,Y,1,Up,1 522 | 64,M,ASY,130,223,0,ST,128,N,0.5,Flat,0 523 | 61,M,ASY,120,282,0,ST,135,Y,4,Down,1 524 | 50,M,ASY,144,349,0,LVH,120,Y,1,Up,1 525 | 59,M,ASY,124,160,0,Normal,117,Y,1,Flat,1 526 | 55,M,ASY,150,160,0,ST,150,N,0,Up,0 527 | 45,M,NAP,130,236,0,Normal,144,N,0.1,Up,0 528 | 65,M,ASY,144,312,0,LVH,113,Y,1.7,Flat,1 529 | 61,M,ATA,139,283,0,Normal,135,N,0.3,Up,0 530 | 49,M,NAP,131,142,0,Normal,127,Y,1.5,Flat,1 531 | 72,M,ASY,143,211,0,Normal,109,Y,1.4,Flat,1 532 | 50,M,ASY,133,218,0,Normal,128,Y,1.1,Flat,1 533 | 64,M,ASY,143,306,1,ST,115,Y,1.8,Flat,1 534 | 55,M,ASY,116,186,1,ST,102,N,0,Flat,1 535 | 63,M,ASY,110,252,0,ST,140,Y,2,Flat,1 536 | 
59,M,ASY,125,222,0,Normal,135,Y,2.5,Down,1 537 | 56,M,ASY,130,0,0,LVH,122,Y,1,Flat,1 538 | 62,M,NAP,133,0,1,ST,119,Y,1.2,Flat,1 539 | 74,M,ASY,150,258,1,ST,130,Y,4,Down,1 540 | 54,M,ASY,130,202,1,Normal,112,Y,2,Flat,1 541 | 57,M,ASY,110,197,0,LVH,100,N,0,Up,0 542 | 62,M,NAP,138,204,0,ST,122,Y,1.2,Flat,1 543 | 76,M,NAP,104,113,0,LVH,120,N,3.5,Down,1 544 | 54,F,ASY,138,274,0,Normal,105,Y,1.5,Flat,1 545 | 70,M,ASY,170,192,0,ST,129,Y,3,Down,1 546 | 61,F,ATA,140,298,1,Normal,120,Y,0,Up,0 547 | 48,M,ASY,132,272,0,ST,139,N,0.2,Up,0 548 | 48,M,NAP,132,220,1,ST,162,N,0,Flat,1 549 | 61,M,TA,142,200,1,ST,100,N,1.5,Down,1 550 | 66,M,ASY,112,261,0,Normal,140,N,1.5,Up,1 551 | 68,M,TA,139,181,1,ST,135,N,0.2,Up,0 552 | 55,M,ASY,172,260,0,Normal,73,N,2,Flat,1 553 | 62,M,NAP,120,220,0,LVH,86,N,0,Up,0 554 | 71,M,NAP,144,221,0,Normal,108,Y,1.8,Flat,1 555 | 74,M,TA,145,216,1,Normal,116,Y,1.8,Flat,1 556 | 53,M,NAP,155,175,1,ST,160,N,0.3,Up,0 557 | 58,M,NAP,150,219,0,ST,118,Y,0,Flat,1 558 | 75,M,ASY,160,310,1,Normal,112,Y,2,Down,0 559 | 56,M,NAP,137,208,1,ST,122,Y,1.8,Flat,1 560 | 58,M,NAP,137,232,0,ST,124,Y,1.4,Flat,1 561 | 64,M,ASY,134,273,0,Normal,102,Y,4,Down,1 562 | 54,M,NAP,133,203,0,ST,137,N,0.2,Up,0 563 | 54,M,ATA,132,182,0,ST,141,N,0.1,Up,0 564 | 59,M,ASY,140,274,0,Normal,154,Y,2,Flat,0 565 | 55,M,ASY,135,204,1,ST,126,Y,1.1,Flat,1 566 | 57,M,ASY,144,270,1,ST,160,Y,2,Flat,1 567 | 61,M,ASY,141,292,0,ST,115,Y,1.7,Flat,1 568 | 41,M,ASY,150,171,0,Normal,128,Y,1.5,Flat,0 569 | 71,M,ASY,130,221,0,ST,115,Y,0,Flat,1 570 | 38,M,ASY,110,289,0,Normal,105,Y,1.5,Down,1 571 | 55,M,ASY,158,217,0,Normal,110,Y,2.5,Flat,1 572 | 56,M,ASY,128,223,0,ST,119,Y,2,Down,1 573 | 69,M,ASY,140,110,1,Normal,109,Y,1.5,Flat,1 574 | 64,M,ASY,150,193,0,ST,135,Y,0.5,Flat,1 575 | 72,M,ASY,160,123,1,LVH,130,N,1.5,Flat,1 576 | 69,M,ASY,142,210,1,ST,112,Y,1.5,Flat,1 577 | 56,M,ASY,137,282,1,Normal,126,Y,1.2,Flat,1 578 | 62,M,ASY,139,170,0,ST,120,Y,3,Flat,1 579 | 67,M,ASY,146,369,0,Normal,110,Y,1.9,Flat,1 580 | 57,M,ASY,156,173,0,LVH,119,Y,3,Down,1 581 | 69,M,ASY,145,289,1,ST,110,Y,1.8,Flat,1 582 | 51,M,ASY,131,152,1,LVH,130,Y,1,Flat,1 583 | 48,M,ASY,140,208,0,Normal,159,Y,1.5,Up,1 584 | 69,M,ASY,122,216,1,LVH,84,Y,0,Flat,1 585 | 69,M,NAP,142,271,0,LVH,126,N,0.3,Up,0 586 | 64,M,ASY,141,244,1,ST,116,Y,1.5,Flat,1 587 | 57,M,ATA,180,285,1,ST,120,N,0.8,Flat,1 588 | 53,M,ASY,124,243,0,Normal,122,Y,2,Flat,1 589 | 37,M,NAP,118,240,0,LVH,165,N,1,Flat,0 590 | 67,M,ASY,140,219,0,ST,122,Y,2,Flat,1 591 | 74,M,NAP,140,237,1,Normal,94,N,0,Flat,1 592 | 63,M,ATA,136,165,0,ST,133,N,0.2,Up,0 593 | 58,M,ASY,100,213,0,ST,110,N,0,Up,0 594 | 61,M,ASY,190,287,1,LVH,150,Y,2,Down,1 595 | 64,M,ASY,130,258,1,LVH,130,N,0,Flat,1 596 | 58,M,ASY,160,256,1,LVH,113,Y,1,Up,1 597 | 60,M,ASY,130,186,1,LVH,140,Y,0.5,Flat,1 598 | 57,M,ASY,122,264,0,LVH,100,N,0,Flat,1 599 | 55,M,NAP,133,185,0,ST,136,N,0.2,Up,0 600 | 55,M,ASY,120,226,0,LVH,127,Y,1.7,Down,1 601 | 56,M,ASY,130,203,1,Normal,98,N,1.5,Flat,1 602 | 57,M,ASY,130,207,0,ST,96,Y,1,Flat,0 603 | 61,M,NAP,140,284,0,Normal,123,Y,1.3,Flat,1 604 | 61,M,NAP,120,337,0,Normal,98,Y,0,Flat,1 605 | 74,M,ASY,155,310,0,Normal,112,Y,1.5,Down,1 606 | 68,M,NAP,134,254,1,Normal,151,Y,0,Up,0 607 | 51,F,ASY,114,258,1,LVH,96,N,1,Up,0 608 | 62,M,ASY,160,254,1,ST,108,Y,3,Flat,1 609 | 53,M,ASY,144,300,1,ST,128,Y,1.5,Flat,1 610 | 62,M,ASY,158,170,0,ST,138,Y,0,Flat,1 611 | 46,M,ASY,134,310,0,Normal,126,N,0,Flat,1 612 | 54,F,ASY,127,333,1,ST,154,N,0,Flat,1 613 | 62,M,TA,135,139,0,ST,137,N,0.2,Up,0 614 | 55,M,ASY,122,223,1,ST,100,N,0,Flat,1 615 | 
58,M,ASY,140,385,1,LVH,135,N,0.3,Up,0 616 | 62,M,ATA,120,254,0,LVH,93,Y,0,Flat,1 617 | 70,M,ASY,130,322,0,LVH,109,N,2.4,Flat,1 618 | 67,F,NAP,115,564,0,LVH,160,N,1.6,Flat,0 619 | 57,M,ATA,124,261,0,Normal,141,N,0.3,Up,1 620 | 64,M,ASY,128,263,0,Normal,105,Y,0.2,Flat,0 621 | 74,F,ATA,120,269,0,LVH,121,Y,0.2,Up,0 622 | 65,M,ASY,120,177,0,Normal,140,N,0.4,Up,0 623 | 56,M,NAP,130,256,1,LVH,142,Y,0.6,Flat,1 624 | 59,M,ASY,110,239,0,LVH,142,Y,1.2,Flat,1 625 | 60,M,ASY,140,293,0,LVH,170,N,1.2,Flat,1 626 | 63,F,ASY,150,407,0,LVH,154,N,4,Flat,1 627 | 59,M,ASY,135,234,0,Normal,161,N,0.5,Flat,0 628 | 53,M,ASY,142,226,0,LVH,111,Y,0,Up,0 629 | 44,M,NAP,140,235,0,LVH,180,N,0,Up,0 630 | 61,M,TA,134,234,0,Normal,145,N,2.6,Flat,1 631 | 57,F,ASY,128,303,0,LVH,159,N,0,Up,0 632 | 71,F,ASY,112,149,0,Normal,125,N,1.6,Flat,0 633 | 46,M,ASY,140,311,0,Normal,120,Y,1.8,Flat,1 634 | 53,M,ASY,140,203,1,LVH,155,Y,3.1,Down,1 635 | 64,M,TA,110,211,0,LVH,144,Y,1.8,Flat,0 636 | 40,M,TA,140,199,0,Normal,178,Y,1.4,Up,0 637 | 67,M,ASY,120,229,0,LVH,129,Y,2.6,Flat,1 638 | 48,M,ATA,130,245,0,LVH,180,N,0.2,Flat,0 639 | 43,M,ASY,115,303,0,Normal,181,N,1.2,Flat,0 640 | 47,M,ASY,112,204,0,Normal,143,N,0.1,Up,0 641 | 54,F,ATA,132,288,1,LVH,159,Y,0,Up,0 642 | 48,F,NAP,130,275,0,Normal,139,N,0.2,Up,0 643 | 46,F,ASY,138,243,0,LVH,152,Y,0,Flat,0 644 | 51,F,NAP,120,295,0,LVH,157,N,0.6,Up,0 645 | 58,M,NAP,112,230,0,LVH,165,N,2.5,Flat,1 646 | 71,F,NAP,110,265,1,LVH,130,N,0,Up,0 647 | 57,M,NAP,128,229,0,LVH,150,N,0.4,Flat,1 648 | 66,M,ASY,160,228,0,LVH,138,N,2.3,Up,0 649 | 37,F,NAP,120,215,0,Normal,170,N,0,Up,0 650 | 59,M,ASY,170,326,0,LVH,140,Y,3.4,Down,1 651 | 50,M,ASY,144,200,0,LVH,126,Y,0.9,Flat,1 652 | 48,M,ASY,130,256,1,LVH,150,Y,0,Up,1 653 | 61,M,ASY,140,207,0,LVH,138,Y,1.9,Up,1 654 | 59,M,TA,160,273,0,LVH,125,N,0,Up,1 655 | 42,M,NAP,130,180,0,Normal,150,N,0,Up,0 656 | 48,M,ASY,122,222,0,LVH,186,N,0,Up,0 657 | 40,M,ASY,152,223,0,Normal,181,N,0,Up,1 658 | 62,F,ASY,124,209,0,Normal,163,N,0,Up,0 659 | 44,M,NAP,130,233,0,Normal,179,Y,0.4,Up,0 660 | 46,M,ATA,101,197,1,Normal,156,N,0,Up,0 661 | 59,M,NAP,126,218,1,Normal,134,N,2.2,Flat,1 662 | 58,M,NAP,140,211,1,LVH,165,N,0,Up,0 663 | 49,M,NAP,118,149,0,LVH,126,N,0.8,Up,1 664 | 44,M,ASY,110,197,0,LVH,177,N,0,Up,1 665 | 66,M,ATA,160,246,0,Normal,120,Y,0,Flat,1 666 | 65,F,ASY,150,225,0,LVH,114,N,1,Flat,1 667 | 42,M,ASY,136,315,0,Normal,125,Y,1.8,Flat,1 668 | 52,M,ATA,128,205,1,Normal,184,N,0,Up,0 669 | 65,F,NAP,140,417,1,LVH,157,N,0.8,Up,0 670 | 63,F,ATA,140,195,0,Normal,179,N,0,Up,0 671 | 45,F,ATA,130,234,0,LVH,175,N,0.6,Flat,0 672 | 41,F,ATA,105,198,0,Normal,168,N,0,Up,0 673 | 61,M,ASY,138,166,0,LVH,125,Y,3.6,Flat,1 674 | 60,F,NAP,120,178,1,Normal,96,N,0,Up,0 675 | 59,F,ASY,174,249,0,Normal,143,Y,0,Flat,1 676 | 62,M,ATA,120,281,0,LVH,103,N,1.4,Flat,1 677 | 57,M,NAP,150,126,1,Normal,173,N,0.2,Up,0 678 | 51,F,ASY,130,305,0,Normal,142,Y,1.2,Flat,1 679 | 44,M,NAP,120,226,0,Normal,169,N,0,Up,0 680 | 60,F,TA,150,240,0,Normal,171,N,0.9,Up,0 681 | 63,M,TA,145,233,1,LVH,150,N,2.3,Down,0 682 | 57,M,ASY,150,276,0,LVH,112,Y,0.6,Flat,1 683 | 51,M,ASY,140,261,0,LVH,186,Y,0,Up,0 684 | 58,F,ATA,136,319,1,LVH,152,N,0,Up,1 685 | 44,F,NAP,118,242,0,Normal,149,N,0.3,Flat,0 686 | 47,M,NAP,108,243,0,Normal,152,N,0,Up,1 687 | 61,M,ASY,120,260,0,Normal,140,Y,3.6,Flat,1 688 | 57,F,ASY,120,354,0,Normal,163,Y,0.6,Up,0 689 | 70,M,ATA,156,245,0,LVH,143,N,0,Up,0 690 | 76,F,NAP,140,197,0,ST,116,N,1.1,Flat,0 691 | 67,F,ASY,106,223,0,Normal,142,N,0.3,Up,0 692 | 45,M,ASY,142,309,0,LVH,147,Y,0,Flat,1 693 | 
45,M,ASY,104,208,0,LVH,148,Y,3,Flat,0 694 | 39,F,NAP,94,199,0,Normal,179,N,0,Up,0 695 | 42,F,NAP,120,209,0,Normal,173,N,0,Flat,0 696 | 56,M,ATA,120,236,0,Normal,178,N,0.8,Up,0 697 | 58,M,ASY,146,218,0,Normal,105,N,2,Flat,1 698 | 35,M,ASY,120,198,0,Normal,130,Y,1.6,Flat,1 699 | 58,M,ASY,150,270,0,LVH,111,Y,0.8,Up,1 700 | 41,M,NAP,130,214,0,LVH,168,N,2,Flat,0 701 | 57,M,ASY,110,201,0,Normal,126,Y,1.5,Flat,0 702 | 42,M,TA,148,244,0,LVH,178,N,0.8,Up,0 703 | 62,M,ATA,128,208,1,LVH,140,N,0,Up,0 704 | 59,M,TA,178,270,0,LVH,145,N,4.2,Down,0 705 | 41,F,ATA,126,306,0,Normal,163,N,0,Up,0 706 | 50,M,ASY,150,243,0,LVH,128,N,2.6,Flat,1 707 | 59,M,ATA,140,221,0,Normal,164,Y,0,Up,0 708 | 61,F,ASY,130,330,0,LVH,169,N,0,Up,1 709 | 54,M,ASY,124,266,0,LVH,109,Y,2.2,Flat,1 710 | 54,M,ASY,110,206,0,LVH,108,Y,0,Flat,1 711 | 52,M,ASY,125,212,0,Normal,168,N,1,Up,1 712 | 47,M,ASY,110,275,0,LVH,118,Y,1,Flat,1 713 | 66,M,ASY,120,302,0,LVH,151,N,0.4,Flat,0 714 | 58,M,ASY,100,234,0,Normal,156,N,0.1,Up,1 715 | 64,F,NAP,140,313,0,Normal,133,N,0.2,Up,0 716 | 50,F,ATA,120,244,0,Normal,162,N,1.1,Up,0 717 | 44,F,NAP,108,141,0,Normal,175,N,0.6,Flat,0 718 | 67,M,ASY,120,237,0,Normal,71,N,1,Flat,1 719 | 49,F,ASY,130,269,0,Normal,163,N,0,Up,0 720 | 57,M,ASY,165,289,1,LVH,124,N,1,Flat,1 721 | 63,M,ASY,130,254,0,LVH,147,N,1.4,Flat,1 722 | 48,M,ASY,124,274,0,LVH,166,N,0.5,Flat,1 723 | 51,M,NAP,100,222,0,Normal,143,Y,1.2,Flat,0 724 | 60,F,ASY,150,258,0,LVH,157,N,2.6,Flat,1 725 | 59,M,ASY,140,177,0,Normal,162,Y,0,Up,1 726 | 45,F,ATA,112,160,0,Normal,138,N,0,Flat,0 727 | 55,F,ASY,180,327,0,ST,117,Y,3.4,Flat,1 728 | 41,M,ATA,110,235,0,Normal,153,N,0,Up,0 729 | 60,F,ASY,158,305,0,LVH,161,N,0,Up,1 730 | 54,F,NAP,135,304,1,Normal,170,N,0,Up,0 731 | 42,M,ATA,120,295,0,Normal,162,N,0,Up,0 732 | 49,F,ATA,134,271,0,Normal,162,N,0,Flat,0 733 | 46,M,ASY,120,249,0,LVH,144,N,0.8,Up,1 734 | 56,F,ASY,200,288,1,LVH,133,Y,4,Down,1 735 | 66,F,TA,150,226,0,Normal,114,N,2.6,Down,0 736 | 56,M,ASY,130,283,1,LVH,103,Y,1.6,Down,1 737 | 49,M,NAP,120,188,0,Normal,139,N,2,Flat,1 738 | 54,M,ASY,122,286,0,LVH,116,Y,3.2,Flat,1 739 | 57,M,ASY,152,274,0,Normal,88,Y,1.2,Flat,1 740 | 65,F,NAP,160,360,0,LVH,151,N,0.8,Up,0 741 | 54,M,NAP,125,273,0,LVH,152,N,0.5,Down,0 742 | 54,F,NAP,160,201,0,Normal,163,N,0,Up,0 743 | 62,M,ASY,120,267,0,Normal,99,Y,1.8,Flat,1 744 | 52,F,NAP,136,196,0,LVH,169,N,0.1,Flat,0 745 | 52,M,ATA,134,201,0,Normal,158,N,0.8,Up,0 746 | 60,M,ASY,117,230,1,Normal,160,Y,1.4,Up,1 747 | 63,F,ASY,108,269,0,Normal,169,Y,1.8,Flat,1 748 | 66,M,ASY,112,212,0,LVH,132,Y,0.1,Up,1 749 | 42,M,ASY,140,226,0,Normal,178,N,0,Up,0 750 | 64,M,ASY,120,246,0,LVH,96,Y,2.2,Down,1 751 | 54,M,NAP,150,232,0,LVH,165,N,1.6,Up,0 752 | 46,F,NAP,142,177,0,LVH,160,Y,1.4,Down,0 753 | 67,F,NAP,152,277,0,Normal,172,N,0,Up,0 754 | 56,M,ASY,125,249,1,LVH,144,Y,1.2,Flat,1 755 | 34,F,ATA,118,210,0,Normal,192,N,0.7,Up,0 756 | 57,M,ASY,132,207,0,Normal,168,Y,0,Up,0 757 | 64,M,ASY,145,212,0,LVH,132,N,2,Flat,1 758 | 59,M,ASY,138,271,0,LVH,182,N,0,Up,0 759 | 50,M,NAP,140,233,0,Normal,163,N,0.6,Flat,1 760 | 51,M,TA,125,213,0,LVH,125,Y,1.4,Up,0 761 | 54,M,ATA,192,283,0,LVH,195,N,0,Up,1 762 | 53,M,ASY,123,282,0,Normal,95,Y,2,Flat,1 763 | 52,M,ASY,112,230,0,Normal,160,N,0,Up,1 764 | 40,M,ASY,110,167,0,LVH,114,Y,2,Flat,1 765 | 58,M,NAP,132,224,0,LVH,173,N,3.2,Up,1 766 | 41,F,NAP,112,268,0,LVH,172,Y,0,Up,0 767 | 41,M,NAP,112,250,0,Normal,179,N,0,Up,0 768 | 50,F,NAP,120,219,0,Normal,158,N,1.6,Flat,0 769 | 54,F,NAP,108,267,0,LVH,167,N,0,Up,0 770 | 64,F,ASY,130,303,0,Normal,122,N,2,Flat,0 771 | 
51,F,NAP,130,256,0,LVH,149,N,0.5,Up,0 772 | 46,F,ATA,105,204,0,Normal,172,N,0,Up,0 773 | 55,M,ASY,140,217,0,Normal,111,Y,5.6,Down,1 774 | 45,M,ATA,128,308,0,LVH,170,N,0,Up,0 775 | 56,M,TA,120,193,0,LVH,162,N,1.9,Flat,0 776 | 66,F,ASY,178,228,1,Normal,165,Y,1,Flat,1 777 | 38,M,TA,120,231,0,Normal,182,Y,3.8,Flat,1 778 | 62,F,ASY,150,244,0,Normal,154,Y,1.4,Flat,1 779 | 55,M,ATA,130,262,0,Normal,155,N,0,Up,0 780 | 58,M,ASY,128,259,0,LVH,130,Y,3,Flat,1 781 | 43,M,ASY,110,211,0,Normal,161,N,0,Up,0 782 | 64,F,ASY,180,325,0,Normal,154,Y,0,Up,0 783 | 50,F,ASY,110,254,0,LVH,159,N,0,Up,0 784 | 53,M,NAP,130,197,1,LVH,152,N,1.2,Down,0 785 | 45,F,ASY,138,236,0,LVH,152,Y,0.2,Flat,0 786 | 65,M,TA,138,282,1,LVH,174,N,1.4,Flat,1 787 | 69,M,TA,160,234,1,LVH,131,N,0.1,Flat,0 788 | 69,M,NAP,140,254,0,LVH,146,N,2,Flat,1 789 | 67,M,ASY,100,299,0,LVH,125,Y,0.9,Flat,1 790 | 68,F,NAP,120,211,0,LVH,115,N,1.5,Flat,0 791 | 34,M,TA,118,182,0,LVH,174,N,0,Up,0 792 | 62,F,ASY,138,294,1,Normal,106,N,1.9,Flat,1 793 | 51,M,ASY,140,298,0,Normal,122,Y,4.2,Flat,1 794 | 46,M,NAP,150,231,0,Normal,147,N,3.6,Flat,1 795 | 67,M,ASY,125,254,1,Normal,163,N,0.2,Flat,1 796 | 50,M,NAP,129,196,0,Normal,163,N,0,Up,0 797 | 42,M,NAP,120,240,1,Normal,194,N,0.8,Down,0 798 | 56,F,ASY,134,409,0,LVH,150,Y,1.9,Flat,1 799 | 41,M,ASY,110,172,0,LVH,158,N,0,Up,1 800 | 42,F,ASY,102,265,0,LVH,122,N,0.6,Flat,0 801 | 53,M,NAP,130,246,1,LVH,173,N,0,Up,0 802 | 43,M,NAP,130,315,0,Normal,162,N,1.9,Up,0 803 | 56,M,ASY,132,184,0,LVH,105,Y,2.1,Flat,1 804 | 52,M,ASY,108,233,1,Normal,147,N,0.1,Up,0 805 | 62,F,ASY,140,394,0,LVH,157,N,1.2,Flat,0 806 | 70,M,NAP,160,269,0,Normal,112,Y,2.9,Flat,1 807 | 54,M,ASY,140,239,0,Normal,160,N,1.2,Up,0 808 | 70,M,ASY,145,174,0,Normal,125,Y,2.6,Down,1 809 | 54,M,ATA,108,309,0,Normal,156,N,0,Up,0 810 | 35,M,ASY,126,282,0,LVH,156,Y,0,Up,1 811 | 48,M,NAP,124,255,1,Normal,175,N,0,Up,0 812 | 55,F,ATA,135,250,0,LVH,161,N,1.4,Flat,0 813 | 58,F,ASY,100,248,0,LVH,122,N,1,Flat,0 814 | 54,F,NAP,110,214,0,Normal,158,N,1.6,Flat,0 815 | 69,F,TA,140,239,0,Normal,151,N,1.8,Up,0 816 | 77,M,ASY,125,304,0,LVH,162,Y,0,Up,1 817 | 68,M,NAP,118,277,0,Normal,151,N,1,Up,0 818 | 58,M,ASY,125,300,0,LVH,171,N,0,Up,1 819 | 60,M,ASY,125,258,0,LVH,141,Y,2.8,Flat,1 820 | 51,M,ASY,140,299,0,Normal,173,Y,1.6,Up,1 821 | 55,M,ASY,160,289,0,LVH,145,Y,0.8,Flat,1 822 | 52,M,TA,152,298,1,Normal,178,N,1.2,Flat,0 823 | 60,F,NAP,102,318,0,Normal,160,N,0,Up,0 824 | 58,M,NAP,105,240,0,LVH,154,Y,0.6,Flat,0 825 | 64,M,NAP,125,309,0,Normal,131,Y,1.8,Flat,1 826 | 37,M,NAP,130,250,0,Normal,187,N,3.5,Down,0 827 | 59,M,TA,170,288,0,LVH,159,N,0.2,Flat,1 828 | 51,M,NAP,125,245,1,LVH,166,N,2.4,Flat,0 829 | 43,F,NAP,122,213,0,Normal,165,N,0.2,Flat,0 830 | 58,M,ASY,128,216,0,LVH,131,Y,2.2,Flat,1 831 | 29,M,ATA,130,204,0,LVH,202,N,0,Up,0 832 | 41,F,ATA,130,204,0,LVH,172,N,1.4,Up,0 833 | 63,F,NAP,135,252,0,LVH,172,N,0,Up,0 834 | 51,M,NAP,94,227,0,Normal,154,Y,0,Up,0 835 | 54,M,NAP,120,258,0,LVH,147,N,0.4,Flat,0 836 | 44,M,ATA,120,220,0,Normal,170,N,0,Up,0 837 | 54,M,ASY,110,239,0,Normal,126,Y,2.8,Flat,1 838 | 65,M,ASY,135,254,0,LVH,127,N,2.8,Flat,1 839 | 57,M,NAP,150,168,0,Normal,174,N,1.6,Up,0 840 | 63,M,ASY,130,330,1,LVH,132,Y,1.8,Up,1 841 | 35,F,ASY,138,183,0,Normal,182,N,1.4,Up,0 842 | 41,M,ATA,135,203,0,Normal,132,N,0,Flat,0 843 | 62,F,NAP,130,263,0,Normal,97,N,1.2,Flat,1 844 | 43,F,ASY,132,341,1,LVH,136,Y,3,Flat,1 845 | 58,F,TA,150,283,1,LVH,162,N,1,Up,0 846 | 52,M,TA,118,186,0,LVH,190,N,0,Flat,0 847 | 61,F,ASY,145,307,0,LVH,146,Y,1,Flat,1 848 | 
39,M,ASY,118,219,0,Normal,140,N,1.2,Flat,1 849 | 45,M,ASY,115,260,0,LVH,185,N,0,Up,0 850 | 52,M,ASY,128,255,0,Normal,161,Y,0,Up,1 851 | 62,M,NAP,130,231,0,Normal,146,N,1.8,Flat,0 852 | 62,F,ASY,160,164,0,LVH,145,N,6.2,Down,1 853 | 53,F,ASY,138,234,0,LVH,160,N,0,Up,0 854 | 43,M,ASY,120,177,0,LVH,120,Y,2.5,Flat,1 855 | 47,M,NAP,138,257,0,LVH,156,N,0,Up,0 856 | 52,M,ATA,120,325,0,Normal,172,N,0.2,Up,0 857 | 68,M,NAP,180,274,1,LVH,150,Y,1.6,Flat,1 858 | 39,M,NAP,140,321,0,LVH,182,N,0,Up,0 859 | 53,F,ASY,130,264,0,LVH,143,N,0.4,Flat,0 860 | 62,F,ASY,140,268,0,LVH,160,N,3.6,Down,1 861 | 51,F,NAP,140,308,0,LVH,142,N,1.5,Up,0 862 | 60,M,ASY,130,253,0,Normal,144,Y,1.4,Up,1 863 | 65,M,ASY,110,248,0,LVH,158,N,0.6,Up,1 864 | 65,F,NAP,155,269,0,Normal,148,N,0.8,Up,0 865 | 60,M,NAP,140,185,0,LVH,155,N,3,Flat,1 866 | 60,M,ASY,145,282,0,LVH,142,Y,2.8,Flat,1 867 | 54,M,ASY,120,188,0,Normal,113,N,1.4,Flat,1 868 | 44,M,ATA,130,219,0,LVH,188,N,0,Up,0 869 | 44,M,ASY,112,290,0,LVH,153,N,0,Up,1 870 | 51,M,NAP,110,175,0,Normal,123,N,0.6,Up,0 871 | 59,M,NAP,150,212,1,Normal,157,N,1.6,Up,0 872 | 71,F,ATA,160,302,0,Normal,162,N,0.4,Up,0 873 | 61,M,NAP,150,243,1,Normal,137,Y,1,Flat,0 874 | 55,M,ASY,132,353,0,Normal,132,Y,1.2,Flat,1 875 | 64,M,NAP,140,335,0,Normal,158,N,0,Up,1 876 | 43,M,ASY,150,247,0,Normal,171,N,1.5,Up,0 877 | 58,F,NAP,120,340,0,Normal,172,N,0,Up,0 878 | 60,M,ASY,130,206,0,LVH,132,Y,2.4,Flat,1 879 | 58,M,ATA,120,284,0,LVH,160,N,1.8,Flat,1 880 | 49,M,ATA,130,266,0,Normal,171,N,0.6,Up,0 881 | 48,M,ATA,110,229,0,Normal,168,N,1,Down,1 882 | 52,M,NAP,172,199,1,Normal,162,N,0.5,Up,0 883 | 44,M,ATA,120,263,0,Normal,173,N,0,Up,0 884 | 56,F,ATA,140,294,0,LVH,153,N,1.3,Flat,0 885 | 57,M,ASY,140,192,0,Normal,148,N,0.4,Flat,0 886 | 67,M,ASY,160,286,0,LVH,108,Y,1.5,Flat,1 887 | 53,F,NAP,128,216,0,LVH,115,N,0,Up,0 888 | 52,M,NAP,138,223,0,Normal,169,N,0,Up,0 889 | 43,M,ASY,132,247,1,LVH,143,Y,0.1,Flat,1 890 | 52,M,ASY,128,204,1,Normal,156,Y,1,Flat,1 891 | 59,M,TA,134,204,0,Normal,162,N,0.8,Up,1 892 | 64,M,TA,170,227,0,LVH,155,N,0.6,Flat,0 893 | 66,F,NAP,146,278,0,LVH,152,N,0,Flat,0 894 | 39,F,NAP,138,220,0,Normal,152,N,0,Flat,0 895 | 57,M,ATA,154,232,0,LVH,164,N,0,Up,1 896 | 58,F,ASY,130,197,0,Normal,131,N,0.6,Flat,0 897 | 57,M,ASY,110,335,0,Normal,143,Y,3,Flat,1 898 | 47,M,NAP,130,253,0,Normal,179,N,0,Up,0 899 | 55,F,ASY,128,205,0,ST,130,Y,2,Flat,1 900 | 35,M,ATA,122,192,0,Normal,174,N,0,Up,0 901 | 61,M,ASY,148,203,0,Normal,161,N,0,Up,1 902 | 58,M,ASY,114,318,0,ST,140,N,4.4,Down,1 903 | 58,F,ASY,170,225,1,LVH,146,Y,2.8,Flat,1 904 | 58,M,ATA,125,220,0,Normal,144,N,0.4,Flat,0 905 | 56,M,ATA,130,221,0,LVH,163,N,0,Up,0 906 | 56,M,ATA,120,240,0,Normal,169,N,0,Down,0 907 | 67,M,NAP,152,212,0,LVH,150,N,0.8,Flat,1 908 | 55,F,ATA,132,342,0,Normal,166,N,1.2,Up,0 909 | 44,M,ASY,120,169,0,Normal,144,Y,2.8,Down,1 910 | 63,M,ASY,140,187,0,LVH,144,Y,4,Up,1 911 | 63,F,ASY,124,197,0,Normal,136,Y,0,Flat,1 912 | 41,M,ATA,120,157,0,Normal,182,N,0,Up,0 913 | 59,M,ASY,164,176,1,LVH,90,N,1,Flat,1 914 | 57,F,ASY,140,241,0,Normal,123,Y,0.2,Flat,1 915 | 45,M,TA,110,264,0,Normal,132,N,1.2,Flat,1 916 | 68,M,ASY,144,193,1,Normal,141,N,3.4,Flat,1 917 | 57,M,ASY,130,131,0,Normal,115,Y,1.2,Flat,1 918 | 57,F,ATA,130,236,0,LVH,174,N,0,Flat,1 919 | 38,M,NAP,138,175,0,Normal,173,N,0,Up,0 920 | -------------------------------------------------------------------------------- /datacleaning/data_cleaning.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import pandas as pd 4 | import 
numpy as np 5 | import matplotlib.pyplot as plt 6 | import seaborn as sns 7 | from scipy import stats 8 | 9 | movies_df = pd.read_csv("movie_metadata.csv") 10 | #print movies_df.head(3) 11 | #print movies_df.describe() 12 | # check the column names 13 | #print "column names: ", movies_df.columns.values 14 | #print movies_df.dtypes 15 | #print "data-frame shape: ", movies_df.shape # (5043, 28) 16 | 17 | # check null data 18 | #print "null values: \n", 19 | #print movies_df.isna() # same as movies_df.isnull().sum() 20 | #print movies_df.isna().sum() 21 | #print "null values", movies_df.isnull().values.any() 22 | #print "total null values", movies_df.isna().sum().sum() 23 | #print movies_df.describe() 24 | 25 | #clean_movies_df = movies_df.dropna(how='any') 26 | movies_df = movies_df.dropna(how='any') 27 | 28 | #print "new dataframe shape: ", clean_movies_df.shape # (3756, 28) 29 | #print "old dataframe shape: ", movies_df.shape 30 | # use the fillna method of pandas 31 | # for filling up missing values in all columns 32 | #movies_df.fillna(value=0, inplace=True) 33 | 34 | # for some specific columns we can just select those columns 35 | # movies_df[['gross', 'budget']]=movies_df[['gross', 'budget']].fillna(value=0) 36 | # filling with the mean 37 | #movies_df['budget'].fillna(movies_df['budget'].mean(), inplace=True) 38 | 39 | # for this dataframe we can use 'missing' in the columns of object data types, for example language or movie_imdb_link 40 | 41 | #movies_df['language'].fillna("no info", inplace=True) 42 | 43 | #print "null values", movies_df.isna().sum() # now no missing values in the language column! 44 | 45 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 46 | 47 | # check if there are duplicate rows or not 48 | 49 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 50 | 51 | #duplicate_rows_df = movies_df[movies_df.duplicated()] 52 | #print "number of duplicate rows: ", duplicate_rows_df.shape 53 | 54 | 55 | #print duplicate_rows_df.head(6) 56 | #print movies_df['movie_imdb_link'].head(3) 57 | 58 | #duplicate_rows_df_imdb_link = movies_df[movies_df.duplicated(['movie_imdb_link'])] 59 | #print duplicate_rows_df_imdb_link.head(3) 60 | #print "shape of duplicate dataframe with same imdb link", duplicate_rows_df_imdb_link.shape 61 | 62 | #print len(movies_df.movie_imdb_link.unique()) 63 | # select duplicate rows except first occurrences, consider all columns 64 | #duplicate_rows_df = movies_df[movies_df.duplicated()] 65 | #print type(duplicate_rows_df) # dataframe 66 | #print duplicate_rows_df.shape 67 | 68 | 69 | #++++++++++++++++++++++++++++++++ 70 | # drop_duplicates 71 | #++++++++++++++++++++++++++++++++ 72 | 73 | # Drop duplicate rows (duplicate values for all entries) 74 | #print "shape of dataframe before dropping duplicates", movies_df.shape 75 | #print "shape of dataframe after dropping duplicates", movies_df.drop_duplicates().shape 76 | 77 | 78 | #+++++++++++++++++++++++++++++++++++++++++++++++ 79 | #+ discretization or binning 80 | #+++++++++++++++++++++++++++++++++++++++++++++++ 81 | 82 | #print movies_df['imdb_score'][5:10] 83 | # check the minimum value of all columns 84 | #print "minimum values of all columns:", 85 | #print '\n' 86 | #print movies_df.min() 87 | 88 | #print movies_df['imdb_score'].idxmax() 89 | #print movies_df.loc[movies_df['imdb_score'].idxmax(), 'movie_title'] 90 | #print movies_df['movie_title'].loc[2764:2767] 91 | #print movies_df['budget'].idxmax() 92 | #print
movies_df[['movie_title','budget']].loc[2986:2990] 93 | 94 | # check the distribution of imdb score 95 | #fig = plt.figure(figsize=(10,7)) 96 | #sns.distplot(movies_df['imdb_score']) 97 | #plt.xlabel('IMDB Score', fontsize=12) 98 | #sns.jointplot(x='budget', y='imdb_score', data=movies_df); 99 | #plt.show() 100 | 101 | 102 | # based on the 'imdb_score' we will discretize the movies into 3 categories ['shyte', 'moderate', 'good'] 103 | # similar to the pd.cut method described in McKinney's book 104 | #op_labels = ['shyte', 'moderate', 'good'] 105 | #category = [0.,4.,7.,10.] 106 | #movies_df['imdb_labels'] = pd.cut(movies_df['imdb_score'], labels=op_labels, bins=category, include_lowest=False) 107 | #print movies_df[['movie_title', 'imdb_score', 'imdb_labels']][209:220] 108 | 109 | 110 | 111 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++ 112 | #_ Removing outliers from the dataframe 113 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++ 114 | 115 | #___________________________________________ 116 | # First check with box plot 117 | #___________________________________________ 118 | 119 | # let's try to plot a boxplot with seaborn 120 | #sns.boxplot(x=movies_df['facenumber_in_poster'], color='lime') 121 | #plt.xlabel('No. of Actors Featured in Poster', fontsize=14) 122 | #plt.show() 123 | 124 | #print "min no", movies_df['facenumber_in_poster'].min() 125 | #print "max no: index", movies_df['facenumber_in_poster'].idxmax() 126 | #print movies_df[['movie_title', 'facenumber_in_poster']].iloc[movies_df['facenumber_in_poster'].idxmax()] 127 | #print movies_df['facenumber_in_poster'].describe() 128 | # the budget column varies massively, so we need to drop outliers 129 | # a good option is to use the z-score method and reject outlier rows across all non-object columns 130 | 131 | 132 | #____________________________________________________________________________________ 133 | # Use Z Score from Scipy Stats 134 | # https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.zscore.html 135 | #____________________________________________________________________________________ 136 | 137 | 138 | # we will first try the z-score method 139 | # the z-score measures how many standard deviations a point is from the mean; the closer z is to 0, the closer the point is to the mean 140 | 141 | # first detect outliers for a particular column 142 | 143 | 144 | #print "data types: \n", movies_df.dtypes 145 | #print "shape before :", movies_df.shape 146 | 147 | movies_df_num = movies_df.select_dtypes(exclude=['object']) 148 | #print "shape after excluding object columns: ", movies_df_num.shape 149 | # 12 object type columns were dropped. 150 | 151 | # use the z-score for all columns in the new data frame 152 | movies_df_Zscore = movies_df_num[(np.abs(stats.zscore(movies_df_num))<=3).all(axis=1)] 153 | #print "shape after rejecting outliers: ", movies_df_Zscore.shape 154 | movies_df_Zscore_usr_rev = movies_df_num[(np.abs(stats.zscore(movies_df_num[['num_user_for_reviews']]))<=3).all(axis=1)] 155 | print(type(movies_df_Zscore_usr_rev)) 156 | 157 | #fig = plt.figure(figsize=(12,8)) 158 | #plt.subplot(1,2,1) 159 | #sns.boxplot(x=movies_df['num_user_for_reviews'], color='lime') 160 | #plt.xlabel('No.
of Users to Review', fontsize=14) 161 | #plt.subplot(1,2,2) 162 | #sns.distplot(movies_df_Zscore['num_user_for_reviews'], color='lime') 163 | #plt.tight_layout() 164 | #plt.show() 165 | 166 | 167 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 168 | #+ use numpy 169 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 170 | 171 | movies_num_usr_rev= movies_df[np.abs(movies_df.num_user_for_reviews-movies_df.num_user_for_reviews.mean()) <= (3*movies_df.num_user_for_reviews.std())] 172 | print(type(movies_num_usr_rev)) 173 | print(movies_num_usr_rev.head(3)) 174 | 175 | fig = plt.figure(figsize=(12,8)) 176 | plt.subplot(1,2,1) 177 | sns.distplot(movies_num_usr_rev['num_user_for_reviews'], color='lime') 178 | plt.xlabel('No. of Users to Review (Numpy only on "num_user_for_reviews")', fontsize=13) 179 | plt.subplot(1,2,2) 180 | sns.distplot(movies_df_Zscore_usr_rev['num_user_for_reviews'], color='lime') 181 | plt.xlabel('No. of Users to Review (Scipy Z Score on "num_user_for_reviews" Column)', fontsize=13) 182 | plt.tight_layout() 183 | plt.show() 184 | 185 | 186 | #============================================= 187 | #= Some more checks 188 | #============================================= 189 | 190 | #movies_df['budget_zscore'] = stats.zscore(movies_df['budget']) 191 | 192 | #print type(stats.zscore(movies_df['budget'])) # numpy array 193 | #budg_zsc = stats.zscore(movies_df['budget']) 194 | #print type(budg_zsc) 195 | #print np.where(budg_zsc > 3.0)[0] 196 | #print budg_zsc[3259] 197 | #print budg_zsc[100:133] # only nan ? because the nan values aren't dropped yet 198 | # In that case we use the mean and std methods of the dataframe 199 | #print type( (movies_df.budget - movies_df.budget.mean())/movies_df.budget.std(ddof=0)) 200 | #mov_df_budget_zscore = (movies_df.budget - movies_df.budget.mean())/movies_df.budget.std(ddof=0) 201 | #print mov_df_budget_zscore[100:131] 202 | #mov_budg_zsc_arr = mov_df_budget_zscore.values 203 | #print mov_budg_zsc_arr[50:55] 204 | #print np.where(mov_budg_zsc_arr > .60)[0] 205 | 206 | 207 | 208 | 209 | 210 | 211 | -------------------------------------------------------------------------------- /datacleaning/hist_plot_numpy_scipy1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/hist_plot_numpy_scipy1.png -------------------------------------------------------------------------------- /datacleaning/hist_plot_numpy_scipy2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/hist_plot_numpy_scipy2.png -------------------------------------------------------------------------------- /datacleaning/outliers.odp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/outliers.odp -------------------------------------------------------------------------------- /datacleaning/outliers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/outliers.png --------------------------------------------------------------------------------
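Editor's note (added; not a file in the original repo): a minimal sketch verifying the comparison explored in the "Some more checks" section of data_cleaning.py above - after dropping NaNs, scipy.stats.zscore on the 'budget' column matches the manual (x - mean)/std computation, provided both use ddof=0. The csv name and column come from the script; the variable names are illustrative.

import numpy as np
import pandas as pd
from scipy import stats

movies = pd.read_csv("movie_metadata.csv").dropna(how="any")
# pandas' std() defaults to ddof=1, so pass ddof=0 to match scipy
manual_z = (movies["budget"] - movies["budget"].mean()) / movies["budget"].std(ddof=0)
scipy_z = stats.zscore(movies["budget"])  # scipy's zscore uses ddof=0 by default
print(np.allclose(manual_z.to_numpy(), np.asarray(scipy_z)))  # True, up to float rounding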
/datacleaning/outliers_face.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/outliers_face.png -------------------------------------------------------------------------------- /datacleaning/outliers_face_Zscore.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/outliers_face_Zscore.png -------------------------------------------------------------------------------- /datacleaning/standard_dev_Zscore.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import math 4 | import numpy as np 5 | import seaborn as sns 6 | import matplotlib.pyplot as plt 7 | from scipy.stats import norm 8 | 9 | np.random.seed(10) 10 | 11 | # definition of the probability density function 12 | # the pdf of a continuous random variable is a function whose value at any given sample (data point) in the sample space can be interpreted as the relative likelihood that the random variable takes a value close to that sample. 13 | 14 | # Plot between -10 and 10 with .001 steps. 15 | x_axis = np.arange(-10, 10, 0.001) 16 | Xarr = np.random.normal(loc=0.0, scale=5.0, size=1000) 17 | print(np.mean(Xarr), np.std(Xarr)) 18 | sns.distplot(Xarr, color='lime', hist_kws=dict(alpha=0.3)) 19 | 20 | 21 | 22 | pl_1_std = np.mean(Xarr) + np.std(Xarr) 23 | mi_1_std = np.mean(Xarr) - np.std(Xarr) 24 | 25 | pl_3_std = np.mean(Xarr) + (3*np.std(Xarr)) 26 | mi_3_std = np.mean(Xarr) - (3*np.std(Xarr)) 27 | #+++++++++++++++++++++++++++++++++++++++++++++++++ 28 | # Reference normal pdfs, commented out below: mean = 0 with SD = 5 and SD = 2.
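# --- editor's aside (added; not part of the original script) ----------------
# a quick empirical check of the 68-99.7 rule on the Xarr sample drawn above:
# roughly 68% of draws should fall within 1 SD of the mean, ~99.7% within 3 SD
within_1_std = np.mean(np.abs(Xarr - np.mean(Xarr)) <= np.std(Xarr))
within_3_std = np.mean(np.abs(Xarr - np.mean(Xarr)) <= 3 * np.std(Xarr))
print(within_1_std, within_3_std)  # expect values near 0.68 and 0.997
# --- end aside ---------------------------------------------------------------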
29 | 30 | #plt.plot(x_axis, norm.pdf(x_axis,0,5), color='purple', linestyle='--') 31 | #plt.plot(x_axis, norm.pdf(x_axis,0,2), color='magenta', linestyle='-.') 32 | #+++++++++++++++++++++++++++++++++++++++++++++++++ 33 | 34 | plt.axvline(pl_1_std, ymin=0, ymax = 0.53,linewidth=4, color='orange') 35 | plt.axvline(pl_3_std, linestyle='-.', linewidth=4, color='magenta', ymin=0, ymax=0.05) 36 | 37 | plt.axvline(mi_1_std, label=r'$-1\, \sigma$', ymin=0, ymax = 0.53, linewidth=4, color='orange') 38 | plt.axvline(mi_3_std, linestyle='-.', color='magenta', label=r'$-3\, \sigma$', linewidth=4, ymin=0, ymax=0.05) 39 | 40 | #plt.legend(fontsize=14) 41 | plt.text(-16.3, 0.02, r'$-3\, \sigma$', fontsize=13, rotation='vertical', color='magenta', 42 | bbox=dict(facecolor='none', edgecolor='lavender', boxstyle='round,pad=0.7')) 43 | 44 | 45 | plt.text(15.5, 0.02, r'$+ 3\, \sigma$', fontsize=13, rotation='vertical', color='magenta', 46 | bbox=dict(facecolor='none', edgecolor='lavender', boxstyle='round,pad=0.7')) 47 | 48 | plt.text(-6.5, 0.07, r'$-1\, \sigma$', fontsize=13, rotation='vertical', color='orange', 49 | bbox=dict(facecolor='none', edgecolor='azure', boxstyle='round,pad=0.7')) 50 | 51 | 52 | plt.text(6.5, 0.07, r'$+ 1\, \sigma$', fontsize=13, rotation='vertical', color='orange', 53 | bbox=dict(facecolor='none', edgecolor='azure', boxstyle='round,pad=0.7')) 54 | 55 | 56 | plt.show() 57 | -------------------------------------------------------------------------------- /datacleaning/zscore_std3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/datacleaning/zscore_std3.png -------------------------------------------------------------------------------- /fakeCover3_Web_Scrap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/suvoooo/Machine_Learning/cb9f09ee7bb54a575fd7527e147709def4a27b2f/fakeCover3_Web_Scrap.png -------------------------------------------------------------------------------- /gender_purchase.csv: -------------------------------------------------------------------------------- 1 | Gender,Purchase 2 | Female,Yes 3 | Female,Yes 4 | Female,No 5 | Male,No 6 | Male,Yes 7 | Female,Yes 8 | Male,No 9 | Female,Yes 10 | Female,No 11 | Female,Yes 12 | Female,No 13 | Male,No 14 | Male,Yes 15 | Male,No 16 | Female,Yes 17 | Male,Yes 18 | Male,Yes 19 | Male,Yes 20 | Female,Yes 21 | Female,No 22 | Male,Yes 23 | Male,Yes 24 | Male,No 25 | Female,Yes 26 | Male,Yes 27 | Female,Yes 28 | Male,No 29 | Male,No 30 | Female,Yes 31 | Female,Yes 32 | Male,No 33 | Female,Yes 34 | Female,Yes 35 | Female,No 36 | Female,No 37 | Female,Yes 38 | Male,Yes 39 | Female,Yes 40 | Female,Yes 41 | Female,Yes 42 | Male,Yes 43 | Male,No 44 | Female,Yes 45 | Female,No 46 | Female,Yes 47 | Female,Yes 48 | Female,No 49 | Male,Yes 50 | Female,No 51 | Female,Yes 52 | Female,No 53 | Male,No 54 | Female,Yes 55 | Male,Yes 56 | Female,No 57 | Female,No 58 | Female,No 59 | Female,Yes 60 | Male,Yes 61 | Female,Yes 62 | Male,Yes 63 | Male,No 64 | Male,Yes 65 | Male,Yes 66 | Male,No 67 | Male,Yes 68 | Female,Yes 69 | Female,No 70 | Male,Yes 71 | Female,No 72 | Male,Yes 73 | Female,Yes 74 | Female,Yes 75 | Female,No 76 | Female,No 77 | Male,Yes 78 | Male,No 79 | Male,No 80 | Male,No 81 | Male,No 82 | Female,No 83 | Male,No 84 | Male,No 85 | Female,Yes 86 | Female,Yes 87 | Female,Yes 88 | Female,Yes 89 | Female,Yes 90 | Female,Yes 91 | 
Male,No 92 | Male,Yes 93 | Female,Yes 94 | Male,No 95 | Male,No 96 | Female,Yes 97 | Female,No 98 | Male,Yes 99 | Female,Yes 100 | Female,Yes 101 | Male,Yes 102 | Male,No 103 | Male,Yes 104 | Female,No 105 | Male,Yes 106 | Female,Yes 107 | Female,Yes 108 | Male,Yes 109 | Female,No 110 | Male,No 111 | Female,Yes 112 | Female,No 113 | Male,Yes 114 | Male,Yes 115 | Male,Yes 116 | Male,No 117 | Male,No 118 | Female,No 119 | Female,No 120 | Male,Yes 121 | Female,No 122 | Female,Yes 123 | Female,No 124 | Female,Yes 125 | Female,No 126 | Male,Yes 127 | Female,Yes 128 | Female,No 129 | Male,No 130 | Female,Yes 131 | Female,Yes 132 | Male,No 133 | Female,Yes 134 | Female,Yes 135 | Male,Yes 136 | Male,No 137 | Male,Yes 138 | Female,Yes 139 | Female,Yes 140 | Female,No 141 | Female,No 142 | Male,Yes 143 | Male,Yes 144 | Male,No 145 | Female,Yes 146 | Male,Yes 147 | Male,No 148 | Female,Yes 149 | Male,No 150 | Male,No 151 | Female,Yes 152 | Female,No 153 | Female,Yes 154 | Male,Yes 155 | Male,Yes 156 | Female,Yes 157 | Male,No 158 | Male,Yes 159 | Male,No 160 | Male,No 161 | Female,No 162 | Male,Yes 163 | Female,No 164 | Male,Yes 165 | Male,Yes 166 | Male,Yes 167 | Male,Yes 168 | Female,Yes 169 | Female,No 170 | Female,Yes 171 | Female,Yes 172 | Female,No 173 | Female,Yes 174 | Female,No 175 | Male,Yes 176 | Male,No 177 | Female,No 178 | Male,No 179 | Male,No 180 | Male,No 181 | Female,Yes 182 | Female,Yes 183 | Female,No 184 | Female,No 185 | Female,No 186 | Female,No 187 | Female,Yes 188 | Male,No 189 | Female,Yes 190 | Female,Yes 191 | Female,No 192 | Female,No 193 | Female,No 194 | Female,Yes 195 | Female,Yes 196 | Male,No 197 | Male,No 198 | Male,Yes 199 | Female,No 200 | Male,No 201 | Female,Yes 202 | Female,Yes 203 | Female,No 204 | Female,No 205 | Male,No 206 | Male,No 207 | Male,No 208 | Female,Yes 209 | Male,Yes 210 | Male,No 211 | Female,Yes 212 | Female,Yes 213 | Male,No 214 | Female,No 215 | Male,Yes 216 | Male,No 217 | Male,Yes 218 | Male,Yes 219 | Female,Yes 220 | Female,Yes 221 | Male,No 222 | Female,No 223 | Male,Yes 224 | Male,No 225 | Male,Yes 226 | Male,No 227 | Female,Yes 228 | Female,Yes 229 | Female,No 230 | Male,No 231 | Male,No 232 | Female,No 233 | Male,No 234 | Male,Yes 235 | Female,Yes 236 | Female,Yes 237 | Female,No 238 | Male,No 239 | Female,No 240 | Female,Yes 241 | Female,No 242 | Male,Yes 243 | Male,Yes 244 | Female,Yes 245 | Female,Yes 246 | Female,Yes 247 | Male,No 248 | Male,Yes 249 | Female,No 250 | Male,Yes 251 | Male,Yes 252 | Male,No 253 | Female,Yes 254 | Female,No 255 | Female,No 256 | Female,Yes 257 | Female,Yes 258 | Male,No 259 | Male,No 260 | Male,No 261 | Male,No 262 | Male,No 263 | Female,Yes 264 | Female,No 265 | Female,Yes 266 | Male,Yes 267 | Female,Yes 268 | Female,Yes 269 | Male,No 270 | Male,No 271 | Male,No 272 | Male,No 273 | Male,No 274 | Female,Yes 275 | Female,Yes 276 | Female,No 277 | Male,No 278 | Female,Yes 279 | Female,Yes 280 | Female,Yes 281 | Female,Yes 282 | Male,No 283 | Male,No 284 | Female,No 285 | Male,No 286 | Male,No 287 | Female,No 288 | Female,Yes 289 | Male,No 290 | Female,Yes 291 | Female,No 292 | Female,Yes 293 | Female,No 294 | Male,No 295 | Female,Yes 296 | Male,No 297 | Male,Yes 298 | Female,Yes 299 | Female,Yes 300 | Female,Yes 301 | Female,No 302 | Male,Yes 303 | Female,No 304 | Male,No 305 | Female,Yes 306 | Male,Yes 307 | Male,No 308 | Female,Yes 309 | Female,Yes 310 | Female,Yes 311 | Female,Yes 312 | Female,No 313 | Male,Yes 314 | Male,No 315 | Female,Yes 316 | Female,Yes 317 | Female,No 318 | Female,Yes 319 | 
Female,Yes 320 | Male,No 321 | Female,No 322 | Male,No 323 | Female,No 324 | Male,No 325 | Male,No 326 | Male,Yes 327 | Female,Yes 328 | Male,Yes 329 | Male,No 330 | Male,Yes 331 | Male,Yes 332 | Male,Yes 333 | Male,No 334 | Female,Yes 335 | Male,Yes 336 | Male,No 337 | Male,Yes 338 | Male,Yes 339 | Female,Yes 340 | Male,No 341 | Male,Yes 342 | Male,Yes 343 | Female,Yes 344 | Female,No 345 | Female,No 346 | Female,No 347 | Male,Yes 348 | Female,No 349 | Male,No 350 | Female,Yes 351 | Female,No 352 | Male,Yes 353 | Female,No 354 | Female,No 355 | Male,Yes 356 | Female,No 357 | Female,No 358 | Male,Yes 359 | Female,Yes 360 | Female,Yes 361 | Male,Yes 362 | Male,No 363 | Male,Yes 364 | Female,No 365 | Female,Yes 366 | Male,Yes 367 | Male,Yes 368 | Male,Yes 369 | Male,No 370 | Male,Yes 371 | Male,No 372 | Male,No 373 | Female,Yes 374 | Female,No 375 | Female,Yes 376 | Female,No 377 | Male,Yes 378 | Female,Yes 379 | Female,Yes 380 | Male,No 381 | Female,No 382 | Female,No 383 | Female,No 384 | Male,Yes 385 | Female,Yes 386 | Female,Yes 387 | Male,Yes 388 | Male,No 389 | Female,No 390 | Male,No 391 | Female,Yes 392 | Male,No 393 | Female,Yes 394 | Male,Yes 395 | Female,Yes 396 | Male,Yes 397 | Male,Yes 398 | Male,No 399 | Male,No 400 | Male,No 401 | Female,No 402 | Female,No 403 | Male,Yes 404 | Female,Yes 405 | Female,No 406 | Female,Yes 407 | Male,Yes 408 | Male,No 409 | Female,No 410 | Male,No 411 | Female,Yes 412 | Female,Yes 413 | Female,No 414 | Male,No 415 | Male,Yes 416 | Male,No 417 | Male,Yes 418 | Female,Yes 419 | Male,Yes 420 | Male,Yes 421 | Female,No 422 | Male,No 423 | Female,No 424 | Female,No 425 | Female,No 426 | Female,Yes 427 | Male,Yes 428 | Male,Yes 429 | Male,No 430 | Male,No 431 | Male,No 432 | Female,Yes 433 | Male,No 434 | Male,Yes 435 | Female,Yes 436 | Male,Yes 437 | Male,Yes 438 | Female,No 439 | Female,Yes 440 | Female,No 441 | Female,Yes 442 | Female,Yes 443 | Male,Yes 444 | Male,Yes 445 | Male,No 446 | Female,Yes 447 | Male,No 448 | Male,Yes 449 | Female,Yes 450 | Female,No 451 | Female,No 452 | Female,No 453 | Male,No 454 | Female,Yes 455 | Male,Yes 456 | Male,No 457 | Male,Yes 458 | Female,No 459 | Male,No 460 | Male,No 461 | Female,Yes 462 | Male,No 463 | Female,Yes 464 | Female,Yes 465 | Male,Yes 466 | Female,Yes 467 | Male,Yes 468 | Female,Yes 469 | Female,Yes 470 | Male,No 471 | Female,No 472 | Female,Yes 473 | Female,No 474 | Male,No 475 | Female,No 476 | Male,Yes 477 | Female,No 478 | Male,Yes 479 | Female,Yes 480 | Male,No 481 | Male,No 482 | Female,No 483 | Male,No 484 | Male,No 485 | Male,No 486 | Male,No 487 | Male,Yes 488 | Male,Yes 489 | Male,Yes 490 | Female,Yes 491 | Male,Yes 492 | Male,Yes 493 | Female,Yes 494 | Female,No 495 | Male,Yes 496 | Female,Yes 497 | Female,Yes 498 | Female,Yes 499 | Male,Yes 500 | Male,Yes 501 | Female,Yes 502 | Male,Yes 503 | Male,Yes 504 | Male,No 505 | Female,Yes 506 | Female,Yes 507 | Male,Yes 508 | Male,Yes 509 | Female,Yes 510 | Male,No 511 | Female,Yes 512 | Female,Yes 513 | 514 | -------------------------------------------------------------------------------- /lagmult.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import math 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | import matplotlib 7 | 8 | 9 | matplotlib.rcParams.update({'font.size': 12}) 10 | 11 | circle1 = plt.Circle((0,0),1, color='magenta', alpha=0.4) 12 | 13 | fig,ax= plt.subplots() 14 | 15 | ax.add_artist(circle1) 16 | 17 | 18 | ax.set_xlim(-5.2,5.2) 19 | 
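# --- editor's note (added; not in the original script) ----------------------
# what this script visualizes: the magenta unit circle is the constraint
# g(x, y) = x**2 + y**2 = 1, and the parabolas plotted below are level sets
# of f(x, y) = 8*x**2 - 2*y = k, rearranged as y = (8*x**2 - k)/2.
# Setting grad f = lambda * grad g gives lambda = 8 and y = -1/8 for the
# constrained maximum f = 8.125 (the k = 8.125 contour just touches the
# circle), and x = 0, y = 1 for the constrained minimum f = -2.
# -----------------------------------------------------------------------------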
ax.set_ylim(-5.2,5.2) 20 | ax.set_xlabel('X', fontsize=12) 21 | ax.set_ylabel('Y', fontsize=12) 22 | 23 | 24 | xp = np.linspace(-10,10,num=1000) 25 | 26 | print(xp) 27 | 28 | 29 | kval = [-3,-2,-1,0,1,2,4,8.125,11] 30 | 31 | 32 | yp0 = [(8*(x**2) - kval[0])/2.0 for x in xp] 33 | yp1 = [(8*(x**2) - kval[1])/2.0 for x in xp] 34 | yp2 = [(8*(x**2) - kval[2])/2.0 for x in xp] 35 | yp3 = [(8*(x**2) - kval[3])/2.0 for x in xp] 36 | yp4 = [(8*(x**2) - kval[4])/2.0 for x in xp] 37 | yp5 = [(8*(x**2) - kval[5])/2.0 for x in xp] 38 | yp6 = [(8*(x**2) - kval[6])/2.0 for x in xp] 39 | yp7 = [(8*(x**2) - kval[7])/2.0 for x in xp] 40 | yp8 = [(8*(x**2) - kval[8])/2.0 for x in xp] 41 | 42 | 43 | 44 | 45 | 46 | #print yp0 47 | 48 | 49 | plt.plot(xp, yp0,color='hotpink',label=r'$f(x,y)=-3.0$') 50 | plt.plot(xp, yp1,color='red',linestyle='-.',label=r'$f(x,y)=-2.0$') 51 | plt.plot(xp, yp2,color='olive',label=r'$f(x,y)=-1.0$') 52 | plt.plot(xp, yp3,color='lime',label=r'$f(x,y)=0.0$') 53 | #plt.plot(xp, yp4,color='aqua',label=r'$f(x,y)=1.0$') 54 | plt.plot(xp, yp5,color='aqua',label=r'$f(x,y)=2.0$') 55 | #plt.plot(xp, yp6,color='deepskyblue',label=r'$f(x,y)=4.0$') 56 | plt.plot(xp, yp7,color='navy',linestyle='--',label=r'$f(x,y)=8.125$') 57 | plt.plot(xp, yp8,color='gray',label=r'$f(x,y)=11.0$') 58 | 59 | plt.axhline(y=0, linestyle=':', linewidth=1, color='orange') 60 | plt.axhline(y=1, linestyle=':', linewidth=1, color='orange') 61 | plt.axhline(y=-1, linestyle=':', linewidth=1, color='orange') 62 | 63 | 64 | plt.axvline(x=0, linestyle=':', linewidth=1, color='orange') 65 | plt.axvline(x=1, linestyle=':', linewidth=1, color='orange') 66 | plt.axvline(x=-1, linestyle=':', linewidth=1, color='orange') 67 | 68 | 69 | 70 | plt.annotate('Minima',color='red',xy=(-0.49, 2.34), xytext=(-3.0,2.24), ha='center', arrowprops=dict(facecolor='red', edgecolor='red',shrink=0.05, width=1)) 71 | plt.annotate('Maxima',color='navy',xy=(0.67, -2.), xytext=(3.0,-2.05), ha='center', arrowprops=dict(facecolor='navy', edgecolor='navy',shrink=0.05, width=1)) 72 | 73 | 74 | 75 | 76 | plt.title('Example of Solving a Constrained Problem with a Lagrange Multiplier') 77 | plt.legend(fontsize=12) 78 | plt.show() 79 | -------------------------------------------------------------------------------- /pipelineWine.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import pandas as pd 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import seaborn as sns 7 | import pprint 8 | import time 9 | 10 | from sklearn.model_selection import train_test_split 11 | from sklearn.pipeline import Pipeline 12 | from sklearn.pipeline import make_pipeline 13 | from sklearn.preprocessing import StandardScaler 14 | from sklearn.svm import SVC 15 | from sklearn.model_selection import GridSearchCV 16 | 17 | 18 | startT = time.time() 19 | 20 | 21 | winedf = pd.read_csv('winequality-red.csv',sep=';') 22 | 23 | #print winedf.isnull().sum() # check for missing data 24 | 25 | print(winedf.head(3)) 26 | 27 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++++ 28 | #+ check whether the labels are unbalanced or not 29 | #+++++++++++++++++++++++++++++++++++++++++++++++++++++++ 30 | 31 | #print winedf.shape 32 | #ylab = winedf[['quality']] 33 | #print ylab.shape 34 | #print winedf['quality'].value_counts() # indeed they are 35 | 36 | #++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 37 | 38 | # check the correlation plot 39 | 40 | #winecorr = winedf.corr() 41 | #s=sns.heatmap(winecorr)
42 | #s.set_yticklabels(s.get_yticklabels(),rotation=30,fontsize=7) 43 | #s.set_xticklabels(s.get_xticklabels(),rotation=30,fontsize=7) 44 | 45 | #plt.show() # as expected high correlation between acidity and pH 46 | 47 | # individual correlation plot 48 | #plt.subplot(1,2,1) 49 | #plt.scatter(winedf['fixed acidity'], winedf['pH'], s=winedf['quality']*5, color='magenta', alpha=0.3) 50 | #plt.xlabel('Fixed Acidity') 51 | #plt.ylabel('pH') 52 | #plt.subplot(1,2,2) 53 | #plt.scatter(winedf['fixed acidity'], winedf['residual sugar'], s=winedf['quality']*5, color='purple', alpha=0.3) 54 | #plt.xlabel('Fixed Acidity') 55 | #plt.ylabel('Residual Sugar') 56 | #plt.tight_layout() 57 | #plt.show() 58 | 59 | X=winedf.drop(['quality'],axis=1) 60 | Y=winedf['quality'] 61 | 62 | print(type(X), type(Y)) 63 | print(X.head(3)) 64 | 65 | #++++++++++++++++++++++++++++++++ 66 | # create the pipeline object 67 | #++++++++++++++++++++++++++++++++ 68 | steps = [('scaler', StandardScaler()), ('SVM', SVC())] 69 | pipeline = Pipeline(steps) 70 | 71 | 72 | #++++++++++++++++++++++++++++++++++++++ 73 | #+ create the hyperparameter space 74 | #++++++++++++++++++++++++++++++++++++++ 75 | 76 | parameters = {'SVM__C':[0.001,0.1,10,100,10e5], 'SVM__gamma':[0.1,0.01]} 77 | 78 | #++++++++++++++++++++++++++++++++++++ 79 | #+ create train and test sets 80 | #++++++++++++++++++++++++++++++++++++ 81 | 82 | X_train, X_test, y_train, y_test = train_test_split(X,Y,test_size=0.2, random_state=30, stratify=Y) 83 | 84 | #print X_test.shape 85 | 86 | #++++++++++++++++++++++++++++++ 87 | #+ Grid Search Cross Validation 88 | #++++++++++++++++++++++++++++++ 89 | grid = GridSearchCV(pipeline, param_grid=parameters, cv=5) 90 | 91 | grid.fit(X_train, y_train) 92 | 93 | print("score = %3.2f" % (grid.score(X_test,y_test))) 94 | 95 | #pparam=pprint.PrettyPrinter(indent=2) 96 | 97 | print(grid.best_params_) 98 | endT = time.time() 99 | 100 | print("total time elapsed = %3.3f" % (endT-startT)) 101 | 102 | -------------------------------------------------------------------------------- /why_relu.py: -------------------------------------------------------------------------------- 1 | ''' 2 | accompanying code for {https://towardsdatascience.com/3-basic-concepts-in-neural-net-and-deep-learning-revisited-7f982bb7bb05}[why relu works] 3 | ''' 4 | 5 | import numpy as np 6 | import matplotlib.pyplot as plt 7 | 8 | 9 | def f(x): return x**2 10 | 11 | x_in = np.linspace(-3, 3, 50) 12 | 13 | x_out = f(x_in) 14 | print('check output array: ', x_out) 15 | 16 | 17 | def node(a, x, b): 18 | ''' 19 | define a relu function 20 | potentially used as an activation for a node 21 | ''' 22 | linear = a*x + b 23 | relu = np.maximum(0, linear) 24 | return relu 25 | 26 | 27 | ### imagine we have only two inputs to the neural net 28 | ### zero bias and a = 1, -1 29 | 30 | y_out_1 = np.zeros((len(x_in), )) 31 | 32 | # print (y_out_1) 33 | 34 | for i in range(len(x_in)): 35 | y_1 = node(1, x_in[i], 0) + node(-1, x_in[i], 0) 36 | y_out_1[i] = y_1 37 | 38 | print(y_out_1) 39 | 40 | 41 | #### let's extend this for 4 inputs 42 | #### two same as before, the new two inputs have a=2 and -2 and bias -2 43 | 44 | y_out_2 = np.zeros((len(x_in), )) 45 | 46 | for i in range(len(x_in)): 47 | y_2 = node(1, x_in[i], 0) + node(-1, x_in[i], 0) + node(2, x_in[i], -2) + node(-2, x_in[i], -2) 48 | y_out_2[i] = y_2 49 | 50 | print(y_out_2) 51 | 52 | #### Extend a bit more for 6 inputs 53 | #### 4 same as before, the new two inputs have a=3 and -3 and bias -6 54 | 55 | y_out_3 = np.zeros((len(x_in),
)) 56 | 57 | for i in range(len(x_in)): 58 | y_3 = node(1, x_in[i], 0) + node(-1, x_in[i], 0) + node(2, x_in[i], -2) + node(-2, x_in[i], -2) + node(3, x_in[i], -6) + node(-3, x_in[i], -6) 59 | y_out_3[i] = y_3 60 | 61 | 62 | print(y_out_3) 63 | 64 | 65 | fig = plt.figure(figsize=(8, 5)) 66 | plt.plot(x_in, x_out, ls='None', marker='*', color='green', alpha=0.6, label='f(x)') 67 | plt.plot(x_in, y_out_1, ls='--', color='orange', alpha=0.5, label='Relu- 2 Inputs') 68 | plt.plot(x_in, y_out_2, ls='-.', color='magenta', alpha=0.5, label='Relu- 4 Inputs') 69 | plt.plot(x_in, y_out_3, ls=':', color='red', alpha=0.5, label='Relu- 6 Inputs') 70 | 71 | plt.legend(fontsize=12) 72 | plt.xlabel('Input') 73 | plt.ylabel('Output') 74 | 75 | plt.tight_layout() 76 | plt.show() 77 | 78 | ### the job of the neural net is to learn the weights (the 'a's) and biases (the 'b's) so that the summed ReLU outputs replicate the target function as closely as possible. 79 | --------------------------------------------------------------------------------
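Editor's note (added; not a file in the original repo): a short follow-up sketch to why_relu.py that quantifies how the piecewise-linear ReLU sums approach f(x) = x**2 as nodes are added; the weights and biases mirror those in the script, everything else is illustrative.

import numpy as np

def relu_node(a, x, b):
    # a single ReLU unit, max(0, a*x + b), same as node() in why_relu.py
    return np.maximum(0, a * x + b)

x = np.linspace(-3, 3, 50)
target = x ** 2
approx_2 = relu_node(1, x, 0) + relu_node(-1, x, 0)
approx_4 = approx_2 + relu_node(2, x, -2) + relu_node(-2, x, -2)
approx_6 = approx_4 + relu_node(3, x, -6) + relu_node(-3, x, -6)
for name, approx in (("2 nodes", approx_2), ("4 nodes", approx_4), ("6 nodes", approx_6)):
    print(name, "-> max abs error:", np.max(np.abs(target - approx)))
# the maximum error shrinks with each pair of nodes, showing how sums of ReLUs
# build progressively finer piecewise-linear approximations of a smooth curve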