├── .gitignore
├── CFS.py
├── CFS_ForwardSearch.py
├── FS-CFS.ipynb
├── FS-D-Tree.ipynb
├── FS-Filters.ipynb
├── FS-LDA.ipynb
├── FS-Lasso.ipynb
├── FS-PCA.ipynb
├── FS-Permutation+Wrapper.ipynb
├── FS-Permutation-FI.ipynb
├── FS-Random-Forest.ipynb
├── FS-ReliefF.ipynb
├── FS-Wrappers.ipynb
├── HarryPotterTT.csv
├── README.md
├── entropy_estimators.py
├── ionosphere.csv
├── mutual_information.py
├── penguins.csv
├── requirements.txt
└── segmentation-all.csv
/.gitignore:
--------------------------------------------------------------------------------
1 | .venv
2 |
--------------------------------------------------------------------------------
/CFS.py:
--------------------------------------------------------------------------------
1 | '''
2 | All functions used in this notebook were obtained from:
3 |
4 | Jundong Li, Kewei Cheng, Suhang Wang, Fred Morstatter, Robert P. Trevino, Jiliang Tang, and Huan Liu. 2017. Feature Selection: A Data Perspective. ACM Comput. Surv. 50, 6, Article 94 (January 2018), 45 pages. DOI:https://doi.org/10.1145/3136625
5 |
6 | '''
7 |
8 | import numpy as np
9 | from mutual_information import su_calculation
10 |
11 |
def merit_calculation(X, y):
    """
    Compute the CFS merit of the feature subset X given class labels y, where

        merits = (k * rcf) / sqrt(k + k*(k-1)*rff)
        rcf = (1/k) * sum(su(fi, y)) for all fi in X
        rff = (1/(k*(k-1))) * sum(su(fi, fj)) for all fi, fj in X

    Input
    ----------
    X: {numpy array}, shape (n_samples, n_features)
        input data
    y: {numpy array}, shape (n_samples,)
        input class labels
    Output
    ----------
    merits: {float}
        merit of a feature subset X
    """
    _, n_features = X.shape

    # Sum of su(fi, y) over all features: equals k * rcf in the formula.
    class_corr = sum(su_calculation(X[:, i], y) for i in range(n_features))

    # Sum of su(fi, fj) over unordered pairs, then doubled so both (i, j)
    # and (j, i) are counted: equals k*(k-1) * rff in the formula.
    pair_corr = 0.0
    for i in range(n_features):
        for j in range(i + 1, n_features):
            pair_corr += su_calculation(X[:, i], X[:, j])
    pair_corr *= 2

    return class_corr / np.sqrt(n_features + pair_corr)
43 |
44 |
def cfs(X, y):
    """
    This function uses a correlation based heuristic to evaluate the worth of
    features which is called CFS. It performs a greedy forward selection:
    at each step the feature whose addition yields the highest subset merit
    (see merit_calculation) is added.

    Input
    -----
    X: {numpy array}, shape (n_samples, n_features)
        input data
    y: {numpy array}, shape (n_samples,)
        input class labels
    Output
    ------
    F: {numpy array}
        index of selected features, in the order they were added
    Reference
    ---------
    Zhao, Zheng et al. "Advancing Feature Selection Research - ASU Feature Selection Repository" 2010.
    """
    n_samples, n_features = X.shape
    F = []  # indices of selected features, in selection order
    M = []  # merit of the selected subset after each addition
    while True:
        best_merit = float('-inf')
        best_idx = -1
        # Tentatively add each remaining feature and keep the best one.
        for i in range(n_features):
            if i not in F:
                F.append(i)
                t = merit_calculation(X[:, F], y)
                if t > best_merit:
                    best_merit = t
                    best_idx = i
                F.pop()
        if best_idx == -1:
            # No candidate left (all features selected, or no finite merit).
            # The original code appended the sentinel -1 to F here, which
            # corrupted the returned index array.
            break
        F.append(best_idx)
        M.append(best_merit)
        # Stop once the merit has been non-increasing over the last five
        # recorded steps (only checked after more than five selections).
        if len(M) > 5 and all(M[-j] <= M[-j - 1] for j in range(1, 5)):
            break
    return np.array(F)
--------------------------------------------------------------------------------
/CFS_ForwardSearch.py:
--------------------------------------------------------------------------------
1 | from CFS import merit_calculation
2 | import pandas as pd
3 | import numpy as np
4 |
def CFS_FS(X, y):

    '''
    This function performs a forward (greedy) search for CFS: starting from
    the empty set, it repeatedly adds the single feature that most improves
    the merit score, stopping as soon as no candidate improves on the
    current score.

    Inputs:
    X - training data, shape (n_samples, n_features)
    y - labels

    Outputs:
    merit_score_sel - The merit value assigned to the selected feature subsets
                      in the order they were added (one DataFrame column per step)
    sel_comb - The selected feature combination (a single int after one step,
               a tuple of ints thereafter)
    '''

    # initialise variables
    subset_size = 1        # size of the candidate subsets in the current round
    sel_comb = []          # current best feature combination
    merit_score_prev = 0   # merit of the current best combination
    merit_score_sel = pd.DataFrame()
    enum = 0

    m, n = X.shape

    # NOTE(review): at most n-1 rounds run, so the full feature set can never
    # be selected -- confirm this bound is intended.
    for _ in range(n - 1):

        # Build the candidate subsets: the current selection extended by one
        # feature that is not yet selected.
        if subset_size == 1:
            combs = list(range(n))
        elif subset_size == 2:
            # sel_comb is a single feature index at this point
            combs = [(sel_comb, f) for f in range(n) if f != sel_comb]
        else:
            # sel_comb is a tuple of feature indices
            combs = [sel_comb + (f,) for f in range(n) if f not in sel_comb]

        # Iterate through the candidate feature subsets and find merit scores
        merit_score = []
        for comb in combs:
            X_input = X[:, comb]
            if subset_size == 1:
                # merit_calculation expects a 2-D (n_samples, k) array
                X_input = np.atleast_2d(X_input).T
            merit_score.append(merit_calculation(X_input, y))

        # Once the best candidate stops improving the score, stop the search
        if max(merit_score) - merit_score_prev <= 0:
            break
        sel_comb = combs[int(np.argmax(merit_score))]
        merit_score_prev = max(merit_score)
        subset_size += 1

        merit_score_sel.insert(enum, enum, [merit_score_prev])
        enum = enum + 1

    return merit_score_sel, sel_comb
--------------------------------------------------------------------------------
/FS-Filters.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Feature Selection using Filters\n",
8 | "### Feature Scoring - two methods \n",
9 | "1. Chi square statistic\n",
10 | "2. Information Gain"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": null,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "import pandas as pd\n",
20 | "import numpy as np\n",
21 | "from sklearn.feature_selection import chi2, mutual_info_classif\n",
22 | "from sklearn.model_selection import train_test_split\n",
23 | "from sklearn.preprocessing import MinMaxScaler\n",
24 | "from sklearn.model_selection import cross_val_score\n",
25 | "import matplotlib.pyplot as plt "
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": null,
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "seg_data = pd.read_csv('segmentation-all.csv')\n",
35 | "print(seg_data.shape)\n",
36 | "seg_data.head()"
37 | ]
38 | },
39 | {
40 | "cell_type": "code",
41 | "execution_count": null,
42 | "metadata": {},
43 | "outputs": [],
44 | "source": [
45 | "seg_data['Class'].value_counts()"
46 | ]
47 | },
48 | {
49 | "cell_type": "markdown",
50 | "metadata": {},
51 | "source": [
52 | "Load the data, scale it and divide into train and test sets. \n",
53 | "The filters are *trained* using the training data and then a classifier is trained on the feature subset and tested on the test set. "
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": null,
59 | "metadata": {},
60 | "outputs": [],
61 | "source": [
62 | "y = seg_data.pop('Class').values\n",
63 | "X_raw = seg_data.values\n",
64 | "\n",
65 | "X_tr_raw, X_ts_raw, y_train, y_test = train_test_split(X_raw, y, \n",
66 | " random_state=1, test_size=1/2)\n",
67 | "scaler = MinMaxScaler()\n",
68 | "X_train = scaler.fit_transform(X_tr_raw)\n",
69 | "X_test = scaler.transform(X_ts_raw)\n",
70 | "\n",
71 | "feature_names = seg_data.columns\n",
72 | "X_train.shape, X_test.shape"
73 | ]
74 | },
75 | {
76 | "cell_type": "markdown",
77 | "metadata": {},
78 | "source": [
79 | "### Feature Scores \n",
80 | "Determine the chi-squared and information gain scores for all features using the training set. \n",
81 | "**Note:** The mutual information score returned by `mutual_info_classif` is effectively an information gain score. "
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "chi2_score, pval = chi2(X_train, y_train)\n",
91 | "chi2_score = np.nan_to_num(chi2_score)\n",
92 | "chi2_score\n",
93 | "# The chi square scores for the features"
94 | ]
95 | },
96 | {
97 | "cell_type": "code",
98 | "execution_count": null,
99 | "metadata": {},
100 | "outputs": [],
101 | "source": [
102 | "i_scores = mutual_info_classif(X_train,y_train)\n",
103 | "i_scores\n",
104 | "# The i-gain scores for the features"
105 | ]
106 | },
107 | {
108 | "cell_type": "markdown",
109 | "metadata": {},
110 | "source": [
111 | "Store the scores in a dataframe indexed by the feature names."
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": null,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "df=pd.DataFrame({'Mutual Info.':i_scores,'Chi Square':chi2_score,'Feature':feature_names})\n",
121 | "df.set_index('Feature', inplace = True)\n",
122 | "df.sort_values('Mutual Info.', inplace = True, ascending = False)\n",
123 | "df"
124 | ]
125 | },
126 | {
127 | "cell_type": "markdown",
128 | "metadata": {},
129 | "source": [
130 | "### Plotting the Filter scores\n",
131 | "We see that the two scores are fairly well correlated. \n",
132 | "The Spearman correlation is 0.89."
133 | ]
134 | },
135 | {
136 | "cell_type": "code",
137 | "execution_count": null,
138 | "metadata": {},
139 | "outputs": [],
140 | "source": [
141 | "fig, ax = plt.subplots()\n",
142 | "rr = range(0,len(feature_names))\n",
143 | "ax2 = ax.twinx()\n",
144 | "ax.plot(df.index, df[\"Mutual Info.\"], label='I-Gain')\n",
145 | "ax2.plot(df.index, df[\"Chi Square\"], color='skyblue', label='Chi Squared')\n",
146 | "ax.set_xticks(rr)\n",
147 | "\n",
148 | "ax.set_xticklabels(list(df.index), rotation = 90)\n",
149 | "ax.set_xlabel('Features', fontsize=12, fontweight='bold')\n",
150 | "ax.set_ylabel('I-Gain')\n",
151 | "ax2.set_ylabel('Chi Squared')\n",
152 | "fig.legend(loc=\"upper right\", bbox_to_anchor=(1,1), bbox_transform=ax.transAxes)"
153 | ]
154 | },
155 | {
156 | "cell_type": "code",
157 | "execution_count": null,
158 | "metadata": {},
159 | "outputs": [],
160 | "source": [
161 | "from scipy import stats\n",
162 | "stats.spearmanr(chi2_score, i_scores)"
163 | ]
164 | },
165 | {
166 | "cell_type": "markdown",
167 | "metadata": {},
168 | "source": [
169 | "## Feature Selection\n",
170 | "Compare \n",
171 | "- Baseline: all features\n",
"- Top 3 features, I-Gain and Chi-Square\n",
"- Top 6 features, I-Gain and Chi-Square\n",
"- Top 10 and top 15 features, I-Gain and Chi-Square"
175 | ]
176 | },
177 | {
178 | "cell_type": "code",
179 | "execution_count": null,
180 | "metadata": {},
181 | "outputs": [],
182 | "source": [
183 | "from sklearn.model_selection import train_test_split\n",
184 | "from sklearn.feature_selection import SelectKBest, mutual_info_classif\n",
185 | "from sklearn.metrics import accuracy_score\n",
186 | "from sklearn.neighbors import KNeighborsClassifier"
187 | ]
188 | },
189 | {
190 | "cell_type": "markdown",
191 | "metadata": {},
192 | "source": [
193 | "### Baseline Classifier"
194 | ]
195 | },
196 | {
197 | "cell_type": "code",
198 | "execution_count": null,
199 | "metadata": {},
200 | "outputs": [],
201 | "source": [
202 | "model = KNeighborsClassifier(n_neighbors=3)\n",
203 | "model = model.fit(X_train,y_train)\n",
204 | "y_pred = model.predict(X_test)\n",
205 | "acc = accuracy_score(y_pred,y_test)\n",
206 | "acc"
207 | ]
208 | },
209 | {
210 | "cell_type": "code",
211 | "execution_count": null,
212 | "metadata": {},
213 | "outputs": [],
214 | "source": [
215 | "n_features = X_train.shape[1]\n",
216 | "n_features"
217 | ]
218 | },
219 | {
220 | "cell_type": "code",
221 | "execution_count": null,
222 | "metadata": {},
223 | "outputs": [],
224 | "source": [
225 | "filters = [mutual_info_classif, chi2]\n",
226 | "k_options = [n_features, 3, 6, 10, 15]\n",
227 | "filt_scores = {}\n",
228 | "chi_scores = {}\n",
229 | "i_gain_scores = {}\n",
230 | "\n",
231 | "for the_filter in filters:\n",
232 | " accs = []\n",
233 | " for k_val in k_options:\n",
234 | " FS_trans = SelectKBest(the_filter, \n",
235 | " k=k_val).fit(X_train, y_train)\n",
236 | " X_tR_new = FS_trans.transform(X_train)\n",
237 | " X_tS_new = FS_trans.transform(X_test)\n",
238 | "\n",
239 | " model.fit(X_tR_new, y_train)\n",
240 | "\n",
241 | " y_tS_pred = model.predict(X_tS_new)\n",
242 | " \n",
243 | " acc = accuracy_score(y_test, y_tS_pred)\n",
244 | " accs.append(acc)\n",
245 | " print(the_filter, k_val, acc)\n",
246 | " filt_scores[the_filter.__name__] = accs\n"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": null,
252 | "metadata": {},
253 | "outputs": [],
254 | "source": [
255 | "import matplotlib.pyplot as plt \n",
256 | "import numpy as np\n",
257 | "%matplotlib inline \n",
258 | "\n",
259 | "fig, ax = plt.subplots()\n",
260 | "width = 0.3\n",
261 | "sb = 'skyblue'\n",
262 | "\n",
263 | "options = ['All'] + k_options[1:]\n",
264 | "ig = filt_scores['mutual_info_classif']\n",
265 | "ch = filt_scores['chi2']\n",
266 | "\n",
267 | "y_pos = np.arange(len(options))\n",
268 | "\n",
269 | "p1 = ax.bar(y_pos-width, ig, width, align='center', \n",
270 | " color=['red', 'blue', 'blue','blue','blue'],alpha=0.5)\n",
271 | "p2 = ax.bar(y_pos, ch, width, align='center', \n",
272 | " color=['red', sb, sb, sb, sb],alpha=0.5)\n",
273 | "\n",
274 | "ax.legend((p1[1], p2[1]), ('I-Gain', 'Chi Squared'),loc='lower right')\n",
275 | "ax.set_ylim([0.5, 1])\n",
276 | "plt.grid(axis = 'y')\n",
277 | "plt.yticks(np.arange(0.5,1.05,0.1))\n",
278 | "\n",
279 | "plt.xticks(y_pos, options)\n",
280 | "plt.ylabel('Test Set Accuracy')\n",
281 | "plt.xlabel('Feature Counts')\n",
282 | "plt.show()"
283 | ]
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {},
288 | "source": [
289 | "## Hybrid Filter Wrapper Strategy\n",
290 | "We rank the features using information gain (well mutual information) and select the _k_ best to build a classifier. \n",
291 | "We iterate through increasing values of *k*. \n",
292 | "`SelectKBest` is a _transform_ that transforms the training data.\n"
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": null,
298 | "metadata": {
299 | "scrolled": true
300 | },
301 | "outputs": [],
302 | "source": [
303 | "cv_acc_scores = []\n",
304 | "tst_acc_scores = []\n",
305 | "best_acc = 0\n",
306 | "best_k = 0\n",
307 | "for kk in range(1, X_train.shape[1]+1):\n",
308 | " FS_trans = SelectKBest(mutual_info_classif, \n",
309 | " k=kk).fit(X_train, y_train)\n",
310 | " X_tR_new = FS_trans.transform(X_train)\n",
311 | " X_tS_new = FS_trans.transform(X_test)\n",
312 | " cv_acc = cross_val_score(model, X_tR_new, y_train, cv=8)\n",
313 | " cv_acc_scores.append(cv_acc.mean())\n",
314 | " y_pred_temp = model.fit(X_tR_new, y_train).predict(X_tS_new)\n",
315 | " tst_acc_scores.append(accuracy_score(y_pred_temp, y_test))\n",
316 | " if cv_acc.mean() > best_acc:\n",
317 | " best_acc = cv_acc.mean()\n",
318 | " best_k = kk\n",
319 | "df['Training Acc.'] = cv_acc_scores\n",
320 | "df['Test Acc.'] = tst_acc_scores\n",
321 | "\n",
322 | "print(best_k, best_acc)\n",
323 | "df.head(15)"
324 | ]
325 | },
326 | {
327 | "cell_type": "code",
328 | "execution_count": null,
329 | "metadata": {},
330 | "outputs": [],
331 | "source": [
332 | "import matplotlib.pyplot as plt\n",
333 | "%matplotlib inline\n",
334 | "\n",
335 | "n = len(df.index)\n",
336 | "rr = range(0,n)\n",
337 | "fig, ax = plt.subplots()\n",
338 | "ax2 = ax.twinx()\n",
339 | "ax.bar(df.index, df[\"Mutual Info.\"], label='I-Gain',width=.35)\n",
340 | "\n",
341 | "ax2.plot(df.index, df[\"Training Acc.\"], color='green', label='Training Acc.')\n",
342 | "ax2.plot(df.index, df[\"Test Acc.\"], color='lightgreen', label='Test Acc')\n",
343 | "ax.set_xticks(rr)\n",
344 | "ax2.plot(best_k-1,best_acc,'gx') \n",
345 | "ax.set_xticklabels(list(df.index), rotation = 90)\n",
346 | "ax.set_xlabel('Features')\n",
347 | "ax.set_ylabel('I-Gain')\n",
348 | "ax2.set_ylabel('Accuracy')\n",
349 | "fig.legend(loc=\"upper right\", bbox_to_anchor=(1,0.8), bbox_transform=ax.transAxes)\n",
350 | "plt.show()"
351 | ]
352 | }
353 | ],
354 | "metadata": {
355 | "kernelspec": {
356 | "display_name": "Python 3",
357 | "language": "python",
358 | "name": "python3"
359 | },
360 | "language_info": {
361 | "codemirror_mode": {
362 | "name": "ipython",
363 | "version": 3
364 | },
365 | "file_extension": ".py",
366 | "mimetype": "text/x-python",
367 | "name": "python",
368 | "nbconvert_exporter": "python",
369 | "pygments_lexer": "ipython3",
370 | "version": "3.8.3"
371 | }
372 | },
373 | "nbformat": 4,
374 | "nbformat_minor": 2
375 | }
376 |
--------------------------------------------------------------------------------
/FS-LDA.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Linear Discriminant Analysis\n",
8 | "### + comparison with PCA\n",
9 | "Linear Discriminant Analysis using the LDA implementation in `scikit-learn`.\n",
10 | "The objective with LDA is to project the data into a reduced dimension space that maximises between-class separation. \n",
11 | "PCA is also included for the purpose of comparison. "
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": null,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import pandas as pd\n",
21 | "import numpy as np\n",
22 | "from sklearn.model_selection import train_test_split\n",
23 | "from sklearn.preprocessing import StandardScaler\n",
24 | "from collections import Counter\n",
25 | "from sklearn.metrics import accuracy_score\n",
26 | "from sklearn.discriminant_analysis import LinearDiscriminantAnalysis\n",
27 | "from sklearn.decomposition import PCA\n",
28 | "import matplotlib.pyplot as plt \n",
29 | "%matplotlib inline"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "## Penguins"
37 | ]
38 | },
39 | {
40 | "cell_type": "code",
41 | "execution_count": null,
42 | "metadata": {},
43 | "outputs": [],
44 | "source": [
45 | "penguins_df = pd.read_csv('penguins.csv', index_col = 0)\n",
46 | "y = penguins_df.pop('species').values\n",
47 | "X_raw = penguins_df.values\n",
48 | "\n",
49 | "X_tr_raw, X_ts_raw, y_train, y_test = train_test_split(X_raw, y, \n",
50 | " random_state=1, test_size=1/2)\n",
51 | "scaler = StandardScaler()\n",
52 | "X_train = scaler.fit_transform(X_tr_raw)\n",
53 | "X_test = scaler.transform(X_ts_raw)\n",
54 | "\n",
55 | "feature_names = penguins_df.columns\n",
56 | "print(penguins_df.shape)\n",
57 | "penguins_df.head()"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": null,
63 | "metadata": {},
64 | "outputs": [],
65 | "source": [
66 | "types = list(Counter(y).keys())\n",
67 | "types"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": null,
73 | "metadata": {},
74 | "outputs": [],
75 | "source": [
76 | "lda = LinearDiscriminantAnalysis()\n",
77 | "lda.fit(X_train, y_train)\n",
78 | "X_tr_lda = lda.transform(X_train)\n",
79 | "X_tr_lda.shape"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": null,
85 | "metadata": {},
86 | "outputs": [],
87 | "source": [
88 | "lda.explained_variance_ratio_"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": null,
94 | "metadata": {},
95 | "outputs": [],
96 | "source": [
97 | "plt.figure(figsize=(4,4))\n",
98 | "colors = ['navy', 'turquoise', 'darkorange']\n",
99 | "lw = 2\n",
100 | "\n",
101 | "for color, target_name in zip(colors, types):\n",
102 | " plt.scatter(X_tr_lda[y_train == target_name, 0], X_tr_lda[y_train == target_name, 1], \n",
103 | " color=color, alpha=.8, lw=lw, label=target_name)\n",
104 | "plt.legend(loc='best', shadow=False, scatterpoints=1)\n",
105 | "plt.xlabel('PC1 (84%)')\n",
106 | "plt.ylabel('PC2 (16%)')\n",
107 | "#plt.title('LDA of the Penguins dataset')\n",
108 | "\n",
109 | "plt.show()"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {},
116 | "outputs": [],
117 | "source": [
118 | "y_pred = lda.predict(X_test)\n",
119 | "accuracy_score(y_pred,y_test)"
120 | ]
121 | },
122 | {
123 | "cell_type": "markdown",
124 | "metadata": {},
125 | "source": [
126 | "## PCA"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": null,
132 | "metadata": {},
133 | "outputs": [],
134 | "source": [
135 | "pca = PCA(n_components=4)\n",
136 | "X_tr_pca = pca.fit(X_train).transform(X_train)\n",
137 | "\n",
138 | "# Proportion of variance explained for each components\n",
139 | "pca.explained_variance_ratio_"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": null,
145 | "metadata": {},
146 | "outputs": [],
147 | "source": [
148 | "plt.figure(figsize=(4, 4))\n",
149 | "\n",
150 | "lw = 2\n",
151 | "\n",
152 | "for color, target_name in zip(colors, types):\n",
153 | " plt.scatter(X_tr_pca[y_train == target_name, 0], X_tr_pca[y_train == target_name, 1], \n",
154 | " color=color, alpha=.8, lw=lw, label=target_name)\n",
155 | "plt.legend(loc='best', shadow=False, scatterpoints=1)\n",
156 | "plt.xlabel('PC1 (69%)')\n",
157 | "plt.ylabel('PC2 (20%)')\n",
158 | "#plt.title('PCA of the Penguins dataset')\n",
159 | "plt.show()"
160 | ]
161 | }
162 | ],
163 | "metadata": {
164 | "kernelspec": {
165 | "display_name": "Python 3",
166 | "language": "python",
167 | "name": "python3"
168 | },
169 | "language_info": {
170 | "codemirror_mode": {
171 | "name": "ipython",
172 | "version": 3
173 | },
174 | "file_extension": ".py",
175 | "mimetype": "text/x-python",
176 | "name": "python",
177 | "nbconvert_exporter": "python",
178 | "pygments_lexer": "ipython3",
179 | "version": "3.8.3"
180 | }
181 | },
182 | "nbformat": 4,
183 | "nbformat_minor": 2
184 | }
185 |
--------------------------------------------------------------------------------
/FS-PCA.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Principal Component Analysis\n",
"PCA using the PCA implementation in `scikit-learn`"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": null,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "import matplotlib.pyplot as plt\n",
18 | "%matplotlib inline\n",
19 | "import numpy as np\n",
20 | "import pandas as pd\n",
21 | "from sklearn import datasets\n",
22 | "from sklearn.decomposition import PCA\n",
23 | "from sklearn.preprocessing import StandardScaler"
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "## Top Trumps\n",
31 | "`HarryPotterTT.csv` contains data on Top Trumps cards. \n",
32 | "There are 22 examples described by 5 features."
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": null,
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "import pandas as pd\n",
42 | "from sklearn.preprocessing import StandardScaler\n",
43 | "TT_df = pd.read_csv('HarryPotterTT.csv')\n",
44 | "TT_df"
45 | ]
46 | },
47 | {
48 | "cell_type": "markdown",
49 | "metadata": {},
50 | "source": [
51 | "Extract the data into a numpy array X. \n",
52 | "And scale the data."
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": null,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "y = TT_df.pop('Name').values\n",
62 | "X = TT_df.values\n",
63 | "X_scal = StandardScaler().fit_transform(X)\n",
64 | "X.shape"
65 | ]
66 | },
67 | {
68 | "cell_type": "markdown",
69 | "metadata": {},
70 | "source": [
71 | "Apply PCA."
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "pcaHP = PCA(n_components=4)\n",
81 | "X_r = pcaHP.fit(X_scal).transform(X_scal)\n",
82 | "pcaHP.explained_variance_ratio_"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "metadata": {},
88 | "source": [
89 | "There are five features being projected onto 4 PCs so the projection matrix is 4 x 5."
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": null,
95 | "metadata": {},
96 | "outputs": [],
97 | "source": [
98 | "pcaHP.components_"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": null,
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "df = pd.DataFrame(pcaHP.explained_variance_ratio_, \n",
108 | " index=['PC1','PC2','PC3','PC4'],columns =['var'])\n",
109 | "\n",
110 | "pl = df.plot.bar(color='red',figsize=(5,4))\n",
111 | "pl.set_ylabel(\"Variance Explained\")\n",
112 | "pl.set_ylim([0,0.8])"
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": null,
118 | "metadata": {},
119 | "outputs": [],
120 | "source": [
121 | "plt.figure(figsize=(8,6))\n",
122 | "lw = 2\n",
123 | "labels = list(range(len (y)))\n",
124 | "labels[0]='Harry'\n",
125 | "labels[1]='Hermione'\n",
126 | "labels[3]='Prof D'\n",
127 | "labels[5]='Prof McG'\n",
128 | "labels[6]='Prof Moody'\n",
129 | "labels[18]='Cedric D'\n",
130 | "labels[19]='Viktor K'\n",
131 | "labels[21]='Lucius Malfoy'\n",
132 | "labels[4]='Snape'\n",
133 | "labels[12]='Draco Malfoy'\n",
134 | "\n",
135 | "plt.scatter(X_r[:, 0], X_r[:, 1])\n",
136 | "\n",
137 | "for label, xi, yi in zip(labels, X_r[:, 0], X_r[:, 1]):\n",
138 | " plt.annotate(\n",
139 | " label,\n",
140 | " xy=(xi, yi), xytext=(-3, 3),\n",
141 | " textcoords='offset points', ha='right', va='bottom')\n",
142 | "\n",
143 | "plt.xlabel('PC1 (49%)')\n",
144 | "plt.ylabel('PC2 (32%)')\n",
145 | "plt.title('PCA of HP dataset')\n",
146 | "\n",
147 | "plt.show()"
148 | ]
149 | },
150 | {
151 | "cell_type": "markdown",
152 | "metadata": {},
153 | "source": [
154 | "## Comment\n",
155 | " - This plot shows the data projected onto the first 2 PCs. \n",
156 | " - These PCs account for 81% of the variance in the data. \n",
157 | " - It might be argued that the first PC captures *competence* and the second represents *malevolence*. \n"
158 | ]
159 | }
160 | ],
161 | "metadata": {
162 | "kernelspec": {
163 | "display_name": "Python 3",
164 | "language": "python",
165 | "name": "python3"
166 | },
167 | "language_info": {
168 | "codemirror_mode": {
169 | "name": "ipython",
170 | "version": 3
171 | },
172 | "file_extension": ".py",
173 | "mimetype": "text/x-python",
174 | "name": "python",
175 | "nbconvert_exporter": "python",
176 | "pygments_lexer": "ipython3",
177 | "version": "3.8.3"
178 | }
179 | },
180 | "nbformat": 4,
181 | "nbformat_minor": 2
182 | }
183 |
--------------------------------------------------------------------------------
/FS-Permutation+Wrapper.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "f2178a5a",
6 | "metadata": {},
7 | "source": [
8 | "# A Two Stage Strategy for Feature Subset Selection\n",
9 | "1. Use Permutation Feature Importance to identify a large subset of candidates\n",
10 | "2. Use a Wrapper search on this subset to select a final set"
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "id": "ec35a629",
16 | "metadata": {},
17 | "source": [
18 | "## Permutation Feature Importance\n",
19 | "https://scikit-learn.org/stable/modules/permutation_importance.html \n",
20 | "\n",
21 | "This is a good way to score feature importance in the context of a specific classifier/model. \n",
22 | "The permutation feature importance is defined to be the decrease in a model score when a single feature value is randomly shuffled. \n",
23 | "The idea comes from the original work on Random Forests by Leo Breiman: \n",
24 | "L. Breiman, “Random Forests”, Machine Learning, 45(1), 5-32, 2001."
25 | ]
26 | },
27 | {
28 | "cell_type": "code",
29 | "execution_count": 1,
30 | "id": "39ad8bc6",
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "import pandas as pd\n",
35 | "import numpy as np\n",
36 | "from sklearn.inspection import permutation_importance\n",
37 | "from sklearn import preprocessing\n",
38 | "from sklearn.metrics import accuracy_score\n",
39 | "from sklearn.model_selection import cross_val_score\n",
40 | "from sklearn.neighbors import KNeighborsClassifier\n",
41 | "from sklearn.model_selection import train_test_split\n",
42 | "import matplotlib.pyplot as plt"
43 | ]
44 | },
45 | {
46 | "cell_type": "markdown",
47 | "id": "71fcf18b",
48 | "metadata": {},
49 | "source": [
"Load the Ionosphere dataset from the UCI repository. "
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 2,
56 | "id": "c6578ead",
57 | "metadata": {},
58 | "outputs": [
59 | {
60 | "name": "stdout",
61 | "output_type": "stream",
62 | "text": [
63 | "(351, 35)\n"
64 | ]
65 | },
66 | {
67 | "data": {
68 | "text/html": [
69 | "
\n",
70 | "\n",
83 | "
\n",
84 | " \n",
85 | " \n",
86 | " | \n",
87 | " 0 | \n",
88 | " 1 | \n",
89 | " 2 | \n",
90 | " 3 | \n",
91 | " 4 | \n",
92 | " 5 | \n",
93 | " 6 | \n",
94 | " 7 | \n",
95 | " 8 | \n",
96 | " 9 | \n",
97 | " ... | \n",
98 | " 25 | \n",
99 | " 26 | \n",
100 | " 27 | \n",
101 | " 28 | \n",
102 | " 29 | \n",
103 | " 30 | \n",
104 | " 31 | \n",
105 | " 32 | \n",
106 | " 33 | \n",
107 | " Class | \n",
108 | "
\n",
109 | " \n",
110 | " \n",
111 | " \n",
112 | " 0 | \n",
113 | " 1 | \n",
114 | " 0 | \n",
115 | " 0.99539 | \n",
116 | " -0.05889 | \n",
117 | " 0.85243 | \n",
118 | " 0.02306 | \n",
119 | " 0.83398 | \n",
120 | " -0.37708 | \n",
121 | " 1.00000 | \n",
122 | " 0.03760 | \n",
123 | " ... | \n",
124 | " -0.51171 | \n",
125 | " 0.41078 | \n",
126 | " -0.46168 | \n",
127 | " 0.21266 | \n",
128 | " -0.34090 | \n",
129 | " 0.42267 | \n",
130 | " -0.54487 | \n",
131 | " 0.18641 | \n",
132 | " -0.45300 | \n",
133 | " g | \n",
134 | "
\n",
135 | " \n",
136 | " 1 | \n",
137 | " 1 | \n",
138 | " 0 | \n",
139 | " 1.00000 | \n",
140 | " -0.18829 | \n",
141 | " 0.93035 | \n",
142 | " -0.36156 | \n",
143 | " -0.10868 | \n",
144 | " -0.93597 | \n",
145 | " 1.00000 | \n",
146 | " -0.04549 | \n",
147 | " ... | \n",
148 | " -0.26569 | \n",
149 | " -0.20468 | \n",
150 | " -0.18401 | \n",
151 | " -0.19040 | \n",
152 | " -0.11593 | \n",
153 | " -0.16626 | \n",
154 | " -0.06288 | \n",
155 | " -0.13738 | \n",
156 | " -0.02447 | \n",
157 | " b | \n",
158 | "
\n",
159 | " \n",
160 | " 2 | \n",
161 | " 1 | \n",
162 | " 0 | \n",
163 | " 1.00000 | \n",
164 | " -0.03365 | \n",
165 | " 1.00000 | \n",
166 | " 0.00485 | \n",
167 | " 1.00000 | \n",
168 | " -0.12062 | \n",
169 | " 0.88965 | \n",
170 | " 0.01198 | \n",
171 | " ... | \n",
172 | " -0.40220 | \n",
173 | " 0.58984 | \n",
174 | " -0.22145 | \n",
175 | " 0.43100 | \n",
176 | " -0.17365 | \n",
177 | " 0.60436 | \n",
178 | " -0.24180 | \n",
179 | " 0.56045 | \n",
180 | " -0.38238 | \n",
181 | " g | \n",
182 | "
\n",
183 | " \n",
184 | " 3 | \n",
185 | " 1 | \n",
186 | " 0 | \n",
187 | " 1.00000 | \n",
188 | " -0.45161 | \n",
189 | " 1.00000 | \n",
190 | " 1.00000 | \n",
191 | " 0.71216 | \n",
192 | " -1.00000 | \n",
193 | " 0.00000 | \n",
194 | " 0.00000 | \n",
195 | " ... | \n",
196 | " 0.90695 | \n",
197 | " 0.51613 | \n",
198 | " 1.00000 | \n",
199 | " 1.00000 | \n",
200 | " -0.20099 | \n",
201 | " 0.25682 | \n",
202 | " 1.00000 | \n",
203 | " -0.32382 | \n",
204 | " 1.00000 | \n",
205 | " b | \n",
206 | "
\n",
207 | " \n",
208 | " 4 | \n",
209 | " 1 | \n",
210 | " 0 | \n",
211 | " 1.00000 | \n",
212 | " -0.02401 | \n",
213 | " 0.94140 | \n",
214 | " 0.06531 | \n",
215 | " 0.92106 | \n",
216 | " -0.23255 | \n",
217 | " 0.77152 | \n",
218 | " -0.16399 | \n",
219 | " ... | \n",
220 | " -0.65158 | \n",
221 | " 0.13290 | \n",
222 | " -0.53206 | \n",
223 | " 0.02431 | \n",
224 | " -0.62197 | \n",
225 | " -0.05707 | \n",
226 | " -0.59573 | \n",
227 | " -0.04608 | \n",
228 | " -0.65697 | \n",
229 | " g | \n",
230 | "
\n",
231 | " \n",
232 | "
\n",
233 | "
5 rows × 35 columns
\n",
234 | "
"
235 | ],
236 | "text/plain": [
237 | " 0 1 2 3 4 5 6 7 8 \\\n",
238 | "0 1 0 0.99539 -0.05889 0.85243 0.02306 0.83398 -0.37708 1.00000 \n",
239 | "1 1 0 1.00000 -0.18829 0.93035 -0.36156 -0.10868 -0.93597 1.00000 \n",
240 | "2 1 0 1.00000 -0.03365 1.00000 0.00485 1.00000 -0.12062 0.88965 \n",
241 | "3 1 0 1.00000 -0.45161 1.00000 1.00000 0.71216 -1.00000 0.00000 \n",
242 | "4 1 0 1.00000 -0.02401 0.94140 0.06531 0.92106 -0.23255 0.77152 \n",
243 | "\n",
244 | " 9 ... 25 26 27 28 29 30 \\\n",
245 | "0 0.03760 ... -0.51171 0.41078 -0.46168 0.21266 -0.34090 0.42267 \n",
246 | "1 -0.04549 ... -0.26569 -0.20468 -0.18401 -0.19040 -0.11593 -0.16626 \n",
247 | "2 0.01198 ... -0.40220 0.58984 -0.22145 0.43100 -0.17365 0.60436 \n",
248 | "3 0.00000 ... 0.90695 0.51613 1.00000 1.00000 -0.20099 0.25682 \n",
249 | "4 -0.16399 ... -0.65158 0.13290 -0.53206 0.02431 -0.62197 -0.05707 \n",
250 | "\n",
251 | " 31 32 33 Class \n",
252 | "0 -0.54487 0.18641 -0.45300 g \n",
253 | "1 -0.06288 -0.13738 -0.02447 b \n",
254 | "2 -0.24180 0.56045 -0.38238 g \n",
255 | "3 1.00000 -0.32382 1.00000 b \n",
256 | "4 -0.59573 -0.04608 -0.65697 g \n",
257 | "\n",
258 | "[5 rows x 35 columns]"
259 | ]
260 | },
261 | "execution_count": 2,
262 | "metadata": {},
263 | "output_type": "execute_result"
264 | }
265 | ],
266 | "source": [
267 | "col_heads = [str(x) for x in range(34)]\n",
268 | "col_heads.append('Class')\n",
269 | "ion_data = pd.read_csv('ionosphere.csv', names = col_heads, na_values='?', header = None)\n",
270 | "ion_data.dropna(axis = 0, inplace = True)\n",
271 | "print(ion_data.shape)\n",
272 | "ion_data.head()"
273 | ]
274 | },
275 | {
276 | "cell_type": "markdown",
277 | "id": "31f199b0",
278 | "metadata": {},
279 | "source": [
280 | "Split the data 50:50; 50% of the data will be held back from the feature selection and model training process."
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": 3,
286 | "id": "e0757455",
287 | "metadata": {},
288 | "outputs": [],
289 | "source": [
290 | "y = ion_data.pop('Class').values\n",
291 | "X = ion_data\n",
292 | "ion_scaler = preprocessing.MinMaxScaler().fit(X)\n",
293 | "X_scaled = ion_scaler.transform(X)\n",
294 | "X_train, X_test, y_train, y_test = train_test_split(X_scaled, y, \n",
295 | " random_state=2, test_size=1/2)"
296 | ]
297 | },
298 | {
299 | "cell_type": "code",
300 | "execution_count": 4,
301 | "id": "94efc1d7",
302 | "metadata": {},
303 | "outputs": [],
304 | "source": [
305 | "# Final results dataframe\n",
306 | "res_df = pd.DataFrame([['All Features',0.0,0.0],\n",
307 | " ['After PI',0.0,0.0],\n",
308 | " ['After Wrapper',0.0,0.0]],columns=['Method','X-Val','Hold-Out'])\n",
309 | "res_df.set_index('Method', inplace = True)"
310 | ]
311 | },
312 | {
313 | "cell_type": "markdown",
314 | "id": "baa7d3e9",
315 | "metadata": {},
316 | "source": [
317 | "Calculating the base-line accuracies on the full feature set. \n",
318 | "Accuracy for the training data is assessed using cross-validation. \n",
319 | "Accuracy on the test set is assessed using a simple hold-out strategy. "
320 | ]
321 | },
322 | {
323 | "cell_type": "code",
324 | "execution_count": 5,
325 | "id": "cc09c142",
326 | "metadata": {},
327 | "outputs": [
328 | {
329 | "name": "stdout",
330 | "output_type": "stream",
331 | "text": [
332 | "X_Val on training all features: 0.818\n",
333 | "Hold Out testing all features: 0.795\n"
334 | ]
335 | }
336 | ],
337 | "source": [
338 | "kNN = KNeighborsClassifier(n_neighbors=3)\n",
339 | "kNN = kNN.fit(X_train,y_train)\n",
340 | "y_pred = kNN.predict(X_test)\n",
341 | "acc = accuracy_score(y_pred,y_test)\n",
342 | "cv_acc = cross_val_score(kNN, X_train, y_train, cv=8)\n",
343 | "\n",
344 | "res_df.loc['All Features']['X-Val']=cv_acc.mean()\n",
345 | "res_df.loc['All Features']['Hold-Out']=acc\n",
346 | "\n",
347 | "print(\"X_Val on training all features: {0:.3f}\".format(cv_acc.mean())) \n",
348 | "print(\"Hold Out testing all features: {0:.3f}\".format(acc)) "
349 | ]
350 | },
351 | {
352 | "cell_type": "markdown",
353 | "id": "cd93fa2e",
354 | "metadata": {},
355 | "source": [
356 | "## Permutation feature importance stage"
357 | ]
358 | },
359 | {
360 | "cell_type": "code",
361 | "execution_count": 6,
362 | "id": "2bc7dfd2",
363 | "metadata": {},
364 | "outputs": [],
365 | "source": [
366 | "knn_perm = permutation_importance(kNN, X_train, y_train, \n",
367 | " n_repeats=10, random_state = 0)"
368 | ]
369 | },
370 | {
371 | "cell_type": "code",
372 | "execution_count": 9,
373 | "id": "e3184727",
374 | "metadata": {},
375 | "outputs": [
376 | {
377 | "data": {
378 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAGpCAYAAACK4V1rAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAnGUlEQVR4nO3dfbxeZX3n+89XQEFAUAGlECY+IFOLEDTl0FJtRNtSpaC2tlJLcfQl1aOtUPtAtfXheJzjqU+1zkxbKhScQVoqIApYyLEGtBUswUBCg+Io0kiGYD0CoqKB3/yxVvRmZ++dnb2z1tphfd6v1/3a614P93WtBH575bqv3+9KVSFJGo9HDN0BSVK/DPySNDIGfkkaGQO/JI2MgV+SRmbXoTswF/vtt18tXbp06G507r777mPPPfccuhuDGOu9j/W+wXvv495Xr179jaraf+r+nSLwL126lOuvv37obnRu1apVrFixYuhuDGKs9z7W+wbvvY97T/K16fY71CNJI2Pgl6SRMfBL0sgY+CVpZAz8kjQyBn5JGhkDvySNjIFfkkbGwC9JI2Pgl6SRMfBL0sgY+CVpZAz8kjQyBn5JGhkDvySNjIFfkkYmVTV0H7Zp+fLlNYaFWD54/qW8d+1OsTbODvfGZ2we5b2P9b7Be5/rvd/2rhfOu50kq6tq+dT9gz3xJ9klyReSXDZUHyRpjIYc6nkDsH7A9iVplAYJ/EkOBl4IfGiI9iVpzIZ64v8z4A+AB2c6IclpSa5Pcv1dd93VW8ck6eGu98Cf5ARgU1Wtnu28qjqrqpZX1fL999+/p95J0sPfEE/8xwInJrkN+FvguCT/Y4B+SNIo9R74q+qPqurgqloKvAz4x6r6jb77IUljNc5JtIvUMw7ah9tevmLobgxi1apVo7z3sd43eO9D3vuggb+qVgGrhuyDJI2NT/yLyNqv380rzrx86G4M4o3P2PywuveFZFtKXRtiVs+SJJ9Osj7JzUne0HcfJGnMhnji3wy8sapuSLI3sDrJyqr61wH6IkmjM8Ssno1VdUO7fS9N2YaD+u6HJI3VoGWZkywFjgKum+aYmbuS1IEhq3PuBVwEnF5V90w9buauJHVjqCJtu9EE/fOr6uIh+iBJYzXErJ4AZwPrq+p9fbcvSWM3xKyeY4FTgLVJ1rT73lRVVwzQl0XFzN0VQ3dDGoXeA39VfRZI3+1Kkhpm7i4iY8jcNaNVGl5nY/zbytBN8ntJKsl+XfVBkrS1Lp/4Z8zQTbIE+Dng9g7blyRNo7Mn/m1k6L6fZunF6qp9SdL0epnOOZmhm+RE4OtVdeM2rjFzV5I60Hngn8zQpRn+eTPwlm1dZ+auJHWj08A/TYbuU4AnATe2a+4eDNyQ5Ild9kOS9COdfbk7XYZuVa0FDpg45zZgeVV9o6t+SJIeqstZPWbobqcxZ+5K6k9ngX8uGbpVtbSr9iVJ0zNzdxF5uGbumq0rLS5DlWXeN8lHk9zSZvb+1BD9kKQxGuqJ/wPAP1TVryR5JPDogfohSaPTe+BP8hjgOcArAKrq+8D3++6HJI3VEEM9TwbuAv4myReSfCjJnlNPMnNXkroxRODfFXgm8BdVdRRwH3Dm1JPM3JWkbgwR+DcAG6rquvb9R2l+EUiSetB74K+q/wX8W5LD2l3PA/61735I0lgNNavnt4Hz2xk9XwH+00D9WFTM3JXUh0ECf1WtAZYP0bYkjd0gCVySpOFYsmERsWSDpD4MVbLh+CRfTPLlJFtN5ZQkdaf3wJ9kF+C/Ar8IPB04OcnT++6HJI3VEE/8RwNfrqqvtOUa/hY4aYB+SNIoDRH4DwL+beL9hnbfQ1iyQZK6MUTgn25xltpqhyUbJKkTQ5VsWDLx/mDgjgH6IUmjNETg/xfg0CRPajN3XwZ8fIB+SNIo9T6Pv6o2J3k9cCWwC3BOVd3cdz8WI0s2SOrDUCUbrgCuGKJtSRo7M3cXkcWWuWvG
rfTw1NkYf5JzkmxKsm6aY7+XpJLs11X7kqTpdfnl7rnA8VN3JlkC/Bxwe4dtS5Jm0Fngr6prgG9Oc+j9wB8wzdx9SVL3ep3OmeRE4OtVdeMczjVzV5I60FvgT/Jo4M3AW+Zyvpm7ktSNPp/4nwI8CbgxyW00Gbs3JHlij32QpNHrbTpnVa0FDtjyvg3+y6vqG331QZLUYeBPcgGwAtgvyQbgrVV1dlftPRyYuSupD50F/qo6eRvHl3bVtiRpZmbuLiJDZ+6aqSuNQ5dDPUuADwNPBB4EzqqqDyT5O+Cw9rR9gW9V1bKu+iFJeqgun/g3A2+sqhuS7A2sTrKyqn5tywlJ3gvc3WEfJElTdDnGvxHY2G7fm2Q9zRKL/wqQJMCvAsd11QdJ0tZ6mcefZClwFHDdxO5nA3dW1a0zXGPmriR1oPPAn2Qv4CLg9Kq6Z+LQycAFM11n5q4kdaPTWT1JdqMJ+udX1cUT+3cFXgI8q8v2JUlb67Ief4CzgfVV9b4ph58P3FJVG7pqX5I0vS6f+I8FTgHWJlnT7ntTu+ziy5hlmGeszNyV1IcuZ/V8FsgMx17RVbuSpNmZubuImLkrqQ+9rrmb5N1JbklyU5JLkuzbVfuSpOn1vebuSuDwqjoC+BLwRx22L0maRq9r7lbVVVW1uX17Lc1iLJKkHvW65u4UrwQ+OdNBM3clqRuDBP4kb6Yp4nb+TOeYuStJ3eh9Vk+SU4ETgOdVVfXdviSNXa+BP8nxwB8CP1tV3+mzbUlSo9c1d2lm8TwKWNlUdODaqnpNV33Y2Zi5K6kPfa+562LrkjQwM3cXETN3JfWhy8zdJUk+nWR9kpuTvKHdb/auJA2oy+mcW9bc/XHgGOB1SZ6O2buSNKguM3c3VtUN7fa9wHrgILN3JWlYQ665C9vI3pUk7XiDrbm7rexdSzZIUjc6DfyzrLm7JXv35TNl71qyQZK60WUC17Rr7pq9K0nD6n3NXeDPMXtXkgYzxJq7V3TV5s7Okg2S+jBkPX5J0gAs2bCI9FGywbIMkros2bB7ks8nubEt2fD2dv/jkqxMcmv787Fd9UGStLUuh3ruB46rqiOBZcDxSY4BzgQ+VVWHAp9q30uSetJlyYaqqm+3b3drXwWcBJzX7j8PeFFXfZAkba3rBK5d2qmcm4CVVXUd8ISq2ghNPR/ggBmuNXNXkjrQaeCvqgeqahlNIbajkxy+HdeauStJHehlOmdVfQtYBRwP3JnkQID256Y++iBJanQ5q2f/LYusJNkDeD5wC/Bx4NT2tFOBS7vqgyRpa13O4z8QOC/JLjS/YC6sqsuSfA64MMmrgNuBl3bYh52KmbuS+tBlyYabaGrwT93/78DzumpXkjQ7M3cXETN3JfVhiMXWlyW5Nsmadrrm0V31QZK0tS6f+Lcstn5Dkr2B1UlWAn8KvL2qPpnkBe37FR32Q5I0ocsx/o3AlkSte5OsBw6iyd59THvaPsAdXfVBkrS1Xsb4pyy2fjpwZZL30Aw1/fQM15wGnAZwyCGH9NFNSRqFIRZbfy1wRlUtAc6gWZ5xK2buSlI3hlhs/VRgy/bfA365K0k96nJWz7SLrdOM6f9su30ccGtXfZAkbW2IxdZfDXwgya7A92jH8WXmrqR+DLHYOsCzumpXkjQ7M3cXka4yd83WlTRpiDV3X9q+fzDJ8q7alyRNr8sn/i1r7n67nd3z2SSfBNYBLwH+qsO2JUkz6HKMv4Ct1tytqvUAzaQfSVLfhlhzd67XuuauJHXANXclaWSGWHNXkjSgIdbclSQNaIg1d18MfBDYH7g8yZqq+oUO+7HTMHNXUh+GWHP3EuCSrtqVJM3OzN1FxMxdSX0YInP33UluSXJTkku2fA8gSepHl7N6tmTuHgksA45PcgywEji8qo4AvgT8UYd9kCRN0Vngr8Z0mbtXVdXmdv+1NHP8JUk9GTpz95XAJ2e41sxdSerAYJm7Sd4MbAbOn+FaM3clqQODZO4mORU4
AXh5W8xNktST3jN3kxwP/CFwYlV9p6v2JUnTGyJz98vAo4CVbWnma6vqNR32Q5I0YYjM3ad21ebOzpINkvrQyxi/JGnxsGTDIrI9JRsswyBpvgZ54k9yRlvGYV2SC5LsPkQ/JGmMeg/8SQ4CfgdYXlWHA7sAL+u7H5I0VkON8e8K7JFkV+DRwB0D9UOSRqf3wF9VXwfeA9wObATurqqrpp5nyQZJ6sYQQz2PBU4CngT8GLBnkt+Yep4lGySpG0MM9Twf+GpV3VVVPwAuBn56gH5I0igNEfhvB45J8ug0qbvPA9YP0A9JGqXe5/FX1XVJPgrcQFOd8wvAWX33YzEyc1dSHwZJ4KqqtwJvHaJtSRo7M3cXETN3JfVhkMCf5DbgXuABYHNVLR+iH5I0RkM+8T+3qr4xYPuSNEpW55SkkRkq8BdwVZLVSU6b7gQzdyWpG0MF/mOr6pnALwKvS/KcqSeYuStJ3Zg18Cd5bpKL2xLKNyf5aJIVC220qu5of24CLgGOXuhnSpLmZsbAn+SFwDnAJ4BfB14OXAGck+QF820wyZ5J9t6yDfw8sG6+nydJ2j6zzer5feBFVXXjxL41Sa4HPkjzS2A+ngBc0i60vivwkar6h3l+1sOKmbuS+jBb4H/ilKAPNIuoJ3nCfBusqq8AR873eknSwswW+O+b5zHN02yZu2bqStpRZgv8T0ny8Wn2B3jytj44yRLgw8ATgQeBs6rqA0mOBP4S2Au4DXh5Vd2zvR2XJM3PbIH/pFmOvWcOn70ZeGNV3dB+mbs6yUrgQ8DvVdXVSV5J813Cn8y5x5KkBZkx8FfV1Qv54KraSLO0IlV1b5L1wEHAYcA17WkrgSsx8EtSb2YM/EnW0mTYTquqjphrI0mWAkcB19FM3TwRuBR4KbBkhmtOA04DOOSQQ+balCRpG2Yb6jlhRzSQZC/gIuD0qrqnHd758yRvAT4OfH+666rqLNoFWpYvXz7jLyBJ0vaZbajnawv98CS70QT986vq4vZzb6FJ2iLJ0wCnq0hSjzqr1dOup3s2sL6q3jex/4D25yOAP6aZ4SNJ6kmX9fiPBU4B1iZZ0+57E3Bokte17y8G/qbDPuxUzNyV1Ic5Bf4kewCHVNUX5/rBVfVZmjn/0/nAXD9HkrRjbTPwJ/klmnn7jwSelGQZ8H9V1Ykd9210pmbumq0rqQtzGeN/G03Z5G8BVNUaYOm2LkpyTpJNSdZN7HtckpVJbm1/PnY+nZYkzd9cAv/mqrp7Hp99LnD8lH1nAp+qqkOBT7XvJUk9mkvgX5fk14Fdkhya5IPAP2/roqq6BvjmlN0nAee12+cBL9qOvkqSdoC5BP7fBn4CuB/4CHA3cPo823tCW8phS0mHA2Y60TV3Jakbs365m2QX4ONV9Xzgzf10qWHmriR1Y9Yn/qp6APhOkn12UHt3JjkQoP25aQd9riRpjuYyj/97NElYK5lYgKWqfmce7X0cOBV4V/vz0nl8hiRpAeYS+C9vX9slyQXACmC/JBuAt9IE/AuTvAq4naY6pySpR9sM/FV13rbOmeG6k2c49Lz5fN4YWLJBUh/mkrn7Vaapy19V21x+UZK0+MxlqGf5xPbuNMMzj+umO5Kkrm1zHn9V/fvE6+tV9WfAcdu6boaSDe9IclOSNUmuSvJjC+u+JGl7bTPwJ3nmxGt5ktcAe8/hs89l65IN766qI6pqGXAZ8Jbt7bAkaWHmMtTz3ontzcBXgV/d1kVVdU271u7kvnsm3u7JLGv6SpK6MZfA/6qq+srkjiRPmm+DSd4J/CZN6YfnznKei61LUgfmUqvno3PcNydV9eaqWgKcD7x+lvPOqqrlVbV8//33n29zkqQpZnziT/IfaYqz7ZPkJROHHkMzu2ehPkKTGPbWHfBZkqQ5mm2o5zDgBGBf4Jcm9t8LvHo+jSU5tKpubd+eCNwyn8+RJM3fjIG/qi4FLk3yU1X1ue394BlKNrwgyWHAg8DXgNfMq9eSpHmb
y5e7X0jyOpphnx8O8VTVK2e7aIaSDWdvX/ckSTvaXL7c/e/AE4FfAK4GDqYZ7pEk7YTmEvifWlV/AtzXFmx7IfCMhTQ6XVavJKkfcwn8P2h/fivJ4cA+wNIFtnsuW2f1SpJ6MJcx/rOSPBb4E5qFVPZigaUWpsvqlST1Yy71+D/Ubl4N9FaK2cxdSerGXIq0PSHJ2Uk+2b5/eruCVqfM3JWkbsxljP9c4EpgSwnlLwGnd9QfSVLH5hL496uqC2mSrqiqzcADnfZKktSZuQT++5I8nraEcpJjaCprzlub1fs54LAkG/oYOpIkNeYyq+d3aWbzPCXJPwH7A7+ykEZnWYhdktSx2apzHlJVt1fVDUl+lqZoW4AvVtUPZrpO22fpmZf/cPvc4/ccsCeSxmK2oZ6PTWz/XVXdXFXr5hr0kyxJ8ukk65PcnOQNE8d+O8kX2/1/Os++S5LmYbahnkxsz2f+/mbgje2/GPYGVidZCTwBOAk4oqruT3LAPD5bkjRPswX+mmF7TqpqI7Cx3b43yXrgIJpa/u+qqvvbY5u297MlSfM321DPkUnuSXIvcES7fU+Se5PcM8t1W2nLMxwFXAc8DXh2kuuSXJ3kJ2e45rQk1ye5/q677tqe5iRJs5htIZZddkQDSfYCLgJOr6p7kuwKPBY4BvhJ4MIkT66qh/yroqrOAs4CWL58+Xb/i0OSNL25zOOftyS70QT986vq4nb3BuDianyeJjFsvy77IUn6kc4Cf5LQrLi1vqreN3HoY8Bx7TlPAx4JfKOrfkiSHmouCVzzdSxwCrA2yZp235uAc4Bz2kVYvg+cOnWYZ0xue9cLf7i9atWq4ToiaTQ6C/xV9VkeOiV00m901a4kaXZdPvFrDszcldS3Tr/cnU6S3ZN8PsmNbebu2/vugySN2RBP/PcDx1XVt9tZP59N8smqunaAvkjS6PQe+Nsvcr/dvt2tfY32y11J6lvvQz0ASXZpZ/psAlZW1XXTnGPmriR1YJDAX1UPVNUy4GDg6CSHT3OOa+5KUgcGCfxbVNW3gFXA8UP2Q5LGZIhZPfsn2bfd3gN4PnBL3/2QpLEaYlbPgcB5SXah+cVzYVVdNkA/FgUzdyX1bYhZPTfRlGiWJA1g0DF+SVL/LNnQs8kSDVNZskFSH7osy3xOkk1tFc7J/S60LkkD6nKo51ymTNNM8lx+tND6TwDv6bB9SdI0Ogv8VXUN8M0pu1+LC61L0qD6/nJ3TgutgyUbJKkrfQf+yYXWf59mofVpF2uxZIMkdaPvwO9C65I0sL4D/8dwoXVJGlRn8/iTXACsAPZLsgF4Ky60/pASDVNZskFSH7pcbP3kGQ650LokDcjM3Z6ZuStpaL1m7iZ5W5KvJ1nTvl7QVfuSpOn1mrnben9VLWtfV3TYviRpGn1n7kqSBjZEWebXJ7mpHQp67EwnmbkrSd3oO/D/BfAUYBmwEXjvTCeauStJ3eg18FfVnVX1QFU9CPw1cHSf7UuSeg78SQ6cePtiYN1M50qSutF35u6KJMuAAm4Dfqur9hcrM3clDa3vzN2zu2pPkjQ3Zu72zMxdSUPrO3P37yaydm9Lsqar9iVJ0+vyif9c4L8AH96yo6p+bct2kvcCd3fYviRpGl2O8V+TZOl0x9pVt36Vtja/JKk/Q2TuAjwbuLOqbp3pBDN3JakbQwX+k4ELZjvBzF1J6kbvs3qS7Aq8BHhW321LkoZ54n8+cEtVbRigbUkavV4zd6vqbOBlbGOY5+HMzF1JQ+t9zd2qekVXbUqSts3M3Y7Nlqk7lZm7kvrQd+busiTXtpm71yexLLMk9azvNXf/FHh7VS0D3tK+lyT1qO81dwt4TLu9D3BHV+1LkqbX9xj/6cCVSd5D80vnp2c6MclpwGkAhxxySC+dk6Qx6Hse/2uBM6pqCXAGs9TnN3NXkrrRd+A/Fbi43f57XHNXknrXd+C/A/jZdvs4YMYibZKkbvS95u6rgQ+09Xq+RzuG/3A2
W6buVGbuSupD75m7WJxNkgZl5m7HzNyVtNh0mbm7JMmnk6xPcnOSN7T735bk6xNr776gqz5IkrbW5RP/ZuCNVXVDkr2B1UlWtsfeX1Xv6bBtSdIMuhzj3whsbLfvTbIeOKir9iRJc9PLdM520fWjgOvaXa9PclNbyO2xffRBktToPPAn2Qu4CDi9qu4B/gJ4CrCM5l8E753hOhdbl6QOdBr4k+xGE/TPr6qLAarqzqp6oKoeBP6aGbJ3LdkgSd3oclZPaGrxrK+q903sP3DitBcD66ZeK0nqTpezeo4FTgHWJlnT7nsTcHKSZTQlmm8DfqvDPkiSpuhyVs9ngUxz6Iqu2lyMLNkgabHpu0ibJGlglmzomCUbJC02Q5RseEc7h39NkquS/FhXfZAkba3LoZ4tJRt+HDgGeF2SpwPvrqoj2gXXL6NZdF2S1JMuF1vfWFU3tNv3AuuBg9okri32pJndI0nqSS9j/FNLNiR5J/CbwN3Ac2e4xsXWJakDQ5RsoKre3C64fj7w+umuM3NXkrrRe8mGKT4C/HKXfZAkPdQQJRsOnTjtROCWrvogSdraECUbXpXkMOBB4GvAazrsw+DM3JW02FiyQZJGxszdDmxPtu4kM3cl9WGIzN3HJVmZ5Nb2pytwSVKPhsjcPRP4VFUdCnyqfS9J6knvmbvAScB57WnnAS/qqg+SpK0Nsdj6E6pqIzS/HIADZrjGNXclqQODZO7OhZm7ktSNITJ379yy7m77c1OXfZAkPVTvmbvAx4FT2+1TgUu76oMkaWtDZO6+C7gwyauA24GXdtiHQWxPtu4kM3cl9WGIzF2A53XVriRpdmbu7gDzzdSdysxdSX3ocox/9ySfT3Jjm7n79na/a+5K0oC6nNVzP3BcVR0JLAOOT3IMrrkrSYPqcoy/gG+3b3drX+Wau5I0rE7H+JPsAqwGngr816pyzV1JGlinCVxV9UA7pHMwcHSSw9v9rrkrSQPppVZPVX0LWAUcP+WQa+5KUs+6nNWzf5J92+09gOcDt7jmriQNq8sx/gOB89px/kcAF1bVZUkuerituTvfTN2pzNyV1IcuZ/XcRFOKeep+h3YkaUBm7s7TjsrWnWTmrqQ+dDnGf06STUnWTex7aZvF+2CS5V21LUmaWZezes5l61k864CXANd02K4kaRZdjvFf0y65OLlvPUBTql+SNIRe5vHPh2vuSlI3Fm3gN3NXkrqxaAO/JKkbBn5JGpkup3NeAHwOOCzJhiSvSvLiJBuAnwIuT3JlV+1LkqbX5ayek2c4dElXbfZpR5VpmGTJBkl9cKhHkkbGkg0TuijDsD0s2SCpD4M88Sd5Q5J1bfmG04fogySNVe+Bv12F69XA0cCRwAlTavRLkjo0xBP/jwPXVtV3qmozcDXw4gH6IUmjNETgXwc8J8njkzwaeAGwZOpJlmyQpG70HvjbQm3/L7AS+AfgRmDzNOdZskGSOjDIl7tVdXZVPbOqngN8E7h1iH5I0hgNMp0zyQFVtSnJITT1+X9qiH5I0hgNNY//oiSPB34AvK6q/v+B+vEQXWTjbg8zdyX1YZDAX1XPHqJdSZKZuw9h5q6kMRgigeuwJGsmXveYvStJ/en9ib+qvggsA0iyC/B1HiYVOyVpZzB0dc7nAf+zqr42cD8kaTSGDvwvAy6Y7oCZu5LUjcECf5JHAicCfz/dcTN3JakbQz7x/yJwQ1XdOWAfJGl0hgz8JzPDMI8kqTtDlWx4NPBzwG8N0f5MzNyVNAZDZe5+B3j8EG1L0tiNOnN36EzdqczcldSHzsb4kyxJ8ukk69u1dd/Q7n9p+/7BJMu7al+SNL0un/g3A2+sqhuS7A2sTrKSZgWulwB/1WHbkqQZdBb4q2ojsLHdvjfJeuCgqloJkKSrpiVJs+hlOmeSpcBRwHXbcY2Zu5LUgc4Df5K9gIuA06vqnrleZ+auJHWj08CfZDeaoH9+VV3cZVuSpLnpclZPgLOB9VX1vq7akSRt
ny5n9RwLnAKsTbKm3fcm4FHAB4H9gcuTrKmqX+iwHzMaOlN3KjN3JfWhy1k9nwVmmrrjwiuSNJCHfebuYsvOnY2Zu5L60OUY/zlJNiVZN7HvyCSfS7I2ySeSPKar9iVJ0+tyVs+5wPFT9n0IOLOqnkEz3PP7HbYvSZpGZ4G/qq4Bvjll92HANe32SuCXu2pfkjS9vhdiWUez3CLAS4ElM51o5q4kdaPvwP9K4HVJVgN7A9+f6UQzdyWpG73O6qmqW4CfB0jyNGBxTaSXpBHo9Yk/yQHtz0cAfwz8ZZ/tS5K6nc55AfA54LAkG5K8Cjg5yZeAW4A7gL/pqn1J0vS6zNw9eYZDH+iqzekstrIMs7Fkg6Q+9P3lriRpYAZ+SRoZA78kjYyBX5JGxsAvSSNj4JekkTHwS9LIGPglaWQM/JI0MgZ+SRoZA78kjYyBX5JGxsAvSSNj4JekkTHwS9LIGPglaWRSVUP3YZuS3AV8beh+9GA/4BtDd2IgY733sd43eO993Pt/qKr9p+7cKQL/WCS5vqqWD92PIYz13sd63+C9D3nvDvVI0sgY+CVpZAz8i8tZQ3dgQGO997HeN3jvg3GMX5JGxid+SRoZA78kjYyBvydJjk/yxSRfTnLmNMeT5M/b4zcleeZcr13MFnjf5yTZlGRdv73eMeZ770mWJPl0kvVJbk7yhv57vzALuPfdk3w+yY3tvb+9/97P30L+e2+P75LkC0ku67SjVeWr4xewC/A/gScDjwRuBJ4+5ZwXAJ8EAhwDXDfXaxfrayH33R57DvBMYN3Q99Lz3/mBwDPb7b2BL+0sf+c74N4D7NVu7wZcBxwz9D11fd8Tx38X+AhwWZd99Ym/H0cDX66qr1TV94G/BU6acs5JwIercS2wb5ID53jtYrWQ+6aqrgG+2WuPd5x533tVbayqGwCq6l5gPXBQn51foIXce1XVt9tzdmtfO8sMlAX9957kYOCFwIe67qiBvx8HAf828X4DW/+PPNM5c7l2sVrIfe/sdsi9J1kKHEXz5LuzWNC9t8Mda4BNwMqq2lnufaF/538G/AHwYEf9+yEDfz8yzb6pTzEznTOXaxerhdz3zm7B955kL+Ai4PSqumcH9q1rC7r3qnqgqpYBBwNHJzl8x3avM/O+7yQnAJuqavWO79bWDPz92AAsmXh/MHDHHM+Zy7WL1ULue2e3oHtPshtN0D+/qi7usJ9d2CF/71X1LWAVcPwO72E3FnLfxwInJrmNZojouCT/o7OeDv2FyBhewK7AV4An8aMvfX5iyjkv5KFf+nx+rtcu1tdC7nvi+FJ2zi93F/J3HuDDwJ8NfR8D3Pv+wL7t9h7AZ4AThr6nru97yjkr6PjL3V3n+PtBC1BVm5O8HriS5pv/c6rq5iSvaY//JXAFzTf+Xwa+A/yn2a4d4Da220LuGyDJBTT/E+yXZAPw1qo6u9+7mJ8F3vuxwCnA2nasG+BNVXVFj7cwbwu89wOB85LsQjMicWFVdTu1cQdZ6H/vfbJkgySNjGP8kjQyBn5JGhkDvySNjIFfkkbGwC9JI2Pg14ySPJBkTZJ1Sf4+yaMXQZ+WJvn1iffLk/z5Dvrs25LstyM+a47t7Zvk/+yrvWnaT5J/TPKY9s91qyqoSVYkqSS/NLHvsiQr2u1VSa6fOLY8yap2+xlJzu36PrT9DPyazXerallVHQ58H3jNXC5K0mV+yFLgh4G/qq6vqt/psL1OtPPU9wUGC/w088lvrG2Xg9gAvHmW4wck+cWpO6tqLXBwkkMW0Ed1wMCvufoM8NQke7Z18v+lrRt+EkCSV7T/KvgEcFX7/mNJPpHkq0len+R322uuTfK49rpVSZa32/u1Ketbnuw/k+SG9vXTbT/eBTy7/ZfIGe0T6WXtNY9r27ypbeOIdv/b2j6vSvKVJLP+omjbviXJh9p/7Zyf5PlJ/inJrUmOnvjc/94+Nd+a5NXt/iR5d3vt2iS/1u5fkabO/keAte29PKW9
l3cn2SvJp9r7XTvxZ7s0TW3+v05To/6qJHu0x56a5P9LU7/+hiRPaff/fvt3dFNmrmn/cuDSae7/ye3f00+2u24E7k7yczN8zruBP57h2CeAl832560BDJ3m7GvxvoBvtz93pQkQrwX+M/Ab7f59aWrF7wm8gubJ8HHtsVfQZCfuTZOGfzfwmvbY+2kKj0FTi2V5u70fcFu7/Whg93b7UOD6dnsFE+nsk++BD9Jk9wIcB6xpt98G/DPwqLaNfwd2m+Z+b2uPLwU2A8+geThaDZxDk2Z/EvCxic+9kaa0wH40VRd/DPhlYCVN9uYTgNtpMlJXAPcBT2qvX8pEOYr2z/kxE38WX27b3NKfZe2xCyf+Dq4DXtxu797+uf08zWLeaft/GfCcae73a8Dek30BDgO+MNHWivb6ZwNXt/suA1ZM/v0B/wg8t91eNdHGscAnhv5v2ddDX5Zs0Gz2yI9KBnwGOJsmgJ6Y5Pfa/bsDW/4pv7KqJuvnf7qaevL3Jrmb5ukPmqfdI7bR9m7Af0myDHgAeNoc+vszNEGXqvrHJI9Psk977PKquh+4P8kmmoC8YZbP+mo1QxUkuRn4VFVVkrU0QXKLS6vqu8B3k3yapib7zwAXVNUDwJ1JrgZ+EriHpjbLV2doM8B/TvIcmtK8B7X93NKfNe32amBpkr2Bg6rqkvaev9f29+dpgv8X2vP3ovnlec2U9h7X/v1ssT/NL/hfrillQarqM0lI8uwZ+v5/0zz1/+GU/ZtofhlqETHwazbfraY87g8lCU1g+OKU/f8HzdPspPsnth+ceP8gP/pvbzM/GnLcfeL8M4A7gSPb49+bQ39nK4s72ZcH2PZ/+3Pp++TnT76frh9bTP0zmvRymuD7rKr6QTvsteXPZGr/95ilnQD/T1X91SxtAWxO8oiq2lL//W6af7UcC0xXD+qdNGP9m6ceaH/RvoOm8Nik3YHvbqMf6plj/NpeVwK/3f4CIMlRC/y824Bntdu/MrF/H2BjG5ROoRk2AbiXZvhoOtfQBE/SzDr5RnVfx/6kNOvEPp5mWORf2n78WpoFRfanWULy89NcO/Ve9qGpyf6DJM8F/sNsDbf3tiHJiwCSPCrNzKsrgVemqedPkoOSHDDNR3yRZpnALb4PvAj4zUzMnJpo7yrgsTS/jKfzTpqFRCY9jWYISYuIgV/b6x00wzA3pZn+944Fft57gNcm+Weace0t/htwapJraYLHliflm2ieVG9McsaUz3obsDzJTTRfnJ66wL7NxeeBy4FrgXdU1R3AJW0/b6QZ+/6DqvpfUy+sqn8H/qn9EvjdwPlt/6+n+QV2yxzaPwX4nfae/xl4YhugPwJ8rh2a+ijT/7K8nOaX1WSf7gNOAM7Y8uXyFO+kqSG/lWqqh941Zfdz23a0iFidU5qnJG+j+QL8PUP3ZT7SrPX64aqaabbOQj//UcDVwM9U1VbDQxqOT/zSSFXVRuCvkzymoyYOAc406C8+PvFL0sj4xC9JI2Pgl6SRMfBL0sgY+CVpZAz8kjQy/xsXV/FNXakOwQAAAABJRU5ErkJggg==\n",
379 | "text/plain": [
380 | ""
381 | ]
382 | },
383 | "metadata": {
384 | "needs_background": "light"
385 | },
386 | "output_type": "display_data"
387 | }
388 | ],
389 | "source": [
390 | "sorted_idx = knn_perm.importances_mean.argsort()\n",
391 | "fig, ax = plt.subplots(figsize=(6, 7))\n",
392 | "ax.barh(ion_data.columns[sorted_idx], knn_perm.importances_mean[sorted_idx], zorder=1)\n",
393 | "ax.xaxis.grid(True)\n",
394 | "ax.set_ylabel(\"Feature ID\")\n",
395 | "ax.set_xlabel(\"Permutation Importance (kNN)\");\n",
396 | "ax.figure.savefig(\"PIscores.png\")"
397 | ]
398 | },
399 | {
400 | "cell_type": "markdown",
401 | "id": "630ba84e",
402 | "metadata": {},
403 | "source": [
404 | "Build a mask representing features with an FI score > 0. \n",
405 | "Create a reduced version of the dataset with just these features. "
406 | ]
407 | },
408 | {
409 | "cell_type": "code",
410 | "execution_count": 10,
411 | "id": "673cbf5c",
412 | "metadata": {},
413 | "outputs": [],
414 | "source": [
415 | "mask = knn_perm.importances_mean[sorted_idx]>0"
416 | ]
417 | },
418 | {
419 | "cell_type": "code",
420 | "execution_count": 11,
421 | "id": "fdbade71",
422 | "metadata": {},
423 | "outputs": [],
424 | "source": [
425 | "PI_features = ion_data.columns[sorted_idx][mask]"
426 | ]
427 | },
428 | {
429 | "cell_type": "code",
430 | "execution_count": 12,
431 | "id": "328a9069",
432 | "metadata": {},
433 | "outputs": [
434 | {
435 | "name": "stdout",
436 | "output_type": "stream",
437 | "text": [
438 | "Permutation Importance selects 18 features.\n"
439 | ]
440 | }
441 | ],
442 | "source": [
443 | "X_PI = X[PI_features]\n",
444 | "print(\"Permutation Importance selects\", X_PI.shape[1], \"features.\")\n",
445 | "X_PI_scaled = preprocessing.MinMaxScaler().fit_transform(X_PI)\n",
446 | "X_PI_train, X_PI_test, y_PI_train, y_PI_test = train_test_split(X_PI_scaled, \n",
447 | " y, random_state=2, \n",
448 | " test_size=1/2)"
449 | ]
450 | },
451 | {
452 | "cell_type": "code",
453 | "execution_count": 14,
454 | "id": "a156f39e",
455 | "metadata": {},
456 | "outputs": [
457 | {
458 | "name": "stdout",
459 | "output_type": "stream",
460 | "text": [
461 | "X_Val on training PI features: 0.880\n",
462 | "Hold Out testing PI features: 0.847\n"
463 | ]
464 | }
465 | ],
466 | "source": [
467 | "knnPI = KNeighborsClassifier(n_neighbors=3)\n",
468 | "knnPI = knnPI.fit(X_PI_train,y_PI_train)\n",
469 | "\n",
470 | "y_pred_PI = knnPI.predict(X_PI_test)\n",
471 | "acc_PI = accuracy_score(y_pred_PI,y_test)\n",
472 | "cv_acc_PI = cross_val_score(knnPI, X_PI_train, y_train, cv=8)\n",
473 | "\n",
474 | "res_df.loc['After PI']['X-Val']=cv_acc_PI.mean()\n",
475 | "res_df.loc['After PI']['Hold-Out']=acc_PI\n",
476 | "\n",
477 | "\n",
478 | "print(\"X_Val on training PI features: {0:.3f}\".format(cv_acc_PI.mean())) \n",
479 | "print(\"Hold Out testing PI features: {0:.3f}\".format(acc_PI)) "
480 | ]
481 | },
482 | {
483 | "cell_type": "markdown",
484 | "id": "a054d6f0",
485 | "metadata": {},
486 | "source": [
487 | "## Wrapper stage \n",
488 | "Run the Wrapper process using feature backward elimination. \n",
489 | "The `transform` method of the Wrapper object (`sfs`) will transform the dataset to include just the features selected by the Wrapper. "
490 | ]
491 | },
492 | {
493 | "cell_type": "code",
494 | "execution_count": 25,
495 | "id": "2666f71b",
496 | "metadata": {},
497 | "outputs": [],
498 | "source": [
499 | "from sklearn.feature_selection import SequentialFeatureSelector\n",
500 | "from sklearn.neighbors import KNeighborsClassifier\n",
501 | "knn = KNeighborsClassifier(n_neighbors=3)\n",
502 | "sfs = SequentialFeatureSelector(knn, n_features_to_select='auto', direction = 'backward',\n",
503 | " cv = 10, tol = 0.00001\n",
504 | " )\n",
505 | "sfs.fit(X_PI_train, y_train)\n",
506 | "X_PI_W_train = sfs.transform(X_PI_train)\n",
507 | "X_PI_W_test = sfs.transform(X_PI_test)"
508 | ]
509 | },
510 | {
511 | "cell_type": "code",
512 | "execution_count": 26,
513 | "id": "8d6aa023",
514 | "metadata": {},
515 | "outputs": [
516 | {
517 | "name": "stdout",
518 | "output_type": "stream",
519 | "text": [
520 | "(175, 16)\n"
521 | ]
522 | },
523 | {
524 | "data": {
525 | "text/plain": [
526 | "0.8465909090909091"
527 | ]
528 | },
529 | "execution_count": 26,
530 | "metadata": {},
531 | "output_type": "execute_result"
532 | }
533 | ],
534 | "source": [
535 | "knnPIW = KNeighborsClassifier(n_neighbors=3)\n",
536 | "knnPIW = knnPIW.fit(X_PI_W_train,y_PI_train)\n",
537 | "print(X_PI_W_train.shape)\n",
538 | "knnPIW.score(X_PI_W_test,y_PI_test)"
539 | ]
540 | },
541 | {
542 | "cell_type": "code",
543 | "execution_count": 27,
544 | "id": "856da3aa",
545 | "metadata": {},
546 | "outputs": [
547 | {
548 | "name": "stdout",
549 | "output_type": "stream",
550 | "text": [
551 | "(175, 16)\n",
552 | "X_Val on training PI + Wrapper features: 0.897\n",
553 | "Hold Out testing PI + Wrapper features: 0.847\n"
554 | ]
555 | }
556 | ],
557 | "source": [
558 | "knnPIW = KNeighborsClassifier(n_neighbors=3)\n",
559 | "knnPIW = knnPIW.fit(X_PI_W_train,y_PI_train)\n",
560 | "print(X_PI_W_train.shape)\n",
561 | "\n",
562 | "y_pred_PIW = knnPIW.predict(X_PI_W_test)\n",
563 | "acc_PIW = accuracy_score(y_pred_PIW,y_PI_test)\n",
564 | "cv_acc_PIW = cross_val_score(knnPIW, X_PI_W_train, y_train, cv=8)\n",
565 | "\n",
566 | "res_df.loc['After Wrapper']['X-Val']=cv_acc_PIW.mean()\n",
567 | "res_df.loc['After Wrapper']['Hold-Out']=acc_PIW\n",
568 | "\n",
569 | "\n",
570 | "print(\"X_Val on training PI + Wrapper features: {0:.3f}\".format(cv_acc_PIW.mean())) \n",
571 | "print(\"Hold Out testing PI + Wrapper features: {0:.3f}\".format(acc_PIW)) "
572 | ]
573 | },
574 | {
575 | "cell_type": "markdown",
576 | "id": "23b472a2",
577 | "metadata": {},
578 | "source": [
579 | "## Plot all the results"
580 | ]
581 | },
582 | {
583 | "cell_type": "code",
584 | "execution_count": 28,
585 | "id": "4e2bf3f2",
586 | "metadata": {},
587 | "outputs": [
588 | {
589 | "data": {
590 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAnz0lEQVR4nO3de5xVdb3/8ddbLg4CamqNCiqTefJSMOoIWpaDVzQVzQtgiVpG1NHzs6TCOiHqOV7rZKVG1FGyY+IlMVS8pZGXNAEdL6gUiemImnhBULnp5/fHWoObzZ6ZPcOsPTDr/Xw89oO91vqu9f2u/WXPZ6/vd63vVxGBmZnl10adXQAzM+tcDgRmZjnnQGBmlnMOBGZmOedAYGaWcw4EZmY550BguSJpiqT/yuC4kyT9sKOPa1YJDgQ5I+l5Se9JWlrw2rYDjnlgR5WxjPw2l3SlpFckLZH0N0nfq2D+J0t6oHBdRIyNiPMyym9PSXPSuvqbpEPK3K9G0geSrsiiXNZ1OBDk0xER0afgtbAzCyOpext3+QnQB9gF2Aw4EvhHR5drPXIZcDuwKXAI0FjmfqOBN4GRkjbOqGwlSepWyfxs3TgQGACSNpP0v5JelvSSpP9q+jJL2lHSvZJel7RI0jWSNk+3/RbYHrgl/cX6XUn1khqLjr/6qkHSREk3Svo/SW8DJ7eUfwl7Ab+LiDcj4oOIeDYibizIa2dJd0t6Q9I8Sce3cN6HS2qQ9Jakv0gaWLBtO0k3SXotPffLJO0CTAL2Sc/3rTTtGk1Okr4maX5ahumFV12SQtJYSX+X9KakyyWphepZBfwzPdcFETG3hbSFRgP/CawEjig67+Hpeb8t6R+ShqXrt5B0laSFadluTtevdRWUnscnCs7/F5JmSHoHGCrpC5IeS/N4UdLEov33TT/zt9LtJ0vaS9KrhT8OJB0jqaHMc7b2iAi/cvQCngcOLLH+ZuCXQG/gY8AjwNfTbZ8ADgI2Bj4K3Adc2twxgXqgsbl8gYkkf5yOIvkx0qul/EuU9dfAXOAUYKeibb2BF9Nt3YE9gEXAbun2KcB/pe/3AP4FDAG6ASel5dw4XX6c5OqjN1AF7JvudzLwQFG+hcfdP81zj/RYPwfuK0gbwK3A5iRB9DVgWAt19mOSX/a7t6GePwcsBz6S5j+9YNtgYHFapxsB/YCd0223Adel+/UA9mvhnAP4RMH5LwY+mx6zKv1/8Ol0eSDwKnBUmn57YAkwKs1nS6A23fY0cGhBPtOAMzv7u9OVX51eAL8qXOHJH7qlwFvp62agOv2j0asg3SjgT80c4yjgsaJjtjUQFP5hbGv+vYDvA3NIAsr8pj8cwAjg/qL0vwTOTt9P4cM/2L8AzitKOw/YD9gn/QPdvUT+pf4oFh73f4GLC7b1Scs5IF0O0qCSLl8PjG/mXEcCjwLDSJqEdk/XHwTMaaGefw3cnL7fJ83/YwWfx09K7LMN8AHwkTLPuTgQXN3K/71Lm/IFzgKmNZPue8A16fstgHeBbTr7u9OVX21tm7Wu4aiI+GPTgqTBJL/KXi5oodiI5Jc1kj4G/IzkV2bfdNub61iGFwve79BS/sUi4j3gfOB8SZsC44EbJG2fHmtIU5NNqjvw2xKH2gE4SdLpBet6AtsC75M0x6xq43mR7v9oQXmXSnqd5Jf38+nqVwrSv0sSLEr5f8BlEXGHpLHAHWkzzmeAP5baQVIv4Djg1DT/hyS9AJxA8sd4O2BGiV23A96IiPbW7Rr1JWkIcCHwKZLPdWPghoK8muvX+T/gGUl9gONJAvvL7SyTlcF9BAbJF3g5sFVEbJ6+No2I3dLtF5D8+hsYEZsCXwYK27SLh7B9B9ikaSFt6/9oUZrCfVrLv1kR8TZJUOgN1KTH+nPBcTaPpEP8G82c938Xpd0kIq5Nt23fTEd2a0P2LiQJMgBI6k3S9PFSa+dTQneSPgIi4lbg28BdJL/Q/6eZfY4m
6Vi+QsmdVa+QBKHR6fYXgR1L7PcisEVT/0+R4jrdukSa4s/ld8B0YLuI2Iykb6Xp/01zZSAiXgIeSs/jREoHcetADgRG+mvrLuDHkjaVtFHaQbxfmqQvaXOSpH7Ad4oO8Srw8YLlvwFVaWdhD5IOy2bvWikj/zVI+mHaqdhTUhXJr+a3SJp1bgX+TdKJknqkr73STt5ivwLGShqiRO+0zH1J+iheBi5M11dJ+mzB+faX1LOZU/odcIqkWiV365wP/DUinm/uM2jBDcAESYMkbUTy2b7Hh/0WpZwEXEnSPl+bvj4L1Er6NEnT1SmSDkg/636Sdk7r4XaSAPKR9LP7fHrMx4Hd0nOqImnea01fkiuMZelV5wkF264BDpR0vKTukraUVFuw/Wrgu+k5TCsjL1sHDgTWZDTJ5fvTJM0+N5K0GQOcQ9LxuZikM/Gmon0vAP4zvftjXEQsBr5J0k79EsmvydZueWwp/2IBXEXSIbuQpL38CxGxNCKWAAeTtK0vJGmCuYgSgSgiZgNfI7k9802SvoaT023vk9xp8wnghbT8I9Jd7yXprH5F0qISx70H+CHwe5JgsmNanvb4Eckf9WnAGyRNdGOB3wC3SdqsMHEaqA8g6cx/peA1B7gDOCkiHiHpTP8JSZ3+mQ+vYE4k6U94lqQj/Yz0nP4GnEvSHPV3YI07iJrxTeBcSUuACSR9IaTHewE4DDgzPa8GYFDBvtPSMk2LiHfKyMvWgSI8MY2ZrX8k/YPkzrGSfSHWcXxFYGbrHUnHkFz53dvZZckD3zVkZusVSTOBXYETI+KDTi5OLrhpyMws59w0ZGaWcxtc09BWW20VAwYM6OxiZOadd96hd+/enV0MayfX34arq9fdnDlzFkVE8fM8wAYYCAYMGMDs2bM7uxiZmTlzJvX19Z1dDGsn19+Gq6vXnaR/NrfNTUNmZjnnQGBmlnMOBGZmOedAYGaWcw4EZmY550BgZpZzDgRmZjnnQGBmlnMOBGZmOedAYGaWcw4EZmY550BgZpZzDgRmZjnnQGBmlnMOBGZmOedAYGaWcw4EZmY5l2kgkDRM0jxJ8yWNL7H9I5KmSXpC0iOSPpVleczMbG2ZBQJJ3YDLgUOBXYFRknYtSvZ9oCEiBgKjgZ9mVR4zMystyyuCwcD8iHguIlYAU4HhRWl2Be4BiIhngQGSqjMsk5mZFcly8vp+wIsFy43AkKI0jwNfBB6QNBjYAegPvFqYSNIYYAxAdXU1M2fOzKjInW/p0qVd+vy6OtffhivPdZdlIFCJdVG0fCHwU0kNwJPAY8CqtXaKmAxMBqirq4v6+voOLej6ZObMmXTl8+vqXH8dZ8D42yqa35RhfXJbd1kGgkZgu4Ll/sDCwgQR8TZwCoAkAQvSl5mZVUiWfQSzgJ0k1UjqCYwEphcmkLR5ug3gVOC+NDiYmVmFZHZFEBGrJJ0G3Al0A66MiLmSxqbbJwG7AFdLeh94GvhqVuUxM7PSsmwaIiJmADOK1k0qeP8QsFOWZbB8q3w7c++K5mfWEfxksZlZzjkQmJnlnAOBmVnOORCYmeWcA4GZWc45EJiZ5Vymt4+a5c7LDTCxeGzFDE1cXLm8uroc152vCMzMcs6BwMws5xwIzMxyzoHAzCznHAjMzHLOdw21woOWmVlXl+kVgaRhkuZJmi9pfIntm0m6RdLjkuZKOiXL8piZ2doyCwSSugGXA4eSTFI/StKuRcn+HXg6IgYB9cCPCyaqMTOzCsjyimAwMD8inouIFcBUoPhpjQD6ptNU9gHeoMScxWZmlh1FFM8n30EHlo4FhkXEqenyicCQiDitIE1fkukrdwb6AiMiYq1GeUljgDEA1dXVe06dOjWTMpfy5EuVffqvZrNu9OnTp6J5dmUVr79ey+mzfGHrCTvKNrWVy6vCXHcda+jQoXMioq7Utiw7i1ViXXHUOQRoAPYHdgTulnR/8bzFETEZmAxQV1cX9fX1HV7Y5pxc6c7iQX+nfvbZ
lctwPXrMPQuVr78F1M+rYP2N6rr157qrnCybhhqB7QqW+wPF4fYU4KZIzAcWkFwdmJlZhWQZCGYBO0mqSTuAR5I0AxV6ATgAQFI18EnguQzLZGZmRTJrGoqIVZJOA+4EugFXRsRcSWPT7ZOA84Apkp4kaUr6XkQsyqpMZma2tkwfKIuIGcCMonWTCt4vBA7OsgxmZtYyDzFhZpZzDgRmZjnnQGBmlnMOBGZmOedAYGaWcw4EZmY550BgZpZzDgRmZjnnQGBmlnMOBGZmOedAYGaWcw4EZmY519mT139HUkP6ekrS+5K2yLJMZma2pk6dvD4iLomI2oioBc4C/hwRb2RVJjMzW1tnT15faBRwbYblMTOzErIMBP2AFwuWG9N1a5G0CTAM+H2G5TEzsxI6e/L6JkcADzbXLCRpDDAGoLq6mpkzZ3ZIActx5qdXVSwvgKUbb8vMT55TuQwr+Fl2Btffhst1VzlZBoJyJq9vMpIWmoUiYjIwGaCuri7q6+s7qIitO3n8bRXLC2DKoAXUzzu7chmOWly5vDqB62/D5bqrnM6evB5JmwH7AX/IsCxmZtaMzp68HuBo4K6IeCerspiZWfM6dfL6dHkKMCXLcpiZWfP8ZLGZWc45EJiZ5ZwDgZlZzjkQmJnlnAOBmVnOORCYmeWcA4GZWc45EJiZ5ZwDgZlZzjkQmJnlnAOBmVnOORCYmeWcA4GZWc5lGggkDZM0T9J8SeObSVMvqUHSXEl/zrI8Zma2tsyGoZbUDbgcOIhktrJZkqZHxNMFaTYHrgCGRcQLkj6WVXnMzKy0LK8IBgPzI+K5iFgBTAWGF6U5AbgpIl4AiIh/ZVgeMzMrQRHNzSe/jgeWjiX5pX9qunwiMCQiTitIcynQA9gN6Av8NCKuLnGswsnr95w6dWomZS7lyZcqO69oTa/l9Fne3NTOGdimtnJ5dQLX34bLddexhg4dOici6kpty3KGMpVYVxx1ugN7AgcAvYCHJD0cEX9bYydPXp+d9WgC7Sy4/jZcrrvKabVpSNLhktrThNQIbFew3B8oDreNwB0R8U5ELALuAwa1Iy8zM2uncv7AjwT+LuliSbu04dizgJ0k1UjqmR5nelGaPwCfk9Rd0ibAEOCZNuRhZmbrqNWmoYj4sqRNgVHAVZICuAq4NiKWtLDfKkmnAXcC3YArI2KupLHp9kkR8YykO4AngA+AX0fEU+t+WmZmVq6y+ggi4m1Jvydpxz8DOBr4jqSfRcTPW9hvBjCjaN2kouVLgEvaWG4zM+sg5fQRHCFpGnAvyR0+gyPiUJK2/HEZl8/MzDJWzhXBccBPIuK+wpUR8a6kr2RTLDMzq5RyAsHZwMtNC5J6AdUR8XxE3JNZyczMrCLKuWvoBpKO3Cbvp+vMzKwLKCcQdE+HiAAgfd8zuyKZmVkllRMIXpN0ZNOCpOHAouyKZGZmlVROH8FY4BpJl5EMG/EiMDrTUpmZWcWU80DZP4C9JfUhGaSu2YfIzMxsw1PWA2WSvkAyQmiVlIwlFxHnZlguMzOrkHIeKJsEjABOJ2kaOg7YIeNymZlZhZTTWfyZiBgNvBkR5wD7sOaoomZmtgErJxAsS/99V9K2wEqgJrsimZlZJZXTR3BLOrfwJcCjJJPL/CrLQpmZWeW0eEWQTkhzT0S8FRG/J+kb2DkiJpRzcEnDJM2TNF/S+BLb6yUtltSQvso6rpmZdZwWrwgi4gNJPybpFyAilgPLyzmwpG7A5cBBJDORzZI0PSKeLkp6f0Qc3uaSm5lZhyinj+AuSceo6b7R8g0G5kfEc+mwFFOB4W0uoZmZZUoRxfPJFyWQlgC9gVUkHccCIiI2bWW/Y4FhEXFqunwiMCQiTitIUw/8nuSKYSEwLiLmljjWGGAMQHV19Z5Tp04t8/TW3ZMvVXaC6Zpey+mzvHhq5wxtU1u5vDqB62/D5brrWEOHDp0TEXWltpXzZHHfduZb6gqiOOo8CuwQEUslHQbcDOxUogyT
gckAdXV1UV9f384itd3J42+rWF4AUwYtoH7e2ZXLcFRlv2yV5vrbcLnuKqfVQCDp86XWF09UU0Ijaz5v0J/kV3/hMd4ueD9D0hWStooID2pnZlYh5dw++p2C91Ukbf9zgP1b2W8WsJOkGuAlYCRwQmECSVsDr0ZESBpM0mfxepllNzOzDlBO09ARhcuStgMuLmO/VZJOA+4EugFXRsRcSWPT7ZOAY4FvSFoFvAeMjNY6LczMrEOVNehckUbgU+UkjIgZwIyidZMK3l8GXNaOMpiZWQcpp4/g53zYybsRUAs8nmGZzMysgsq5Iphd8H4VcG1EPJhReczMrMLKCQQ3Assi4n1InhiWtElEvJtt0czMrBLKebL4HqBXwXIv4I/ZFMfMzCqtnEBQFRFLmxbS95tkVyQzM6ukcgLBO5L2aFqQtCfJrZ5mZtYFlNNHcAZwg6Smp4K3IZm60szMuoByHiibJWln4JMk4wc9GxErMy+ZmZlVRDmT1/870DsinoqIJ4E+kr6ZfdHMzKwSyukj+FpEvNW0EBFvAl/LrERmZlZR5QSCjQonpUlnHuuZXZHMzKySyuksvhO4XtIkkqEmxgK3Z1oqMzOrmHICwfdIZgf7Bkln8WMkdw6ZmVkX0GrTUER8ADwMPAfUAQcAz5RzcEnDJM2TNF/S+BbS7SXp/XR6SzMzq6Bmrwgk/RvJZDKjSCaLuQ4gIoaWc+C0L+Fy4CCSoatnSZoeEU+XSHcRSROUmZlVWEtXBM+S/Po/IiL2jYifA++34diDgfkR8VxErACmAsNLpDudZAL7f7Xh2GZm1kFa6iM4huSK4E+S7iD5Q15qQvrm9ANeLFhuBIYUJpDUDziaZNrLvZo7kKQxJP0UVFdXM3PmzDYUY92c+elVFcsLYOnG2zLzk+dULsMKfpadwfW34XLdVU6zgSAipgHTJPUGjgK+BVRL+gUwLSLuauXYpYJG8TSUlwLfi4j3C+5QLVWWycBkgLq6uqivr28l645z8vjbKpYXwJRBC6ifd3blMhy1uHJ5dQLX34bLdVc55Qwx8Q5wDXCNpC2A44DxQGuBoBHYrmC5P7CwKE0dMDUNAlsBh0laFRE3l1V6MzNbZ22aszgi3gB+mb5aMwvYSVIN8BJJM9MJRceraXovaQpwq4OAmVlltWfy+rJExCpJp5HcDdQNuDIi5koam26f1OIBzMysIjILBAARMQOYUbSuZACIiJOzLIuZmZVWzlhDZmbWhTkQmJnlnAOBmVnOORCYmeWcA4GZWc45EJiZ5ZwDgZlZzjkQmJnlnAOBmVnOORCYmeWcA4GZWc45EJiZ5VymgaC1yeslDZf0hKQGSbMl7ZtleczMbG2ZjT5a5uT19wDTIyIkDQSuB3bOqkxmZra2LK8IWp28PiKWRkTT9JW9WXsqSzMzy1iWgaDU5PX9ihNJOlrSs8BtwFcyLI+ZmZWgD3+Qd/CBpeOAQyLi1HT5RGBwRJzeTPrPAxMi4sAS28YAYwCqq6v3nDp1aiZlLuXJlyo7wXRNr+X0WV48tXOGtqmtXF6dwPW34XLddayhQ4fOiYi6UtuynKGsnMnrV4uI+yTtKGmriFhUtG0yMBmgrq4u6uvrMyhuaSePv61ieQFMGbSA+nlnVy7DUZX9slWa62/D5bqrnCybhlZPXi+pJ8nk9dMLE0j6hCSl7/cAegKvZ1gmMzMr0tmT1x8DjJa0EngPGBFZtVWZmVlJnTp5fURcBFyUZRnMzKxlfrLYzCznHAjMzHLOgcDMLOccCMzMcs6BwMws5xwIzMxyzoHAzCznHAjMzHLOgcDMLOccCMzMcs6BwMws5xwIzMxyzoHAzCznMg0EkoZJmidpvqTxJbZ/SdIT6esvkgZlWR4zM1tbZoFAUjfgcuBQYFdglKRdi5ItAPaLiIHAeaSzkJmZWeVkeUUwGJgfEc9FxApgKjC8MEFE/CUi3kwXHyaZztLMzCooy8nrjwWGFU1ePyQiTmsm/Thg56b0
Rds8eX1WuvDk5+D625C57jpWZ01erxLrSkYdSUOBrwL7ltruyesztB5NoJ0F19+Gy3VXOVkGgkZgu4Ll/sBa4VbSQODXwKER4YnrzcwqLMs+glnATpJqJPUERgLTCxNI2h64CTgxIv6WYVnMzKwZmV0RRMQqSacBdwLdgCsjYq6ksen2ScAEYEvgCkkAq5prwzIzs2xk2TRERMwAZhStm1Tw/lRgrc5hMzOrnEwDgZlZW61cuZLGxkYmH7k1KnnPSTY267kVzwy4vmL58cwzmRy2qqqK/v3706NHj7L3cSAws/VKY2Mjffv2pbr75qRNxhVR02s5fSt5++i2u3T4ISOC119/ncbGRmpqasrez2MNmdl6ZdmyZWy55ZYVDQJdhSS23HJLli1b1qb9HAjMbL3jINB+7fnsHAjMzHLOfQRmtl478rIHO/R400/7bKtpXnzpFT5/zKnMuf0atvjIZrz51tvsMewEZt44mR36b8uU66Zz558f4torLli9z6I33mSX/Y6hcfYdbLxxz7WOOeW66cx+4mku+++1BmLudL4iMDMrsl2/rfnG6GMZf8HPABh//s8Y86UvskP/bQH44mH7c/d9f+Xd995bvc+Nt97DkQfvVzIIrO8cCMzMSvjW177Ew48+yaW/uoYHZjVw5tdPXL1t0759+Pzeu3PLXfetXjd1+p2MGj6MW+76M0MOH83uB4/iwBFjefW19X/kHAcCM7MSevTowSX/eQbfmvhjLj1nHD17rnlf/qjhw5g6/S4AFr7yGn977p8M/Wwd+w7enYdv+Q2P3XUtI4cfwsVX/KYzit8m7iMwM2vG7fc+yDbVW/HUs/M56PN7r7Ht8AM/xze/fwFvL1nK9bfcxbFfOJBu3brR+PKrjPjGeF7+1yJWrFhJzfbbdlLpy+crAjOzEhqemsfd9/+Vh2/5DT/51e948aVXqD1oJLUHjWTS1TfSq1cVw+o/w7Tb/8TUP9zJqOGHAHD6Dy/mtFNG8OQ91/PLi37AsuUrOvlMWucrAjOzIhHBN846n0vPGcf2/bbhO98YzfgLfk7D3WtOijXqqGGcdcHPeXvpO+y950AAFr+9lH5bfxSA39xwS8XL3h4OBGa2Xivnds+O9qtrbmL7fluvbg765knHMeX66fz5oTnst8+eq9MdvN/enHTGBL466qjVD3JNPPPrHPf179Fv64+y9x6fZsGLFRy2op0yDQSShgE/JRmG+tcRcWHR9p2Bq4A9gB9ExI+yLI+ZWTnGfPkYxnz5mNXL3bp1Y84dv1srXffu3XntyXvXWDf8kHqGH1K/VtqTRxzJySOO7PCydoTMAoGkbsDlwEEks5XNkjQ9Ip4uSPYG8B/AUVmVw8zMWpZlZ/FgYH5EPBcRK4CpwPDCBBHxr4iYBazMsBxmZtaCLJuG+gEvFiw3AkPacyBJY4AxANXV1cycOXOdC1euMz+9qmJ5ASzdeFtmfvKcymVYwc+yM7j+NjybbbYZS5YsobpXZfN9f6MeLNm4grd6LlmS2aGXLVvWpr+TWQaCUkPgRXsOFBGTgckAdXV1UV9fvw7FapuTx99WsbwApgxaQP28syuX4ajFlcurE7j+NjzPPPMMffv2ZUHjWxXNt6bXysrOR7Dl7pkduqqqit13L//4WTYNNQLbFSz3B9b/7nMzs5zJMhDMAnaSVCOpJzASmJ5hfmZm1g6ZNQ1FxCpJpwF3ktw+emVEzJU0Nt0+SdLWwGxgU+ADSWcAu0bE21mVy8w2LAN/vUOHHu+JU//Zapo+O32WpX//cPjrcoaQnvjjSfTpvQnjxo5eY/3zLy7k8JP+H0/de0PJ/W6++WYmTJjAihUr6NGjB+eddx5HHXVUi+VraGhg4cKFHHbYYa2eSzkyfY4gImYAM4rWTSp4/wpJk5GZWe48/vjjjBs3jrvvvpuamhoWLFjAQQcdxMc//nEGDhzY7H4NDQ3Mnj27wwKBxxoyM2uDfzYu5IDjv87AA4/ngOO/zgsv
vbxWmjlPPM2gA0ewzxEncfmU65o91o9+9CO+//3vr55ovqamhrPOOotLLrkEgPr6embPng3AokWLGDBgACtWrGDChAlcd9111NbWct11zR+/XA4EZmZF3lu2fPUAc7UHjWTCj1Y3ZHDaDy5i9LGH88Qfr+dLXzyU//jhJWvtf8q3J/Kz877LQ7e0PAT13Llz2XPPPddYV1dXx9y5c5vdp2fPnpx77rmMGDGChoYGRowY0baTK8GBwMysSK+qjWm4e+rq17njxq7e9tCcJznh6GEAnHjMF3jgkYY19l389hLeWrx09ZhEJx7zhWbziYi1JpsvtS5rDgRmZuug+G92xNrrmpzyrbOpPWgkh514OgC77bbb6qafJo8++ii77rorkIxl9MEHHwDJQ2JZcSAwM2uDz9QNZOof7gTgmptuZ9/Baz64tflmfdls0z488MhjSZppt6/edtVPzqHh7qnM+O3PARg3bhwXXHABzz//PADPP/88559/PmeeeSYAAwYMYM6cOQDceOONq4/Tt29flnTgk8kehtrM1mvl3O5ZST8777t85dsTuWTS1Xx0i49w1U8mrpXmqv+ZyFe+fQ6b9KrikPp9mj1WbW0tF110EUcccQQrV66kR48eXHzxxdTW1gJJoDj++OP57W9/y/777796v6FDh3LhhRdSW1vLWWedtc79BIpo16gPnaauri6KL6WyNKDiQxT8vbJDFEzsekMUFHL9bXieeeYZdtllF56o+BATyys7xMS22Q0x0fQZFpI0JyLqSqV305CZWc45EJiZ5ZwDgZmtdza0Juv1SXs+OwcCM1uvVFVV8frrrzsYtENE8Prrr1NVVdWm/XzXkJmtV/r3709jYyOvvvwGKjmtSTbe77mSqpVvVSw/Fj+TyWGrqqro379tQ7g5EJjZeqVHjx7U1NQw9JdPt564A00Z9Hd2z+kdX5k2DUkaJmmepPmS1hq/VYmfpdufkLRHluUxM7O1ZRYIJHUDLgcOBXYFRknatSjZocBO6WsM8IusymNmZqVleUUwGJgfEc9FxApgKjC8KM1w4OpIPAxsLmmbDMtkZmZFsuwj6Ae8WLDcCAwpI00/YI0BviWNIbliAFgqaV7HFnX9MRS2AhZVLMNzKjvKYVfn+ttw5aDump3qLctAUOosi+8HKycNETEZmNwRhVrfSZrd3GPgtv5z/W248lx3WTYNNQLbFSz3B4oH8ignjZmZZSjLQDAL2ElSjaSewEhgelGa6cDo9O6hvYHFEbH2vG9mZpaZzJqGImKVpNOAO4FuwJURMVfS2HT7JJKJ7Q8D5gPvAqdkVZ4NSC6awLow19+GK7d1t8ENQ21mZh3LYw2ZmeWcA4GZWc45EJRB0tGSQtLOBesGSHoqfV8v6dYS+9VLWiypIX39sZ35nyFpk/afgZWrmbr+qKS/SnpM0uckfbMD8pmZDr/yuKQHJX2yYH3ubmGsxOcuabikmwuWz5I0v2D5CEnFN7TkggNBeUYBD5Dc+dRW90dEbfo6sJ35nwG0KRBI8oCC7VOqrg8Ano2I3UkegGzTH6T0rrhS37UvRcQg4DfAJe0sb1dRic/9L0DhBML7AG9L+li6/BngwaJjVPx71Bl5OhC0QlIf4LPAV2lfICh1zC9LeiS9SvhlOi4Tkn4habakuZLOSdf9B7At8CdJf0rXLS041rGSpqTvp0j6nzTdRZJ2lHSHpDmS7m/6tSXpOElPpb9G7+uIc+oKStW1pFrgYuAwSQ3ARcCOad1dkqb5jqRZ6cCJTfU2QNIzkq4AHmXN52WK3Qd8IqPTWu9V6nOPiNeAxZKaPut+wO9JAgDpv3+RNFHSZEl3AVenx7xf0qPp6zNpXvWS7pM0TdLTkiY1BR5JSyX9OE1/j6SPpuub+06u8d3N4GNuWUT41cIL+DLwv+n7vwB7pO8HAE+l7+uBW0vsWw8sBhrS1w+AXYBbgB5pmiuA0en7LdJ/uwEzgYHp8vPAVgXHXVrw/lhgSvp+
CnAr0C1dvgfYKX0/BLg3ff8k0C99v3lnf8bry6uFuj4ZuKy43tPlg0luOxTJD6tbgc+n6T4A9m4mr5lAXfr+O8B1xevz8qrw5z4FGA18kmT8swNIAk534E2gCpgIzAF6pftsAlSl73cCZqfv64FlwMfT7+zdwLHptiC54gOYUHAezX0n1/juVvrl5oPWjQIuTd9PTZcfbcP+90fE4U0LSp6t2BOYJQmgF/CvdPPxSsZV6g5sQzJq6xNtLO8NEfF++ivrM8ANaT4AG6f/PghMkXQ9cFMbj9+VtaeuD05fj6XLfUj+WLwA/DOSwRSbc42k90gC/entK3KXUMnP/UGS70U34CHgEZI/1LsD8yJiWfp9mR4R76X79AAuS69S3gf+reB4j0TEcwCSrgX2BW4kCUbXpWn+D7iple8kpN/dVs47Ew4ELZC0JbA/8ClJQfKfJyR9d10OC/wmIs4qyqsGGAfsFRFvps09zc03V/jwR3Gad9J/NwLeiojatXaOGCtpCPAFoEFSbUS83uYz6ULWoa4FXBARvyw63gA+rIvmfCkiZrezyF1CJ3zufyEJut2AX0XEEklVJL/uC/sHCo/xLeBVYBDJ92pZwbbiB7GaezAraOE7WSLPinIfQcuOJRkme4eIGBAR2wELSKJ+e90DHNvUQSVpC0k7AJuS/EdYLKmaZK6GJkuAvgXLr0raJW2PPLpUJhHxNrBA0nFpPpI0KH2/Y0T8NSImkIy22FL7dV6UW9fFdXEn8JX01x6S+hV0PlrrKv25P03S5/Y5PryaaADGkgSJUjYDXo6ID4ATSYJIk8FKhtHZCBhB0uENyd/WY9P3JwAPtPSd7GwOBC0bBUwrWvd7koptl4h4GvhP4C5JT5C0K24TEY+T/MecC1zJmr9OJgO3N3UWA+NJ2hPvpWjI7iJfAr4q6fH0uE3zQVwi6Uklt7/eBzze3vPpQsqq6/TK6cG0s/2SiLgL+B3wkKQnSZoF+mLlqujnHkmD/F+BRRGxMl39EEk7f3OB4ArgJEkPkzQLFf5yfwi4EHiKJIA1ncs7wG6S5pBc8Zybrm/uO9mpPMSEmVk7SKoHxhX2ARZsWxoRfSpeqHbyFYGZWc75isDMLOd8RWBmlnMOBGZmOedAYGaWcw4EZoCSkS9/W7DcXdJrKjGqbNF+tZIOK1ieKGncOpRjnfY3aw8HArPEOyRPt/ZKlw8CXipjv1qS6VbNNlgOBGYfup1k2A1IHnS6tmmDpN6SrkxHu3xMydj2PUkeFBqhZFTMEWnyXZXMK/CcktFjm47x7fSBqKcknVGw/gdK5ib4I8lgaGYV5UBg9qGpwMh07JmBJE+gNvkByUiRewFDSeYP6EEyYNl1kcw30TTI2M7AIcBg4GxJPSTtCZxCMuLk3sDXJO2erh9JMujZF4G9sj5Js2IedM4sFRFPpIOWjQJmFG0+GDiyoP2+Cti+mUPdFhHLgeWS/gVUk4ydMy0i3gGQdBPJeDcbpevfTdfncoYs61wOBGZrmg78iGQ0yi0L1gs4JiLmFSZOR3Ettrzg/fsk3zOVSNfET3Vap3LTkNmargTOjYgni9bfCZyudCB5Sbun64tHxWzOfcBRkjaR1Jtk1Nj70/VHS+olqS9wREechFlb+IrArEBENAI/LbHpPJLJU55Ig8HzwOHAn4DxSqZTvKCF4z6azjHxSLrq1xHxGICk60iGQv4nSXAwqyiPNWRmlnNuGjIzyzkHAjOznHMgMDPLOQcCM7OccyAwM8s5BwIzs5xzIDAzy7n/D5z78sPDjW2pAAAAAElFTkSuQmCC\n",
591 | "text/plain": [
592 | ""
593 | ]
594 | },
595 | "metadata": {
596 | "needs_background": "light"
597 | },
598 | "output_type": "display_data"
599 | }
600 | ],
601 | "source": [
602 | "ax = res_df.plot.bar(rot=0)\n",
603 | "ax.set_ylabel(\"Accuracy\")\n",
604 | "ax.set_title(\"Feature Selection & Accuracy\")\n",
605 | "ax.set_ylim(0,1)\n",
606 | "ax.set_yticks(np.arange(0,1,0.1))\n",
607 | "ax.legend(loc='lower right')\n",
608 | "ax.grid(True)\n",
609 | "ax.figure.savefig(\"PI+Wrapper.png\")\n"
610 | ]
611 | }
612 | ],
613 | "metadata": {
614 | "kernelspec": {
615 | "display_name": "Python 3",
616 | "language": "python",
617 | "name": "python3"
618 | },
619 | "language_info": {
620 | "codemirror_mode": {
621 | "name": "ipython",
622 | "version": 3
623 | },
624 | "file_extension": ".py",
625 | "mimetype": "text/x-python",
626 | "name": "python",
627 | "nbconvert_exporter": "python",
628 | "pygments_lexer": "ipython3",
629 | "version": "3.8.8"
630 | }
631 | },
632 | "nbformat": 4,
633 | "nbformat_minor": 5
634 | }
635 |
--------------------------------------------------------------------------------
/FS-Random-Forest.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Random Forest Feature Importance\n",
8 | "As a side-effect of building a random forest ensemble, we get a very useful estimate of feature importance. "
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": null,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "import pandas as pd\n",
18 | "import numpy as np\n",
19 | "from sklearn.model_selection import train_test_split\n",
20 | "from sklearn.feature_selection import mutual_info_classif\n",
21 | "from sklearn.preprocessing import MinMaxScaler\n",
22 | "from sklearn.ensemble import RandomForestClassifier\n",
23 | "import matplotlib.pyplot as plt \n",
24 | "%matplotlib inline"
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {},
30 | "source": [
31 | "### Segmentation Data"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "seg_data = pd.read_csv('segmentation-all.csv')\n",
41 | "print(seg_data.shape)\n",
42 | "seg_data.head()"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "seg_data['Class'].value_counts()"
52 | ]
53 | },
54 | {
55 | "cell_type": "markdown",
56 | "metadata": {},
57 | "source": [
58 | "Load the data, scale it and divide into train and test sets. \n",
59 | "The filters are *trained* using the training data and then a classifier is trained on the feature subset and tested on the test set. "
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": null,
65 | "metadata": {},
66 | "outputs": [],
67 | "source": [
68 | "y = seg_data.pop('Class').values\n",
69 | "X_raw = seg_data.values\n",
70 | "\n",
71 | "X_tr_raw, X_ts_raw, y_train, y_test = train_test_split(X_raw, y, \n",
72 | " random_state=1, test_size=1/2)\n",
73 | "scaler = MinMaxScaler()\n",
74 | "X_train = scaler.fit_transform(X_tr_raw)\n",
75 | "X_test = scaler.transform(X_ts_raw)\n",
76 | "\n",
77 | "feature_names = seg_data.columns\n",
78 | "X_train.shape, X_test.shape"
79 | ]
80 | },
81 | {
82 | "cell_type": "markdown",
83 | "metadata": {},
84 | "source": [
85 | "Build the Random Forest and calculate the scores. "
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "n_trees = 1000\n",
95 | "RF = RandomForestClassifier(n_estimators=n_trees, max_depth=2, random_state=0)\n",
96 | "RF.fit(X_train,y_train)"
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": null,
102 | "metadata": {},
103 | "outputs": [],
104 | "source": [
105 | "rf_scores = RF.feature_importances_\n",
106 | "rf_scores"
107 | ]
108 | },
109 | {
110 | "cell_type": "markdown",
111 | "metadata": {},
112 | "source": [
113 | "Calculate the I-gain scores for comparison."
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": null,
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "i_scores = mutual_info_classif(X_train,y_train)\n",
123 | "i_scores\n",
124 | "# The i-gain scores for the features"
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": null,
130 | "metadata": {},
131 | "outputs": [],
132 | "source": [
133 | "df=pd.DataFrame({'Mutual Info.':i_scores,'RF Score':rf_scores,'Feature':feature_names})\n",
134 | "df.set_index('Feature', inplace = True)\n",
135 | "df.sort_values('Mutual Info.', inplace = True, ascending = False)\n",
136 | "df"
137 | ]
138 | },
139 | {
140 | "cell_type": "markdown",
141 | "metadata": {},
142 | "source": [
143 | "Plotting the two sets of scores"
144 | ]
145 | },
146 | {
147 | "cell_type": "code",
148 | "execution_count": null,
149 | "metadata": {},
150 | "outputs": [],
151 | "source": [
152 | "n = len(df.index)\n",
153 | "rr = range(0,n)\n",
154 | "fig, ax = plt.subplots(figsize=(6,5))\n",
155 | "ax2 = ax.twinx()\n",
156 | "ax.bar(df.index, df[\"RF Score\"], label='RF Score',width=.35, color = 'g')\n",
157 | "\n",
158 | "ax2.set_xticks(rr)\n",
159 | "ax2.plot(df.index, df[\"Mutual Info.\"], label='I-Gain', color = 'navy')\n",
160 | "\n",
161 | "ax.set_xticklabels(list(df.index), rotation = 90)\n",
162 | "ax.set_xlabel('Features')\n",
163 | "ax.set_ylabel('I-Gain')\n",
164 | "ax2.set_ylabel('RF Score')\n",
165 | "fig.legend(loc=\"upper right\", bbox_to_anchor=(1,1), bbox_transform=ax.transAxes)\n",
166 | "plt.show()"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": null,
172 | "metadata": {},
173 | "outputs": [],
174 | "source": [
175 | "from scipy import stats\n",
176 | "stats.spearmanr(rf_scores, i_scores)"
177 | ]
178 | },
179 | {
180 | "cell_type": "markdown",
181 | "metadata": {},
182 | "source": [
183 | "## Penguins"
184 | ]
185 | },
186 | {
187 | "cell_type": "code",
188 | "execution_count": null,
189 | "metadata": {},
190 | "outputs": [],
191 | "source": [
192 | "penguins_df = pd.read_csv('penguins.csv', index_col = 0)\n",
193 | "\n",
194 | "feature_names = penguins_df.columns\n",
195 | "print(penguins_df.shape)\n",
196 | "penguins_df.head()"
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "metadata": {},
203 | "outputs": [],
204 | "source": [
205 | "y = penguins_df.pop('species').values\n",
206 | "X = penguins_df.values\n",
207 | "\n",
208 | "X_train, X_test, y_train, y_test = train_test_split(X, y, \n",
209 | " random_state=1, test_size=1/2)\n",
210 | "feature_names = penguins_df.columns\n",
211 | "X_train.shape, X_test.shape"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": null,
217 | "metadata": {},
218 | "outputs": [],
219 | "source": [
220 | "RF = RandomForestClassifier(n_estimators=n_trees, max_depth=2, random_state=0)\n",
221 | "RF.fit(X_train,y_train)"
222 | ]
223 | },
224 | {
225 | "cell_type": "code",
226 | "execution_count": null,
227 | "metadata": {},
228 | "outputs": [],
229 | "source": [
230 | "rf_scores = RF.feature_importances_\n",
231 | "rf_scores"
232 | ]
233 | },
234 | {
235 | "cell_type": "code",
236 | "execution_count": null,
237 | "metadata": {},
238 | "outputs": [],
239 | "source": [
240 | "feature_names"
241 | ]
242 | },
243 | {
244 | "cell_type": "code",
245 | "execution_count": null,
246 | "metadata": {},
247 | "outputs": [],
248 | "source": [
249 | "i_scores = mutual_info_classif(X_train,y_train)\n",
250 | "i_scores\n",
251 | "# The i-gain scores for the features"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": null,
257 | "metadata": {},
258 | "outputs": [],
259 | "source": [
260 | "pen_df=pd.DataFrame({'Mutual Info.':i_scores,'RF Score':rf_scores,'Feature':feature_names})\n",
261 | "pen_df.set_index('Feature', inplace = True)\n",
262 | "pen_df.sort_values('Mutual Info.', inplace = True, ascending = False)\n",
263 | "pen_df"
264 | ]
265 | },
266 | {
267 | "cell_type": "code",
268 | "execution_count": null,
269 | "metadata": {},
270 | "outputs": [],
271 | "source": [
272 | "n = len(pen_df.index)\n",
273 | "rr = range(0,n)\n",
274 | "fig, ax = plt.subplots(figsize=(2.5,5))\n",
275 | "ax2 = ax.twinx()\n",
276 | "ax.bar(pen_df.index, pen_df[\"RF Score\"], label='RF Score',width=.35, color = 'g')\n",
277 | "\n",
278 | "ax2.set_xticks(rr)\n",
279 | "ax2.plot(pen_df.index, pen_df[\"Mutual Info.\"], label='I-Gain', color = 'navy')\n",
280 | "\n",
281 | "ax.set_xticklabels(list(pen_df.index), rotation = 90)\n",
282 | "ax.set_xlabel('Features')\n",
283 | "ax.set_ylabel('I-Gain')\n",
284 | "ax2.set_ylabel('RF Score')\n",
285 | "fig.legend(loc=\"upper right\", bbox_to_anchor=(1,1), bbox_transform=ax.transAxes)\n",
286 | "plt.show()"
287 | ]
288 | },
289 | {
290 | "cell_type": "code",
291 | "execution_count": null,
292 | "metadata": {},
293 | "outputs": [],
294 | "source": [
295 | "stats.spearmanr(rf_scores, i_scores)"
296 | ]
297 | }
298 | ],
299 | "metadata": {
300 | "kernelspec": {
301 | "display_name": "Python 3",
302 | "language": "python",
303 | "name": "python3"
304 | },
305 | "language_info": {
306 | "codemirror_mode": {
307 | "name": "ipython",
308 | "version": 3
309 | },
310 | "file_extension": ".py",
311 | "mimetype": "text/x-python",
312 | "name": "python",
313 | "nbconvert_exporter": "python",
314 | "pygments_lexer": "ipython3",
315 | "version": "3.8.3"
316 | }
317 | },
318 | "nbformat": 4,
319 | "nbformat_minor": 2
320 | }
321 |
--------------------------------------------------------------------------------
/FS-ReliefF.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Feature Selection using ReliefF\n",
8 | "https://epistasislab.github.io/scikit-rebate/using/\n",
9 | "`pip install skrebate`"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import pandas as pd\n",
19 | "import numpy as np\n",
20 | "from skrebate import ReliefF\n",
21 | "from sklearn.feature_selection import mutual_info_classif\n",
22 | "from sklearn.model_selection import train_test_split\n",
23 | "from sklearn.preprocessing import MinMaxScaler\n",
24 | "from sklearn.neighbors import KNeighborsClassifier\n",
25 | "from sklearn.metrics import accuracy_score\n",
26 | "import matplotlib.pyplot as plt "
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": null,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "seg_data = pd.read_csv('segmentation-all.csv')\n",
36 | "print(seg_data.shape)\n",
37 | "seg_data.head()"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "seg_data['Class'].value_counts()"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "Load the data, scale it and divide into train and test sets. \n",
54 | "The filters are *trained* using the training data and then a classifier is trained on the feature subset and tested on the test set. "
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "execution_count": null,
60 | "metadata": {},
61 | "outputs": [],
62 | "source": [
63 | "y = seg_data.pop('Class').values\n",
64 | "X_raw = seg_data.values\n",
65 | "\n",
66 | "X_tr_raw, X_ts_raw, y_train, y_test = train_test_split(X_raw, y, \n",
67 | " random_state=42, test_size=1/2)\n",
68 | "scaler = MinMaxScaler()\n",
69 | "X_train = scaler.fit_transform(X_tr_raw)\n",
70 | "X_test = scaler.transform(X_ts_raw)\n",
71 | "\n",
72 | "feature_names = seg_data.columns\n",
73 | "X_train.shape, X_test.shape"
74 | ]
75 | },
76 | {
77 | "cell_type": "markdown",
78 | "metadata": {},
79 | "source": [
80 | "### ReliefF\n",
81 | "- `ReliefF` will produce scores for all features.\n",
82 | "- `n_features_to_select` controls the transform behaviour, if a dataset is transformed this number of features will be retained. "
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "reliefFS = ReliefF(n_features_to_select=11, n_neighbors=100, n_jobs = -1)"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "metadata": {},
98 | "outputs": [],
99 | "source": [
100 | "reliefFS.fit(X_train,y_train)"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": null,
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "relief_scores = reliefFS.feature_importances_"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {},
116 | "outputs": [],
117 | "source": [
118 | "reliefFS.transform(X_train).shape"
119 | ]
120 | },
121 | {
122 | "cell_type": "markdown",
123 | "metadata": {},
124 | "source": [
125 | "Also calculate I-Gain scores: to be used for comparison."
126 | ]
127 | },
128 | {
129 | "cell_type": "code",
130 | "execution_count": null,
131 | "metadata": {},
132 | "outputs": [],
133 | "source": [
134 | "i_scores = mutual_info_classif(X_train,y_train)\n",
135 | "i_scores\n",
136 | "# The i-gain scores for the features"
137 | ]
138 | },
139 | {
140 | "cell_type": "code",
141 | "execution_count": null,
142 | "metadata": {},
143 | "outputs": [],
144 | "source": [
145 | "from scipy import stats\n",
146 | "stats.spearmanr(relief_scores, i_scores)"
147 | ]
148 | },
149 | {
150 | "cell_type": "markdown",
151 | "metadata": {},
152 | "source": [
153 | "Store the ReliefF and I-Gain scores in a dataframe. \n",
154 | "**Note:** The mutual information score returned by `mutual_info_classif` is effectively an information gain score. "
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": null,
160 | "metadata": {},
161 | "outputs": [],
162 | "source": [
163 | "df=pd.DataFrame({'Mutual Info.':i_scores,'ReliefF':relief_scores,'Feature':feature_names})\n",
164 | "df.set_index('Feature', inplace = True)\n",
165 | "df.sort_values('Mutual Info.', inplace = True, ascending = False)\n",
166 | "df"
167 | ]
168 | },
169 | {
170 | "cell_type": "markdown",
171 | "metadata": {},
172 | "source": [
173 | "### Plotting the ReliefF and I-Gain scores\n",
174 | "We see that the two scores are fairly well correlated. \n",
175 | "The Spearman correlation is 0.81."
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": null,
181 | "metadata": {},
182 | "outputs": [],
183 | "source": [
184 | "fig, ax = plt.subplots()\n",
185 | "rr = range(0,len(feature_names))\n",
186 | "ax2 = ax.twinx()\n",
187 | "ax.plot(df.index, df[\"Mutual Info.\"], label='I-Gain')\n",
188 | "ax2.plot(df.index, df[\"ReliefF\"], color='red', label='Relief')\n",
189 | "ax.set_xticks(rr)\n",
190 | "\n",
191 | "ax.set_xticklabels(list(df.index), rotation = 90)\n",
192 | "ax.set_xlabel('Features', fontsize=12, fontweight='bold')\n",
193 | "ax.set_ylabel('I-Gain')\n",
194 | "ax2.set_ylabel('ReliefF')\n",
195 | "fig.legend(loc=\"upper right\", bbox_to_anchor=(1,1), bbox_transform=ax.transAxes)"
196 | ]
197 | },
198 | {
199 | "cell_type": "markdown",
200 | "metadata": {},
201 | "source": [
202 | "## Feature Selection\n",
203 | "Compare \n",
204 | "- Baseline: all features\n",
205 | "- Visual inspection of the ReliefF plot suggests we select the top 11 features. "
206 | ]
207 | },
208 | {
209 | "cell_type": "markdown",
210 | "metadata": {},
211 | "source": [
212 | "### Baseline Classifier"
213 | ]
214 | },
215 | {
216 | "cell_type": "code",
217 | "execution_count": null,
218 | "metadata": {},
219 | "outputs": [],
220 | "source": [
221 | "model = KNeighborsClassifier(n_neighbors=3)\n",
222 | "model = model.fit(X_train,y_train)\n",
223 | "y_pred = model.predict(X_test)\n",
224 | "acc_all = accuracy_score(y_pred,y_test)\n",
225 | "acc_all"
226 | ]
227 | },
228 | {
229 | "cell_type": "code",
230 | "execution_count": null,
231 | "metadata": {},
232 | "outputs": [],
233 | "source": [
234 | "n_features = X_train.shape[1]\n",
235 | "n_features"
236 | ]
237 | },
238 | {
239 | "cell_type": "markdown",
240 | "metadata": {},
241 | "source": [
242 | "### After feature selection\n",
243 | "We produce a reduced dataset with the 11 top ranking features selected by ReliefF"
244 | ]
245 | },
246 | {
247 | "cell_type": "code",
248 | "execution_count": null,
249 | "metadata": {},
250 | "outputs": [],
251 | "source": [
252 | "X_tr_relief = reliefFS.transform(X_train)\n",
253 | "X_ts_relief = reliefFS.transform(X_test)\n",
254 | "X_tr_relief.shape"
255 | ]
256 | },
257 | {
258 | "cell_type": "code",
259 | "execution_count": null,
260 | "metadata": {},
261 | "outputs": [],
262 | "source": [
263 | "kNN_relief = model.fit(X_tr_relief,y_train)\n",
264 | "y_pred = kNN_relief.predict(X_ts_relief)\n",
265 | "acc_11 = accuracy_score(y_pred,y_test)\n",
266 | "acc_11"
267 | ]
268 | },
269 | {
270 | "cell_type": "code",
271 | "execution_count": null,
272 | "metadata": {},
273 | "outputs": [],
274 | "source": [
275 | "import matplotlib.pyplot as plt \n",
276 | "import numpy as np\n",
277 | "%matplotlib inline \n",
278 | "\n",
279 | "fig, ax = plt.subplots(figsize=(2.5,3.5))\n",
280 | "width = 0.5\n",
281 | "sb = 'skyblue'\n",
282 | "\n",
283 | "options = ['All', 'ReliefF 11']\n",
284 | "scores = [acc_all,acc_11]\n",
285 | "\n",
286 | "y_pos = np.arange(len(options))\n",
287 | "\n",
288 | "p1 = ax.bar(y_pos, scores, width, align='center', \n",
289 | " color=['red', 'blue'],alpha=0.5)\n",
290 | "\n",
291 | "ax.set_ylim([0.5, 1])\n",
292 | "plt.grid(axis = 'y')\n",
293 | "plt.yticks(np.arange(0.5,1.05,0.05))\n",
294 | "ax.text(0,acc_all, '%0.3f' % acc_all, ha='center', va = 'top')\n",
295 | "ax.text(1,acc_11, '%0.3f' % acc_11, ha='center',va = 'top')\n",
296 | "\n",
297 | "plt.xticks(y_pos, options)\n",
298 | "plt.ylabel('Test Set Accuracy')\n",
299 | "plt.xlabel('Features')\n",
300 | "plt.show()"
301 | ]
302 | },
303 | {
304 | "cell_type": "code",
305 | "execution_count": null,
306 | "metadata": {},
307 | "outputs": [],
308 | "source": []
309 | }
310 | ],
311 | "metadata": {
312 | "kernelspec": {
313 | "display_name": "Python 3",
314 | "language": "python",
315 | "name": "python3"
316 | },
317 | "language_info": {
318 | "codemirror_mode": {
319 | "name": "ipython",
320 | "version": 3
321 | },
322 | "file_extension": ".py",
323 | "mimetype": "text/x-python",
324 | "name": "python",
325 | "nbconvert_exporter": "python",
326 | "pygments_lexer": "ipython3",
327 | "version": "3.8.3"
328 | }
329 | },
330 | "nbformat": 4,
331 | "nbformat_minor": 2
332 | }
333 |
--------------------------------------------------------------------------------
/HarryPotterTT.csv:
--------------------------------------------------------------------------------
1 | Name, Magic, Cunning, Courage, Wisdom, Temper
2 | 'Harry Potter',62, 21,42,26,7
3 | 'Hermione Granger',60,16,40,73,2
4 | 'Ron Weasley',45,14,40,22,4
5 | 'Prof. Dumbledore',105,24,39,82,0
6 | 'Prof. Snape',85,24,19,71,7
7 | 'Prof. McGonagail',95,19,29,76,5
8 | 'Prof. Moody',82,20,35,69,5
9 | 'Rubeus Hagrid',12,11,30,8,7
10 | 'Fred Weasley',87,13,30,22,4
11 | 'George Weasley',87,13,30,22,4
12 | 'Arthur Weasley',62,5,29,60,2
13 | 'Vincent Crabbe',10,13,8,4,7
14 | 'Draco Malfoy',42,22,10,12,9
15 | 'Gregory Goyle',10,14,7,2,8
16 | 'Padma Patil',24,9,23,13,1
17 | 'Parvati Patil',24,11,23,15,2
18 | 'Fleur Delacour',59,19,36,54,6
19 | 'Cho Chang',40,8,25,31,3
20 | 'Cedric Diggory',58,23,40,55,2
21 | 'Viktor Krum',56,22,38,30,7
22 | 'Neville Longbottom',24,9,28,15,2
23 | 'Lucius Malfoy',88,24,10,60,9
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FeatSelTutorial
2 | Python notebooks for the tutorial paper on feature selection
3 | - **FS-Wrappers:** Code for SFS and BE Wrappers from `mlxtend`.
4 | - **FS-Filters:** Code for using I-Gain and Chi-square Filters from `scikit-learn`.
5 | - **FS-Permutation-FI:** Code for using Permutation Feature Importance Filters from `scikit-learn`.
6 | - **FS-D-Tree:** Building D-Trees with embedded feature selection using `scikit-learn`.
7 | - **FS-Lasso:** Feature selection for Logistic Regression using `scikit-learn`.
8 | - **FS-Permutation+Wrapper:** A two stage strategy using Permutation FI and Wrapper.
9 | - **FS-ReliefF:** Code for using ReliefF Filters from `skrebate`.
10 | - **FS-Random-Forest:** Feature importance from Random Forest using `scikit-learn`.
11 | - **FS-CFS:** Correlation Based feature importance using code from https://github.com/jundongl/scikit-feature.
12 | - **FS-PCA:** Principal Component Analysis using the PCA implementation in `scikit-learn`.
13 | - **FS-LDA:** Linear Discriminant Analysis using the LDA implementation in `scikit-learn`.
14 |
15 |
16 |
17 |
18 | ### Data Files
19 | - `penguins.csv` from https://github.com/allisonhorst/palmerpenguins
20 | - `segmentation-all.csv` from https://archive.ics.uci.edu/ml/datasets/Image+Segmentation
21 | - `ionosphere.csv` from https://archive.ics.uci.edu/ml/datasets/ionosphere
22 | - `HarryPotterTT.csv` created by the authors
23 |
--------------------------------------------------------------------------------
/entropy_estimators.py:
--------------------------------------------------------------------------------
1 | # Written by Greg Ver Steeg (http://www.isi.edu/~gregv/npeet.html)
2 |
3 | import scipy.spatial as ss
4 | from scipy.special import digamma
5 | from math import log
6 | import numpy.random as nr
7 | import numpy as np
8 | import random
9 |
10 |
11 | # continuous estimators
12 |
def entropy(x, k=3, base=2):
    """
    The classic Kozachenko-Leonenko k-nearest-neighbor continuous entropy estimator.

    x should be a list of vectors, e.g. x = [[1.3],[3.7],[5.1],[2.4]]
    if x is a one-dimensional scalar and we have four samples.

    k: number of neighbors used by the estimator (must be < len(x)).
    base: logarithm base of the returned entropy (base=2 -> bits).
    Returns a float entropy estimate.
    """
    assert k <= len(x)-1, "Set k smaller than num. samples - 1"
    d = len(x[0])
    N = len(x)
    intens = 1e-10  # small noise to break degeneracy, see doc.
    x = [list(p + intens * nr.rand(len(x[0]))) for p in x]
    tree = ss.cKDTree(x)
    # distance to the k-th nearest neighbor under the max-norm (p=inf);
    # query k+1 points because the query point itself is returned first
    nn = [tree.query(point, k+1, p=float('inf'))[0][k] for point in x]
    const = digamma(N)-digamma(k) + d*log(2)
    # Python 3 fix: map() returns a lazy iterator which np.mean cannot
    # reduce; materialize the log-distances into a list first.
    return (const + d*np.mean(list(map(log, nn))))/log(base)
28 |
29 |
def mi(x, y, k=3, base=2):
    """
    Estimate the mutual information of x and y (Kraskov-style kNN estimator).

    x, y should be lists of vectors, e.g. x = [[1.3],[3.7],[5.1],[2.4]]
    for four one-dimensional samples.  k is the neighbor count; base sets
    the logarithm base of the result.
    """
    assert len(x) == len(y), "Lists should have same length"
    assert k <= len(x) - 1, "Set k smaller than num. samples - 1"
    noise = 1e-10  # tiny jitter breaks ties between identical samples
    x = [list(p + noise * nr.rand(len(x[0]))) for p in x]
    y = [list(p + noise * nr.rand(len(y[0]))) for p in y]
    joint = zip2(x, y)
    # k-th neighbor distance in the joint space, max-norm (p=inf)
    kd_tree = ss.cKDTree(joint)
    dvec = [kd_tree.query(pt, k + 1, p=float('inf'))[0][k] for pt in joint]
    marginal_x = avgdigamma(x, dvec)
    marginal_y = avgdigamma(y, dvec)
    return (digamma(k) + digamma(len(x)) - marginal_x - marginal_y) / log(base)
47 |
48 |
def cmi(x, y, z, k=3, base=2):
    """
    Estimate the mutual information of x and y, conditioned on z.

    x, y, z should be lists of vectors, e.g. x = [[1.3],[3.7],[5.1],[2.4]]
    for four one-dimensional samples.  k is the neighbor count; base sets
    the logarithm base of the result.
    """
    assert len(x) == len(y), "Lists should have same length"
    assert k <= len(x) - 1, "Set k smaller than num. samples - 1"
    noise = 1e-10  # tiny jitter breaks ties between identical samples
    x = [list(p + noise * nr.rand(len(x[0]))) for p in x]
    y = [list(p + noise * nr.rand(len(y[0]))) for p in y]
    z = [list(p + noise * nr.rand(len(z[0]))) for p in z]
    joint = zip2(x, y, z)
    # k-th neighbor distance in the joint space, max-norm (p=inf)
    kd_tree = ss.cKDTree(joint)
    dvec = [kd_tree.query(pt, k + 1, p=float('inf'))[0][k] for pt in joint]
    term_xz = avgdigamma(zip2(x, z), dvec)
    term_yz = avgdigamma(zip2(y, z), dvec)
    term_z = avgdigamma(z, dvec)
    return (-term_xz - term_yz + term_z + digamma(k)) / log(base)
67 |
68 |
def kldiv(x, xp, k=3, base=2):
    """
    KL Divergence between p and q for x~p(x), xp~q(x).

    x, xp should be lists of vectors, e.g. x = [[1.3],[3.7],[5.1],[2.4]]
    if x is a one-dimensional scalar and we have four samples.

    k: number of neighbors used by the estimator.
    base: logarithm base of the returned divergence.
    """
    assert k <= len(x) - 1, "Set k smaller than num. samples - 1"
    assert k <= len(xp) - 1, "Set k smaller than num. samples - 1"
    assert len(x[0]) == len(xp[0]), "Two distributions must have same dim."
    d = len(x[0])
    n = len(x)
    m = len(xp)
    const = log(m) - log(n-1)
    tree = ss.cKDTree(x)
    treep = ss.cKDTree(xp)
    # k-th neighbor distance within x (query k+1: the point itself is included)
    nn = [tree.query(point, k+1, p=float('inf'))[0][k] for point in x]
    # k-th neighbor distance from each x-point into the xp sample
    nnp = [treep.query(point, k, p=float('inf'))[0][k-1] for point in x]
    # Python 3 fix: materialize the map() iterators so np.mean can reduce them
    return (const + d*np.mean(list(map(log, nnp))) - d*np.mean(list(map(log, nn))))/log(base)
87 |
88 |
89 | # Discrete estimators
def entropyd(sx, base=2):
    """
    Discrete entropy estimator given a list of samples which can be any hashable object
    """
    probs = hist(sx)
    return entropyfromprobs(probs, base=base)
96 |
97 |
def midd(x, y):
    """
    Discrete mutual information estimator, I(x;y) = H(x) + H(y) - H(x,y),
    given lists of samples which can be any hashable object.
    """
    h_joint = entropyd(list(zip(x, y)))
    return entropyd(x) + entropyd(y) - h_joint
104 |
105 |
def cmidd(x, y, z):
    """
    Discrete conditional mutual information estimator,
    I(x;y|z) = H(y,z) + H(x,z) - H(x,y,z) - H(z),
    given lists of samples which can be any hashable object.
    """
    h_yz = entropyd(list(zip(y, z)))
    h_xz = entropyd(list(zip(x, z)))
    h_xyz = entropyd(list(zip(x, y, z)))
    return h_yz + h_xz - h_xyz - entropyd(z)
112 |
113 |
def hist(sx):
    """
    Normalized histogram from a list of samples.

    Returns a list of empirical probabilities, one per distinct value.
    Previously this returned a lazy map() iterator, which under Python 3
    can only be consumed once; a list is safe to iterate repeatedly.
    """
    counts = dict()
    for s in sx:
        counts[s] = counts.get(s, 0) + 1
    total = len(sx)
    return [c / total for c in counts.values()]
120 |
121 |
def entropyfromprobs(probs, base=2):
    # Turn a normalized list of probabilities of discrete outcomes into
    # entropy: -sum(p * log p), rescaled to the requested log base.
    acc = 0.0
    for p in probs:
        acc += elog(p)
    return -acc / log(base)
125 |
126 |
def elog(x):
    # Entropy convention: 0*log(0) = 0; values at or above 1 (and at or
    # below 0) contribute 0, which also keeps log's argument positive.
    if 0. < x < 1.:
        return x * log(x)
    return 0
133 |
134 |
135 | # Mixed estimators
def micd(x, y, k=3, base=2, warning=True):
    """ If x is continuous and y is discrete, compute mutual information
    as H(x) minus the probability-weighted entropies of x conditioned on
    each discrete y value.
    """
    h_x = entropy(x, k, base)
    n = len(y)
    # empirical probability mass of each discrete y value
    freq = dict()
    for label in y:
        freq[label] = freq.get(label, 0) + 1./n

    result = h_x
    for label in list(set(freq.keys())):
        x_given_y = [x[i] for i in range(n) if y[i] == label]
        if k <= len(x_given_y) - 1:
            result -= freq[label] * entropy(x_given_y, k, base)
        else:
            # too few samples in this slice to run the kNN estimator
            if warning:
                print("Warning, after conditioning, on y={0} insufficient data. Assuming maximal entropy in this case.".format(label))
            result -= freq[label] * h_x
    return result  # units already applied
157 |
158 |
159 | # Utility functions
def vectorize(scalarlist):
    """
    Turn a list of scalars into a list of one-dimensional vectors (1-tuples).
    """
    vectors = []
    for value in scalarlist:
        vectors.append((value,))
    return vectors
166 |
167 |
def shuffle_test(measure, x, y, z=False, ns=200, ci=0.95, **kwargs):
    """
    Shuffle test: repeatedly shuffle the x-values and re-estimate
    measure(x, y[, z]).  Returns the mean and the 'ci' (default 0.95)
    confidence interval over 'ns' runs; 'measure' could be mi, cmi, etc.
    Extra keyword arguments are forwarded to 'measure'.  Mutual information
    and CMI should have a mean near zero under shuffling.
    """
    shuffled = x[:]  # work on a copy so the caller's list is untouched
    outputs = []
    for _ in range(ns):
        random.shuffle(shuffled)
        call_args = (shuffled, y, z) if z else (shuffled, y)
        outputs.append(measure(*call_args, **kwargs))
    outputs.sort()
    lower = outputs[int((1. - ci) / 2 * ns)]
    upper = outputs[int((1. + ci) / 2 * ns)]
    return np.mean(outputs), (lower, upper)
186 |
187 |
188 | # Internal functions
def avgdigamma(points, dvec):
    """Mean digamma of per-point neighbor counts in the marginal space.

    For each point, counts the samples lying within (just under) the
    corresponding radius from dvec under the max-norm, then averages
    digamma(count). The radius is shrunk by 1e-15 so the boundary point
    is excluded, while the center point itself is always counted —
    implicitly adding 1 relative to the Kraskov definition.
    """
    n = len(points)
    kdtree = ss.cKDTree(points)
    total = 0.
    for i in range(n):
        radius = dvec[i]
        neighbors = kdtree.query_ball_point(points[i], radius - 1e-15, p=float('inf'))
        total += digamma(len(neighbors))/n
    return total
202 |
203 |
def zip2(*args):
    """Concatenate corresponding vectors from several lists into a
    single joint space.

    E.g. zip2([[1],[2],[3]], [[4],[5],[6]]) = [[1,4],[2,5],[3,6]]
    """
    joint = []
    for vectors in zip(*args):
        row = []
        for vec in vectors:
            row = row + vec
        joint.append(row)
    return joint
--------------------------------------------------------------------------------
/ionosphere.csv:
--------------------------------------------------------------------------------
1 | 1,0,0.99539,-0.05889,0.85243,0.02306,0.83398,-0.37708,1,0.03760,0.85243,-0.17755,0.59755,-0.44945,0.60536,-0.38223,0.84356,-0.38542,0.58212,-0.32192,0.56971,-0.29674,0.36946,-0.47357,0.56811,-0.51171,0.41078,-0.46168,0.21266,-0.34090,0.42267,-0.54487,0.18641,-0.45300,g
2 | 1,0,1,-0.18829,0.93035,-0.36156,-0.10868,-0.93597,1,-0.04549,0.50874,-0.67743,0.34432,-0.69707,-0.51685,-0.97515,0.05499,-0.62237,0.33109,-1,-0.13151,-0.45300,-0.18056,-0.35734,-0.20332,-0.26569,-0.20468,-0.18401,-0.19040,-0.11593,-0.16626,-0.06288,-0.13738,-0.02447,b
3 | 1,0,1,-0.03365,1,0.00485,1,-0.12062,0.88965,0.01198,0.73082,0.05346,0.85443,0.00827,0.54591,0.00299,0.83775,-0.13644,0.75535,-0.08540,0.70887,-0.27502,0.43385,-0.12062,0.57528,-0.40220,0.58984,-0.22145,0.43100,-0.17365,0.60436,-0.24180,0.56045,-0.38238,g
4 | 1,0,1,-0.45161,1,1,0.71216,-1,0,0,0,0,0,0,-1,0.14516,0.54094,-0.39330,-1,-0.54467,-0.69975,1,0,0,1,0.90695,0.51613,1,1,-0.20099,0.25682,1,-0.32382,1,b
5 | 1,0,1,-0.02401,0.94140,0.06531,0.92106,-0.23255,0.77152,-0.16399,0.52798,-0.20275,0.56409,-0.00712,0.34395,-0.27457,0.52940,-0.21780,0.45107,-0.17813,0.05982,-0.35575,0.02309,-0.52879,0.03286,-0.65158,0.13290,-0.53206,0.02431,-0.62197,-0.05707,-0.59573,-0.04608,-0.65697,g
6 | 1,0,0.02337,-0.00592,-0.09924,-0.11949,-0.00763,-0.11824,0.14706,0.06637,0.03786,-0.06302,0,0,-0.04572,-0.15540,-0.00343,-0.10196,-0.11575,-0.05414,0.01838,0.03669,0.01519,0.00888,0.03513,-0.01535,-0.03240,0.09223,-0.07859,0.00732,0,0,-0.00039,0.12011,b
7 | 1,0,0.97588,-0.10602,0.94601,-0.20800,0.92806,-0.28350,0.85996,-0.27342,0.79766,-0.47929,0.78225,-0.50764,0.74628,-0.61436,0.57945,-0.68086,0.37852,-0.73641,0.36324,-0.76562,0.31898,-0.79753,0.22792,-0.81634,0.13659,-0.82510,0.04606,-0.82395,-0.04262,-0.81318,-0.13832,-0.80975,g
8 | 0,0,0,0,0,0,1,-1,0,0,-1,-1,0,0,0,0,1,1,-1,-1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,b
9 | 1,0,0.96355,-0.07198,1,-0.14333,1,-0.21313,1,-0.36174,0.92570,-0.43569,0.94510,-0.40668,0.90392,-0.46381,0.98305,-0.35257,0.84537,-0.66020,0.75346,-0.60589,0.69637,-0.64225,0.85106,-0.65440,0.57577,-0.69712,0.25435,-0.63919,0.45114,-0.72779,0.38895,-0.73420,g
10 | 1,0,-0.01864,-0.08459,0,0,0,0,0.11470,-0.26810,-0.45663,-0.38172,0,0,-0.33656,0.38602,-0.37133,0.15018,0.63728,0.22115,0,0,0,0,-0.14803,-0.01326,0.20645,-0.02294,0,0,0.16595,0.24086,-0.08208,0.38065,b
11 | 1,0,1,0.06655,1,-0.18388,1,-0.27320,1,-0.43107,1,-0.41349,0.96232,-0.51874,0.90711,-0.59017,0.89230,-0.66474,0.69876,-0.70997,0.70645,-0.76320,0.63081,-0.80544,0.55867,-0.89128,0.47211,-0.86500,0.40303,-0.83675,0.30996,-0.89093,0.22995,-0.89158,g
12 | 1,0,1,-0.54210,1,-1,1,-1,1,0.36217,1,-0.41119,1,1,1,-1,1,-0.29354,1,-0.93599,1,1,1,1,1,-0.40888,1,-0.62745,1,-1,1,-1,1,-1,b
13 | 1,0,1,-0.16316,1,-0.10169,0.99999,-0.15197,1,-0.19277,0.94055,-0.35151,0.95735,-0.29785,0.93719,-0.34412,0.94486,-0.28106,0.90137,-0.43383,0.86043,-0.47308,0.82987,-0.51220,0.84080,-0.47137,0.76224,-0.58370,0.65723,-0.68794,0.68714,-0.64537,0.64727,-0.67226,g
14 | 1,0,1,-0.86701,1,0.22280,0.85492,-0.39896,1,-0.12090,1,0.35147,1,0.07772,1,-0.14767,1,-1,1,-1,0.61831,0.15803,1,0.62349,1,-0.17012,1,0.35924,1,-0.66494,1,0.88428,1,-0.18826,b
15 | 1,0,1,0.07380,1,0.03420,1,-0.05563,1,0.08764,1,0.19651,1,0.20328,1,0.12785,1,0.10561,1,0.27087,1,0.44758,1,0.41750,1,0.20033,1,0.36743,0.95603,0.48641,1,0.32492,1,0.46712,g
16 | 1,0,0.50932,-0.93996,1,0.26708,-0.03520,-1,1,-1,0.43685,-1,0,0,-1,-0.34265,-0.37681,0.03623,1,-1,0,0,0,0,-0.16253,0.92236,0.39752,0.26501,0,0,1,0.23188,0,0,b
17 | 1,0,0.99645,0.06468,1,-0.01236,0.97811,0.02498,0.96112,0.02312,0.99274,0.07808,0.89323,0.10346,0.94212,0.05269,0.88809,0.11120,0.86104,0.08631,0.81633,0.11830,0.83668,0.14442,0.81329,0.13412,0.79476,0.13638,0.79110,0.15379,0.77122,0.15930,0.70941,0.12015,g
18 | 0,0,0,0,-1,-1,1,1,-1,1,-1,1,1,-1,1,1,-1,-1,-1,1,1,-1,-1,1,-1,1,1,-1,-1,1,-1,-1,1,-1,b
19 | 1,0,0.67065,0.02528,0.66626,0.05031,0.57197,0.18761,0.08776,0.34081,0.63621,0.12131,0.62099,0.14285,0.78637,0.10976,0.58373,0.18151,0.14395,0.41224,0.53888,0.21326,0.51420,0.22625,0.48838,0.23724,0.46167,0.24618,0.43433,0.25306,0.40663,0.25792,1,0.33036,g
20 | 0,0,1,-1,0,0,0,0,1,1,1,-1,-0.71875,1,0,0,-1,1,1,1,-1,1,1,0.56250,-1,1,1,1,1,-1,1,1,1,1,b
21 | 1,0,1,-0.00612,1,-0.09834,1,-0.07649,1,-0.10605,1,-0.11073,1,-0.39489,1,-0.15616,0.92124,-0.31884,0.86473,-0.34534,0.91693,-0.44072,0.96060,-0.46866,0.81874,-0.40372,0.82681,-0.42231,0.75784,-0.38231,0.80448,-0.40575,0.74354,-0.45039,g
22 | 0,0,1,1,0,0,0,0,-1,-1,0,0,0,0,-1,-1,-1,-1,-1,1,-1,1,0,0,0,0,1,-1,-1,1,-1,1,-1,1,b
23 | 1,0,0.96071,0.07088,1,0.04296,1,0.09313,0.90169,-0.05144,0.89263,0.02580,0.83250,-0.06142,0.87534,0.09831,0.76544,0.00280,0.75206,-0.05295,0.65961,-0.07905,0.64158,-0.05929,0.55677,-0.07705,0.58051,-0.02205,0.49664,-0.01251,0.51310,-0.00015,0.52099,-0.00182,g
24 | 0,0,-1,1,0,0,0,0,-1,1,1,1,0,0,0,0,1,-1,-1,1,1,1,0,0,-1,-1,1,-1,1,1,-1,1,0,0,b
25 | 1,0,1,-0.06182,1,0.02942,1,-0.05131,1,-0.01707,1,-0.11726,0.84493,-0.05202,0.93392,-0.06598,0.69170,-0.07379,0.65731,-0.20367,0.94910,-0.31558,0.80852,-0.31654,0.84932,-0.34838,0.72529,-0.29174,0.73094,-0.38576,0.54356,-0.26284,0.64207,-0.39487,g
26 | 1,0,1,0.57820,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-0.62796,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,b
27 | 1,0,1,-0.08714,1,-0.17263,0.86635,-0.81779,0.94817,0.61053,0.95473,-0.41382,0.88486,-0.31736,0.87937,-0.23433,0.81051,-0.62180,0.12245,-1,0.90284,0.11053,0.62357,-0.78547,0.55389,-0.82868,0.48136,-0.86583,0.40650,-0.89674,0.32984,-0.92128,-0.13341,-1,g
28 | 0,0,-1,-1,0,0,-1,1,1,-0.37500,0,0,0,0,0,0,1,-1,-1,-1,1,-1,0,0,1,-1,-1,1,-1,-1,0,0,-1,1,b
29 | 1,0,1,0.08380,1,0.17387,1,-0.13308,0.98172,0.64520,1,0.47904,1,0.59113,1,0.70758,1,0.82777,1,0.95099,1,1,0.98042,1,0.91624,1,0.83899,1,0.74822,1,0.64358,1,0.52479,1,g
30 | 0,0,-1,-1,1,1,1,-1,-1,1,1,-1,-1,-1,0,0,1,1,-1,-1,1,-1,1,-1,1,1,1,-1,1,-1,-1,1,1,-1,b
31 | 1,0,1,-0.14236,1,-0.16256,1,-0.23656,1,-0.07514,1,-0.25010,1,-0.26161,1,-0.21975,1,-0.38606,1,-0.46162,1,-0.35519,1,-0.59661,1,-0.47643,0.98820,-0.49687,1,-0.75820,1,-0.75761,1,-0.84437,g
32 | 1,0,1,-1,1,1,1,-1,1,-1,1,-1,1,-0.01840,1,-1,1,1,1,-0.85583,1,1,1,-1,0,0,1,1,1,-0.79141,1,1,1,1,b
33 | 1,0,0.88208,-0.14639,0.93408,-0.11057,0.92100,-0.16450,0.88307,-0.17036,0.88462,-0.31809,0.85269,-0.31463,0.82116,-0.35924,0.80681,-0.33632,0.75243,-0.47022,0.70555,-0.47153,0.66150,-0.50085,0.61297,-0.48086,0.56804,-0.54629,0.50179,-0.59854,0.47075,-0.57377,0.42189,-0.58086,g
34 | 1,0,0.71253,-0.02595,0.41287,-0.23067,0.98019,-0.09473,0.99709,-0.10236,1,-0.10951,0.58965,1,0.83726,-1,0.82270,-0.17863,0.80760,-0.28257,-0.25914,0.92730,0.51933,0.05456,0.65493,-0.20392,0.93124,-0.41307,0.63811,-0.21901,0.86136,-0.87354,-0.23186,-1,b
35 | 1,0,1,-0.15899,0.72314,0.27686,0.83443,-0.58388,1,-0.28207,1,-0.49863,0.79962,-0.12527,0.76837,0.14638,1,0.39337,1,0.26590,0.96354,-0.01891,0.92599,-0.91338,1,0.14803,1,-0.11582,1,-0.11129,1,0.53372,1,-0.57758,g
36 | 1,0,0.66161,-1,1,1,1,-0.67321,0.80893,-0.40446,1,-1,1,-0.89375,1,0.73393,0.17589,0.70982,1,0.78036,1,0.85268,1,-1,1,0.85357,1,-0.08571,0.95982,-0.36250,1,0.65268,1,0.34732,b
37 | 1,0,1,0.00433,1,-0.01209,1,-0.02960,1,-0.07014,0.97839,-0.06256,1,-0.06544,0.97261,-0.07917,0.92561,-0.13665,0.94184,-0.14327,0.99589,-0.14248,0.94815,-0.13565,0.89469,-0.20851,0.89067,-0.17909,0.85644,-0.18552,0.83777,-0.20101,0.83867,-0.20766,g
38 | 0,0,1,1,1,-1,0,0,0,0,-1,-1,0,0,0,0,-1,1,1,1,-1,1,-1,1,1,-1,1,1,-1,1,1,1,0,0,b
39 | 1,0,0.91241,0.04347,0.94191,0.02280,0.94705,0.05345,0.93582,0.01321,0.91911,0.06348,0.92766,0.12067,0.92048,0.06211,0.88899,0.12722,0.83744,0.14439,0.80983,0.11849,0.77041,0.14222,0.75755,0.11299,0.73550,0.13282,0.66387,0.15300,0.70925,0.10754,0.65258,0.11447,g
40 | 1,0,1,0.02461,0.99672,0.04861,0.97545,0.07143,0.61745,-1,0.91036,0.11147,0.88462,0.53640,0.82077,0.14137,0.76929,0.15189,1,0.41003,0.65850,0.16371,0.60138,0.16516,0.54446,0.16390,0.48867,0.16019,0.43481,0.15436,0.38352,0.14677,1,1,b
41 | 1,0,1,0.06538,1,0.20746,1,0.26281,0.93051,0.32213,0.86773,0.39039,0.75474,0.50082,0.79555,0.52321,0.65954,0.60756,0.57619,0.62999,0.47807,0.67135,0.40553,0.68840,0.34384,0.72082,0.27712,0.72386,0.19296,0.70682,0.11372,0.72688,0.06990,0.71444,g
42 | 1,0,-1,-1,1,1,1,-0.14375,0,0,-1,1,1,1,0.17917,-1,-1,-1,0.08750,-1,1,-1,-1,1,-1,-1,1,-1,-1,-1,1,1,0,0,b
43 | 1,0,0.90932,0.08791,0.86528,0.16888,1,0.16598,0.55187,0.68154,0.70207,0.36719,0.16286,0.42739,0.57620,0.46086,0.51067,0.49618,0.31639,0.12967,0.37824,0.54462,0.31274,0.55826,0.24856,0.56527,0.18626,0.56605,0.12635,0.56101,0.06927,0.55061,0.12137,0.67739,g
44 | 1,0,-0.64286,-1,1,0.82857,1,-1,1,-0.23393,1,0.96161,1,-0.37679,1,-1,1,0.13839,1,-1,1,-0.03393,-0.84286,1,0.53750,0.85714,1,1,1,-1,1,-1,1,-1,b
45 | 1,0,0.99025,-0.05785,0.99793,-0.13009,0.98663,-0.19430,0.99374,-0.25843,0.92738,-0.30130,0.92651,-0.37965,0.89812,-0.43796,0.84922,-0.52064,0.87433,-0.57075,0.79016,-0.59839,0.74725,-0.64615,0.68282,-0.68479,0.65247,-0.73174,0.61010,-0.75353,0.54752,-0.80278,0.49195,-0.83245,g
46 | 0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,-0.37500,-1,-1,-1,0,0,0,0,-1,-1,-1,-1,-1,1,1,0,0,0,b
47 | 1,0,1,-0.03730,1,-0.07383,0.99601,-0.11039,0.99838,-0.09931,0.98941,-0.13814,0.96674,-0.21695,0.95288,-0.25099,0.91236,-0.34400,0.90581,-0.32152,0.89991,-0.34691,0.87874,-0.37643,0.86213,-0.42990,0.83172,-0.43122,0.81433,-0.42593,0.77919,-0.47977,0.75115,-0.50152,g
48 | 1,0,0.94598,-0.02685,-1,0.26131,-0.36393,0.35639,0.69258,-0.63427,1,-0.03353,-0.29020,-0.00550,-0.54852,0.15452,0.91921,-0.46270,1,-0.50424,-0.29735,-0.31454,-0.73864,0.37361,0.83872,-0.46734,0.52208,-0.58130,1,-0.61393,-0.09634,0.20477,-0.06117,0.41913,b
49 | 1,0,0.98166,0.00874,0.98103,-0.03818,0.97565,-0.05699,0.95947,-0.06971,0.99004,-0.04507,0.94713,-0.11102,0.93369,-0.12790,0.94217,-0.11583,0.79682,-0.19200,0.88274,-0.17387,0.86257,-0.18739,0.88487,-0.19689,0.81813,-0.21136,0.78546,-0.23864,0.76911,-0.23095,0.74323,-0.23902,g
50 | 1,0,0,0,1,0.51724,0,0,0.10991,-1,0,0,0,0,-1,-0.22414,-0.55711,-0.83297,0.76940,0.63147,0,0,0.53448,0.35668,-0.90302,0.44828,1,-1,-1,0.81573,0,0,0,0,b
51 | 1,0,0.84134,-0.18362,0.43644,0.02919,0.93421,-0.00267,0.87947,0.13795,0.81121,-0.01789,0.88559,0.54991,0.91714,-0.57486,0.75000,-0.29520,0.86676,-0.20104,1,1,0.46610,-0.16290,0.90066,-0.02778,0.93358,-0.01158,0.61582,-0.32298,0.84463,-0.25706,0.93323,-0.01425,g
52 | 0,0,1,1,1,-1,0,0,0,0,1,1,1,1,-1,-1,1,-1,-1,1,0,0,1,-1,1,-1,1,1,-1,-1,0,0,0,0,b
53 | 1,0,1,1,1,1,0.91010,1,-0.26970,1,-0.83152,1,-1,1,-1,0.72526,-1,-0.57779,-1,-0.42052,-1,-1,-0.52838,-1,0.90014,-1,1,-1,1,-1,1,-0.34686,1,0.34845,g
54 | 1,0,-0.67935,-1,-1,1,1,0.63317,0.03515,-1,-1,-1,1,1,0.88683,-1,-1,1,0.83840,1,1,-1,-1,-1,-0.18856,1,1,-1,-1,-1,-1,1,1,0.33611,b
55 | 1,0,0.95659,0.08143,0.97487,-0.05667,0.97165,-0.08484,0.96097,-0.06561,0.94717,0.01279,0.95436,-0.16795,0.94612,-0.19497,0.99630,-0.32268,0.90343,-0.35902,0.91428,-0.27316,0.90140,-0.29807,0.99899,-0.40747,0.87244,-0.34586,0.92059,-0.30619,0.83951,-0.39061,0.82166,-0.41173,g
56 | 1,0,0.08333,-0.20685,-1,1,-1,1,0.71875,0.47173,-0.82143,-0.62723,-1,-1,-1,1,-0.02753,0.59152,-0.42113,-0.42113,-0.74628,-1,-1,-0.46801,-1,0.23810,1,-1,-1,-0.38914,-1,-1,-1,0.61458,b
57 | 1,0,1,-0.02259,1,-0.04494,1,-0.06682,1,-0.08799,1,0.56173,1,-0.12738,1,-0.14522,1,0.32407,1,-0.17639,0.99484,-0.18949,0.95601,-0.20081,1,-0.92284,0.87280,-0.21793,0.82920,-0.22370,0.78479,-0.22765,0.73992,-0.22981,g
58 | 0,0,-1,1,1,-1,-1,1,0,0,1,1,-1,-0.18750,1,1,-1,-1,1,-1,-1,-1,1,1,1,-1,1,1,1,1,0,0,-1,-1,b
59 | 1,0,1,0.05812,0.94525,0.07418,0.99952,0.13231,1,-0.01911,0.94846,0.07033,0.95713,0.14644,0.94862,0.11224,0.90896,0.20119,0.96741,0.16265,0.99695,0.14258,0.90784,0.16410,0.91667,0.22431,0.88423,0.23571,0.88568,0.22511,0.78324,0.29576,0.83574,0.31166,g
60 | 1,0,0.17188,-1,-1,1,0,0,0,0,-1,1,0,0,-0.61354,-0.67708,0.80521,0.36146,0.51979,0.14375,0,0,-1,-0.27083,-0.84792,0.96250,1,1,-1,0.67708,0,0,0,0,b
61 | 1,0,1,0.09771,1,0.12197,1,0.22574,0.98602,0.09237,0.94930,0.19211,0.92992,0.24288,0.89241,0.28343,0.85529,0.26721,0.83656,0.33129,0.83393,0.31698,0.74829,0.39597,0.76193,0.34658,0.68452,0.42746,0.62764,0.46031,0.56791,0.47033,0.54252,0.50903,g
62 | 1,0,0.01667,-0.35625,0,0,0,0,0,0,0,0,0,0,0.12292,-0.55000,0.22813,0.82813,1,-0.42292,0,0,0.08333,-1,-0.10625,-0.16667,1,-0.76667,-1,0.18854,0,0,1,-0.27292,b
63 | 1,0,1,0.16801,0.99352,0.16334,0.94616,0.33347,0.91759,0.22610,0.91408,0.37107,0.84250,0.46899,0.81011,0.49225,0.78473,0.48311,0.65091,0.56977,0.56553,0.58071,0.55586,0.64720,0.48311,0.55236,0.43317,0.69129,0.35684,0.76147,0.33921,0.66844,0.22101,0.78685,g
64 | 1,0,0.63816,1,0.20833,-1,1,1,0.87719,0.30921,-0.66886,1,-0.05921,0.58772,0.01754,0.05044,-0.51535,-1,0.14254,-0.03289,0.32675,-0.43860,-1,1,0.80921,-1,1,-0.06140,1,1,0.20614,-1,1,1,b
65 | 1,0,1,-0.41457,1,0.76131,0.87060,0.18593,1,-0.09925,0.93844,0.47990,0.65452,-0.16080,1,0.00879,0.97613,-0.50126,0.80025,-0.24497,0.88065,-0.19095,1,-0.12312,0.93593,0.10678,0.92890,-0.07249,1,-0.27387,0.43970,0.19849,0.51382,-0.05402,g
66 | 1,0,0.84783,0.10598,1,0.39130,1,-1,0.66938,0.08424,1,0.27038,1,0.60598,1,0.35507,1,0.02672,0.58424,-0.43025,1,0.63496,0.89130,0.26585,0.91033,-0.33333,1,0.15942,0.37681,-0.01947,1,0.22464,1,0.37409,b
67 | 1,0,1,0.28046,1,0.02477,1,0.07764,1,0.04317,0.98762,0.33266,1,0.05489,1,0.04384,0.95750,-0.24598,0.84371,-0.08668,1,0.04150,0.99933,0.27376,1,-0.39056,0.96414,-0.02174,0.86747,0.23360,0.94578,-0.22021,0.80355,-0.07329,g
68 | 0,0,1,-1,1,-1,1,-1,1,-1,1,1,1,1,1,-1,1,1,1,1,1,1,1,-1,1,-1,1,-1,1,0.65625,0,0,1,-1,b
69 | 1,0,1,0.67784,0.81309,0.82021,0.43019,1,0.20619,0.80541,-0.43872,1,-0.79135,0.77092,-1,0.40268,-0.39046,-0.58634,-0.97907,-0.42822,-0.73083,-0.76339,-0.37671,-0.97491,0.41366,-1,0.41778,-0.93296,0.25773,-1,0.93570,-0.35222,0.98816,0.03446,g
70 | 1,0,1,1,1,-1,1,-1,1,1,1,1,1,1,1,-1,1,1,1,1,1,1,1,1,1,1,1,0.5,0,0,1,-1,1,-1,b
71 | 1,0,1,0.03529,1,0.18281,1,0.26968,1,0.25068,1,0.28778,1,0.38643,1,0.31674,1,0.65701,1,0.53846,1,0.61267,1,0.59457,0.89593,0.68326,0.89502,0.71374,0.85611,0.67149,0.74389,0.85611,0.71493,0.75837,g
72 | 0,0,1,-1,1,1,-1,-1,1,-1,0,0,0,0,-1,1,1,-1,1,-1,-0.75000,1,1,-1,1,-1,1,-1,-1,-1,0,0,1,-1,b
73 | 1,0,0.96087,0.08620,0.96760,0.19279,0.96026,0.27451,0.98044,0.35052,0.92867,0.46281,0.86265,0.52517,0.82820,0.58794,0.73242,0.69065,0.69003,0.73140,0.54473,0.68820,0.48339,0.76197,0.40615,0.74689,0.33401,0.83796,0.24944,0.86061,0.13756,0.86835,0.09048,0.86285,g
74 | 1,0,0.69444,0.38889,0,0,-0.32937,0.69841,0,0,0,0,0,0,0.20635,-0.24206,0.21032,0.19444,0.46429,0.78175,0,0,0,0,0.73413,0.27381,0.76190,0.63492,0,0,0,0,0,0,b
75 | 1,0,1,0.05070,1,0.10827,1,0.19498,1,0.28453,1,0.34826,1,0.38261,0.94575,0.42881,0.89126,0.50391,0.75906,0.58801,0.80644,0.59962,0.79578,0.62758,0.66643,0.63942,0.59417,0.69435,0.49538,0.72684,0.47027,0.71689,0.33381,0.75243,g
76 | 0,0,1,1,0,0,1,-1,1,-1,1,1,1,1,1,-1,1,1,1,1,1,-1,-1,-1,1,-1,1,-1,1,1,0,0,1,-1,b
77 | 1,0,1,0.04078,1,0.11982,1,0.16159,1,0.27921,0.98703,0.30889,0.92745,0.37639,0.91118,0.39749,0.81939,0.46059,0.78619,0.46994,0.79400,0.56282,0.70331,0.58129,0.67077,0.59723,0.58903,0.60990,0.53952,0.60932,0.45312,0.63636,0.40442,0.62658,g
78 | 0,0,1,1,1,-1,1,1,1,1,1,1,1,1,1,1,1,-1,-1,1,-1,1,-1,1,1,-1,1,1,-1,1,-1,-1,-1,1,b
79 | 1,0,1,0.24168,1,0.48590,1,0.72973,1,1,1,1,1,1,1,0.77128,1,1,1,1,0.74468,1,0.89647,1,0.64628,1,0.38255,1,0.10819,1,-0.17370,1,-0.81383,1,g
80 | 0,0,1,1,1,-1,1,1,-1,1,0,0,1,1,0,0,0,0,-1,1,-1,1,1,1,1,-1,1,1,1,1,1,-1,-1,1,b
81 | 1,0,1,-0.06604,1,0.62937,1,0.09557,1,0.20280,1,-1,1,-0.40559,1,-0.15851,1,0.04895,1,-0.61538,1,-0.26573,1,-1,1,-0.58042,1,-0.81372,1,-1,1,-0.78555,1,-0.48252,g
82 | 0,0,1,-1,1,1,1,1,1,1,1,1,1,-1,1,-1,1,1,1,-1,1,1,1,1,1,-1,1,1,1,-1,1,1,1,-1,b
83 | 1,0,0.92277,0.07804,0.92679,0.16251,0.89702,0.24618,0.84111,0.35197,0.78801,0.42196,0.70716,0.46983,0.70796,0.56476,0.60459,0.64200,0.51247,0.64924,0.39903,0.66975,0.34232,0.68343,0.23693,0.76146,0.18765,0.73885,0.09694,0.71038,0.02735,0.77072,-0.04023,0.69509,g
84 | 1,0,0.68198,-0.17314,0.82332,0.21908,0.46643,0.32862,0.25795,0.58304,1,-0.15194,0.01060,0.44523,0.01060,0.38869,0.18681,0.41168,0.10567,0.36353,0.04325,0.30745,-0.00083,0.24936,-0.02862,0.19405,-0.04314,0.14481,-0.04779,0.10349,-0.04585,0.07064,-0.04013,0.04586,b
85 | 1,0,0.74852,-0.02811,0.65680,-0.05178,0.80621,0.02811,0.85947,0.02515,0.63462,0.08728,0.71598,0.07840,0.73077,0.05178,0.78550,-0.27811,0.65976,-0.01479,0.78698,0.06953,0.34615,-0.18639,0.65385,0.02811,0.61009,-0.06637,0.53550,-0.21154,0.59024,-0.14053,0.56361,0.02959,g
86 | 1,0,0.39179,-0.06343,0.97464,0.04328,1,1,0.35821,0.15299,0.54478,0.13060,0.61567,-0.82090,0.57836,0.67910,0.66791,-0.10448,0.46642,-0.11567,0.65574,0.14792,0.83209,0.45522,0.47015,0.16418,0.49309,0.14630,0.32463,-0.02612,0.39118,0.13521,0.34411,0.12755,b
87 | 1,0,0.67547,0.04528,0.76981,-0.10566,0.77358,0.03774,0.66038,-0.04528,0.64528,0.01132,0.66792,-0.13962,0.72075,-0.02264,0.76981,0.08679,0.61887,-0.07925,0.75849,-0.23774,0.73962,-0.14717,0.84906,-0.15094,0.73886,-0.05801,0.66792,0.02264,0.86415,0.03774,0.73208,0.00755,g
88 | 1,0,0.72727,-0.05000,0.89241,0.03462,1,0.72727,0.66364,-0.05909,0.48182,-0.16818,0.81809,0.09559,0.56818,1,0.50455,0.21818,0.66818,0.10000,1,-0.30000,0.98636,-1,0.57273,0.32727,0.56982,0.14673,0.42273,0.08182,0.48927,0.14643,1,1,b
89 | 1,0,0.57647,-0.01569,0.40392,0,0.38431,0.12941,0.40000,-0.05882,0.56471,0.14118,0.46667,0.08235,0.52549,-0.05490,0.58039,0.01569,0.50196,0,0.45882,0.06667,0.58039,0.08235,0.49804,0.00392,0.48601,0.10039,0.46275,0.08235,0.45098,0.23529,0.43137,0.17255,g
90 | 1,0,0.41932,0.12482,0.35000,0.12500,0.23182,0.27955,-0.03636,0.44318,0.04517,0.36194,-0.19091,0.33636,-0.13350,0.27322,0.02727,0.40455,-0.34773,0.12727,-0.20028,0.05078,-0.18636,0.36364,-0.14003,-0.04802,-0.09971,-0.07114,-1,-1,-0.02916,-0.07464,-0.00526,-0.06314,b
91 | 1,0,0.88305,-0.21996,1,0.36373,0.82403,0.19206,0.85086,0.05901,0.90558,-0.04292,0.85193,0.25000,0.77897,0.25322,0.69206,0.57940,0.71030,0.39056,0.73176,0.27575,1,0.34871,0.56760,0.52039,0.69811,0.53235,0.80901,0.58584,0.43026,0.70923,0.52361,0.54185,g
92 | 1,0,0.84557,-0.08580,-0.31745,-0.80553,-0.08961,-0.56435,0.80648,0.04576,0.89514,-0.00763,-0.18494,0.63966,-0.20019,-0.68065,0.85701,-0.11344,0.77979,-0.15729,-0.06959,0.50810,-0.34128,0.80934,0.78932,-0.03718,0.70882,-0.25288,0.77884,-0.14109,-0.21354,-0.78170,-0.18494,-0.59867,b
93 | 1,0,0.70870,-0.24783,0.64348,0.04348,0.45217,0.38261,0.65217,0.18261,0.5,0.26957,0.57826,-0.23043,0.50435,0.37826,0.38696,-0.42609,0.36087,-0.26087,0.26957,0.11739,0.53246,-0.03845,0.31304,-0.12174,0.49930,-0.04264,0.48348,-0.04448,0.64348,-0.25217,0.50435,0.14783,g
94 | 1,0,-0.54180,0.14861,-0.33746,0.73375,0.52012,-0.13932,0.31889,-0.06811,0.20743,-0.15170,0.47368,0.08978,0.56347,-0.15480,0.16409,0.45201,0.33746,0.03406,0.50464,0.07121,-0.63777,-0.61610,1,0.65635,0.41348,-0.40116,-0.15170,0.11146,0.02399,0.55820,0.52632,-0.08978,b
95 | 1,0,0.29202,0.13582,0.45331,0.16808,0.51783,-0.00509,0.52632,0.20883,0.52462,-0.16638,0.47368,-0.04754,0.55518,0.03905,0.81664,-0.22411,0.42445,-0.04244,0.34975,0.06621,0.28183,-0.20883,0.51731,-0.03176,0.50369,-0.03351,0.34635,0.09847,0.70798,-0.01868,0.39559,-0.03226,g
96 | 1,0,0.79157,0.16851,0,0,0.56541,0.06874,0.39468,1,0.38359,0.99557,-0.02439,0.53215,0.23725,0.12860,-0.02661,0.95122,-0.50998,0.84922,-0.10200,0.38803,-0.42572,0.23725,-0.91574,0.80710,-0.34146,0.88248,-1,0.69401,-1,0.12860,0,0,b
97 | 1,0,0.90116,0.16607,0.79299,0.37379,0.72990,0.50515,0.59784,0.72997,0.44303,0.81152,0.24412,0.87493,0.06438,0.85038,-0.12611,0.87396,-0.28739,0.79617,-0.46635,0.65924,-0.57135,0.53805,-0.68159,0.39951,-0.71844,0.25835,-0.72369,0.11218,-0.71475,-0.05525,-0.67699,-0.19904,g
98 | 1,0,0.97714,0.19049,0.82683,0.46259,0.71771,0.58732,0.47968,0.84278,0.31409,0.92643,0.10289,0.93945,-0.13254,0.84290,-0.32020,0.91624,-0.52145,0.79525,-0.68274,0.49508,-0.77408,0.33537,-0.85376,0.17849,-0.83314,-0.01358,-0.82366,-0.19321,-0.67289,-0.33662,-0.59943,-0.49700,g
99 | 1,0,-1,-1,0,0,0.50814,-0.78502,0.60586,0.32899,-1,-0.41368,0,0,0,0,1,-0.26710,0.36482,-0.63518,0.97068,-1,-1,-1,1,-0.59609,-1,-1,-1,-1,1,-1,0,0,b
100 | 1,0,0.74084,0.04974,0.79074,0.02543,0.78575,0.03793,0.66230,0.09948,0.67801,0.31152,0.75934,0.07348,0.74695,0.08442,0.70681,-0.07853,0.63613,0,0.70021,0.11355,0.68183,0.12185,0.67016,0.15445,0.64158,0.13608,0.65707,0.17539,0.59759,0.14697,0.57455,0.15114,g
101 | 1,0,1,-1,0,0,0.77941,-0.99265,0.80882,0.55147,-0.41912,-0.94853,0,0,0,0,0.72059,-0.77206,0.73529,-0.60294,0,0,0.18382,-1,-1,-1,-1,-1,1,-1,1,-1,0,0,b
102 | 1,0,1,0.01709,0.96215,-0.03142,1,-0.03436,1,-0.05071,0.99026,-0.07092,0.99173,-0.09002,1,-0.15727,1,-0.14257,0.98310,-0.11813,1,-0.18519,1,-0.19272,0.98971,-0.22083,0.96490,-0.20243,0.94599,-0.17123,0.96436,-0.22561,0.87011,-0.23296,g
103 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,-1,0,0,0,0,0,0,b
104 | 1,0,0.95704,-0.12095,0.63318,-0.12690,0.96365,-0.18242,0.97026,0.08460,0.92003,-0.01124,0.83543,-0.24719,1,-0.31395,0.99273,-0.21216,0.98678,-0.21018,1,-0.27165,0.93126,-0.39458,1,-0.19233,0.88793,-0.31565,0.81428,-0.23728,0.89095,-0.31857,0.69531,-0.41573,g
105 | 1,0,0.28409,-0.31818,0,0,0.68182,-1,0.30682,0.95833,0.64394,0.06439,0.34848,-0.84848,0,0,0.59091,-0.35985,0.45076,-0.80682,0,0,0,0,0.24242,0.17803,1,-0.23864,0.06061,-0.48485,0.16288,-0.70076,0,0,b
106 | 1,0,0.94490,-0.49311,1,-0.03692,0.98898,-0.87052,0.90083,0.66942,1,-0.10104,1,-0.12493,1,-0.15017,1,-0.17681,1,-0.20491,1,-0.23452,1,-0.26571,1,-0.29852,1,-0.33304,1,-0.36931,1,-0.40740,1,-0.44739,g
107 | 1,0,0,0,0,0,0,0,0,0,0.62195,1,0,0,0,0,0.36585,-0.71951,0.56098,-1,0,0,0,0,0,0,1,0.10976,0,0,0,0,0,0,b
108 | 1,0,0.99449,0.00526,0.84082,-0.11313,0.88237,-0.16431,0.99061,-0.06257,0.96484,-0.07496,0.85221,0.02966,0.87161,-0.20848,0.93881,-0.12977,0.98298,-0.08935,0.89876,0.00075,0.87836,-0.05882,0.93368,-0.19872,0.87579,-0.17806,0.94294,-0.16581,0.80253,-0.25741,0.76586,-0.27794,g
109 | 1,0,0.10135,0.10811,0,0,0,0,0.54730,0.82432,0.31081,1,0,0,0,0,0.37162,-1,0.33108,-1,0,0,0,0,-0.42568,-1,1,-1,0.55405,-0.23649,0,0,0,0,b
110 | 1,0,1,-0.57224,0.99150,-0.73371,0.89518,-0.97450,1,-0.35818,1,-0.23229,0.62890,-0.86402,1,-0.57535,1,-0.79603,0.76771,-0.88952,0.96601,-1,0.70120,-0.74896,0.61946,-0.76904,0.53777,-0.77986,0.81020,-1,1,-1,0.30445,-0.76112,g
111 | 1,0,0.65909,-0.62879,0,0,0,0,0.77273,1,1,-0.28030,0,0,0,0,0.62121,-0.22727,0.84091,-1,1,-1,0,0,0,0,1,-0.93939,-0.12879,-0.93182,0,0,0,0,b
112 | 1,0,0.86284,0.19310,0.80920,0.41149,0.67203,0.55785,0.54559,0.69962,0.36705,0.81533,0.19617,0.85671,-0.04061,0.86284,-0.17241,0.75785,-0.34100,0.65747,-0.48199,0.56092,-0.60230,0.40996,-0.59234,0.25747,-0.63038,0.08818,-0.57241,-0.07816,-0.54866,-0.19923,-0.42912,-0.31954,g
113 | 1,0,0.42000,-0.61000,0,0,1,-1,0.90000,1,0.43000,0.64000,0,0,0,0,0.67000,-0.29000,0.84000,-1,0,0,0,0,0.21000,0.68000,1,0.22000,0,0,0,0,0,0,b
114 | 1,0,1,0.23395,0.91404,0.52013,0.78020,0.72144,0.47660,0.84222,0.27639,0.91730,0.09467,0.88248,-0.21980,0.91404,-0.34168,0.75517,-0.51360,0.64527,-0.64527,0.44614,-0.74102,0.29162,-0.70838,0.03591,-0.71731,-0.11943,-0.64962,-0.28183,-0.51251,-0.44505,-0.37432,-0.53319,g
115 | 1,0,0.91353,0.81586,-0.72973,1,-0.39466,0.55735,0.05405,0.29730,-0.18599,-0.10241,-0.03158,-0.08970,0.01401,-0.03403,0.01108,-0.00537,0.00342,0.00097,0.00048,0.00075,-0.00003,0.00019,-0.00003,0.00002,-0.00001,0,0,0,0,0,0,0,b
116 | 1,0,0.21429,-0.09524,0.33333,0.07143,0.19048,0.19048,0.23810,0.09524,0.40476,0.02381,0.30952,-0.04762,0.30952,-0.04762,0.28571,-0.11905,0.33333,0.04762,0.30952,0,0.21429,-0.11905,0.35714,-0.04762,0.22109,-0.02290,0.19048,0,0.16997,-0.02034,0.14694,-0.01877,g
117 | 1,0,1,-0.14754,1,0.04918,0.57377,-0.01639,0.65574,0.01639,0.85246,-0.03279,0.72131,0,0.68852,-0.16393,0.19672,-0.14754,0.65558,-0.17176,0.67213,0.03279,1,-0.29508,0.31148,-0.34426,0.52385,-0.20325,0.32787,-0.03279,0.27869,-0.44262,0.49180,-0.06557,b
118 | 1,0,0.98182,0,0.88627,0.03131,0.86249,0.04572,0.80000,0,0.69091,0.04545,0.79343,0.08436,0.77118,0.09579,0.62727,0.25455,0.68182,0.12727,0.70674,0.12608,0.68604,0.13493,0.74545,0.22727,0.64581,0.15088,0.67273,0.02727,0.60715,0.16465,0.58840,0.17077,g
119 | 1,0,0.39286,0.52381,-0.78824,0.11342,-0.16628,-0.76378,0.66667,0.01190,0.82143,0.40476,-0.67230,0.30729,-0.34797,-0.63668,0.46429,0.15476,0.54762,0.05952,-0.51830,0.44961,-0.47651,-0.47594,0.32143,0.70238,0.51971,0.38848,0.57143,0.39286,-0.54891,-0.29915,0.25441,-0.55837,b
120 | 1,0,0.86889,-0.07111,1,-0.02494,1,-0.06889,0.87778,0.00222,0.83556,-0.06444,1,-0.07287,1,-0.20000,0.86889,0.05333,0.88000,-0.03778,1,-0.11526,1,-0.18667,0.84444,0.03556,1,-0.14162,0.82222,-0.14667,1,-0.15609,1,-0.44222,g
121 | 1,0,0.43636,-0.12727,0.58182,-0.14545,0.18182,-0.67273,0.34545,-0.03636,0.29091,-0.05455,0.29091,0.29091,0.36364,-0.41818,0.20000,-0.01818,0.36364,0.05455,0.12727,0.49091,0.61818,0.16364,0.32727,0.16364,0.41098,-0.07027,0.34545,-0.05455,0.12727,-0.36364,0.29091,-0.29091,b
122 | 1,0,1,-0.92453,1,0.75472,0.49057,-0.05660,0.62264,0,1,-0.00054,0.45283,0.07547,0.62264,-0.05660,0.98878,-0.00085,0.52830,0,0.52830,0.07547,0.95190,-0.00112,1,0.79245,0.92192,-0.00128,0.94340,-1,1,0.43396,0.43396,-0.11321,g
123 | 1,0,0.73810,0.83333,-0.76190,-0.23810,0.33333,-0.14286,0.45238,-0.14286,-0.67285,0.12808,0.33333,0,0.28571,-0.07143,-0.38214,0.51163,0.23810,0.02381,0.45238,0.04762,0.16667,-0.26190,-0.57255,-0.10234,0.24889,-0.51079,1,0,-0.66667,-0.04762,0.26190,0.02381,b
124 | 1,0,0.43750,0.04167,0.58333,-0.10417,0.39583,0,0.33333,-0.06250,0.47917,0,0.29167,0.10417,0.54167,0.02083,0.43750,-0.22917,0.35417,-0.22917,0.33333,0.08333,0.25000,0.18750,0.39583,-0.18750,0.44012,-0.10064,0.41667,-0.08333,0.58333,-0.31250,0.33333,-0.06250,g
125 | 1,0,1,1,0,0,0,0,0,0,0.47744,-0.89098,-0.51504,0.45489,-0.95489,0.28571,0.64662,1,0,0,0,0,0.62030,0.20301,-1,-1,1,-1,1,1,0,0,0,0,b
126 | 1,0,0.95217,0.06595,0.93614,0.13030,0.90996,0.19152,0.84881,-0.49962,0.90023,0.61320,0.77937,0.34328,0.72254,0.37988,0.66145,0.40844,0.95472,0.59862,0.53258,0.44088,0.46773,0.44511,0.40440,0.44199,0.34374,0.43221,0.90330,1,0.23405,0.39620,0.18632,0.37191,g
127 | 1,0,0.59840,0.40332,0.82809,0.80521,0.76001,0.70709,0.84010,-0.10984,0.97311,0.07981,0.95824,-0.85727,0.91962,0.88444,0.95452,-0.05206,0.88673,0.18135,0.98484,-0.69594,0.86670,-0.85755,0.28604,-0.30063,1,0.17076,0.62958,0.42677,0.87757,0.81007,0.81979,0.68822,b
128 | 1,0,0.95882,0.10129,1,-0.01918,0.98313,0.02555,0.96974,-0.09316,0.98955,-0.02716,0.97980,-0.03096,1,-0.05343,1,-0.05179,0.93840,0.01557,0.97620,-0.09284,0.97889,-0.05318,0.91567,-0.15675,0.95677,-0.06995,0.90978,0.01307,1,-0.10797,0.93144,-0.06888,g
129 | 1,0,0,0,-0.33672,0.85388,0,0,0.68869,-1,0.97078,0.31385,-0.26048,-0.59212,-0.30241,0.65565,0.94155,0.16391,0,0,0,0,-0.18043,-1,0,0,1,-1,0,0,0.04447,0.61881,0,0,b
130 | 1,0,0.96933,0.00876,1,0.00843,0.98658,-0.00763,0.97868,-0.02844,0.99820,-0.03510,1,-0.01271,1,-0.02581,1,-0.01175,0.98485,0.00025,1,-0.02612,1,-0.04744,0.96019,-0.04527,0.99188,-0.03473,0.97020,-0.02478,1,-0.03855,0.98420,-0.04112,g
131 | 1,0,0,0,0.98919,-0.22703,0.18919,-0.05405,0,0,0.93243,0.07297,1,-0.20000,1,0.07027,1,-0.11351,0,0,1,-0.21081,1,-0.41622,0,0,1,-0.17568,0,0,1,-0.25946,0.28919,-0.15676,b
132 | 1,0,0.64122,0.01403,0.34146,-0.02439,0.52751,0.03466,0.19512,0.12195,0.43313,0.04755,0.21951,0.04878,0.29268,0,0.36585,0,0.31707,0.07317,0.26829,0.12195,0.23698,0.05813,0.21951,0.09756,0.19304,0.05641,0.17410,0.05504,0.19512,0,0.17073,0.07317,g
133 | 1,0,1,1,1,-1,0,0,0,0,1,1,1,-1,1,1,1,-1,0,0,0,0,1,-0.27778,0,0,1,-1,1,1,1,-1,0,0,b
134 | 1,0,0.34694,0.20408,0.46939,0.24490,0.40816,0.20408,0.46939,0.44898,0.30612,0.59184,0.12245,0.55102,0,0.51020,-0.06122,0.55102,-0.20408,0.55102,-0.28571,0.44898,-0.28571,0.32653,-0.61224,0.22449,-0.46579,0.14895,-0.59184,0.18367,-0.34694,0,-0.26531,-0.24490,g
135 | 1,0,0,0,1,-1,0,0,0,0,1,1,1,-0.25342,1,0.23288,1,-1,0,0,0,0,1,1,0,0,1,-1,0,0,1,-1,0,0,b
136 | 1,0,0.89706,0.38235,0.91176,0.37500,0.74265,0.67647,0.45588,0.77941,0.19118,0.88971,-0.02206,0.86029,-0.20588,0.82353,-0.37500,0.67647,-0.5,0.47794,-0.73529,0.38235,-0.86029,0.08824,-0.74265,-0.12500,-0.67925,-0.24131,-0.55147,-0.42647,-0.44118,-0.50735,-0.28676,-0.56618,g
137 | 1,0,-1,0.28105,0.22222,0.15033,-0.75693,-0.70984,-0.30719,0.71242,-1,1,-0.81699,0.33987,-0.79085,-0.02614,-0.98039,-0.83007,-0.60131,-0.54248,-0.04575,-0.83007,0.94118,-0.94118,-1,-0.43137,0.74385,0.09176,-1,0.05229,0.18301,0.02614,-0.40201,-0.48241,b
138 | 1,0,0.26667,-0.10000,0.53333,0,0.33333,-0.13333,0.36667,0.11667,0.56667,0.01667,0.71667,0.08333,0.70000,-0.06667,0.53333,0.20000,0.41667,-0.01667,0.31667,0.20000,0.70000,0,0.25000,0.13333,0.46214,0.05439,0.40000,0.03333,0.46667,0.03333,0.41667,-0.05000,g
139 | 1,0,-0.26667,0.40000,-0.27303,0.12159,-0.17778,-0.04444,0.06192,-0.06879,0.04461,0.02575,-0.00885,0.02726,-0.01586,-0.00166,-0.00093,-0.00883,0.00470,-0.00153,0.00138,0.00238,-0.00114,0.00102,-0.00069,-0.00050,0.00019,-0.00043,0.00026,0.00005,0,0.00015,-0.00008,0.00002,b
140 | 1,0,1,-0.37838,0.64865,0.29730,0.64865,-0.24324,0.86486,0.18919,1,-0.27027,0.51351,0,0.62162,-0.05405,0.32432,-0.21622,0.71833,-0.17666,0.62162,0.05405,0.75676,0.13514,0.35135,-0.29730,0.61031,-0.22163,0.58478,-0.23027,0.72973,-0.59459,0.51351,-0.24324,g
141 | 1,0,0.94531,-0.03516,-1,-0.33203,-1,-0.01563,0.97266,0.01172,0.93359,-0.01953,-1,0.16406,-1,-0.00391,0.95313,-0.03516,0.92188,-0.02734,-0.99219,0.11719,-0.93359,0.34766,0.95703,-0.00391,0.82041,0.13758,0.90234,-0.06641,-1,-0.18750,-1,-0.34375,b
142 | 1,0,0.95202,0.02254,0.93757,-0.01272,0.93526,0.01214,0.96705,-0.01734,0.96936,0.00520,0.95665,-0.03064,0.95260,-0.00405,0.99480,-0.02659,0.99769,0.01792,0.93584,-0.04971,0.93815,-0.02370,0.97052,-0.04451,0.96215,-0.01647,0.97399,0.01908,0.95434,-0.03410,0.95838,0.00809,g
143 | 1,0,1,-0.05529,1,-1,0.5,-0.11111,0.36111,-0.22222,1,-0.25712,0.16667,-0.11111,1,-0.34660,1,-0.38853,1,-0.42862,0,-0.25000,1,-0.50333,1,-0.27778,1,-0.57092,1,-0.27778,1,-0.63156,1,-0.65935,b
144 | 1,0,0.31034,-0.10345,0.24138,-0.10345,0.20690,-0.06897,0.07405,-0.05431,0.03649,-0.03689,0.01707,-0.02383,0.00741,-0.01482,0.00281,-0.00893,0.00078,-0.00523,-0.00003,-0.00299,-0.00028,-0.00166,-0.00031,-0.00090,-0.00025,-0.00048,-0.00018,-0.00024,-0.00012,-0.00012,-0.00008,-0.00006,g
145 | 1,0,0.62745,-0.07843,0.72549,0,0.60784,-0.07843,0.62745,-0.11765,0.68627,-0.11765,0.66667,-0.13725,0.64706,-0.09804,0.54902,-0.11765,0.54902,-0.21569,0.58824,-0.19608,0.66667,-0.23529,0.45098,-0.25490,0.52409,-0.24668,0.56863,-0.31373,0.43137,-0.21569,0.47059,-0.27451,b
146 | 1,0,0.25000,0.16667,0.46667,0.26667,0.19036,0.23966,0.07766,0.19939,0.01070,0.14922,-0.02367,0.10188,-0.03685,0.06317,-0.03766,0.03458,-0.03230,0.01532,-0.02474,0.00357,-0.01726,-0.00273,-0.01097,-0.00539,-0.00621,-0.00586,-0.00294,-0.00520,-0.00089,-0.00408,0.00025,-0.00291,g
147 | 1,0,-0.65625,0.15625,0.06250,0,0,0.06250,0.62500,0.06250,0.18750,0,-0.03125,0.09375,0.06250,0,0.15625,-0.15625,0.43750,-0.37500,0,-0.09375,0,0,0.03125,-0.46875,0.03125,0,-0.71875,0.03125,-0.03125,0,0,0.09375,b
148 | 1,0,1,-0.01081,1,-0.02703,1,-0.06486,0.95135,-0.01622,0.98919,-0.03243,0.98919,0.08649,1,-0.06486,0.95135,0.09189,0.97838,-0.00541,1,0.06486,1,0.04324,0.97838,0.09189,0.98556,0.01251,1,-0.03243,1,0.02703,1,-0.07027,g
149 | 1,0,0.85271,0.05426,1,0.08069,1,1,0.91473,-0.00775,0.83721,0.03876,1,0.27153,1,1,0.81395,0.04651,0.90698,0.11628,1,0.50670,1,-1,0.80620,0.03876,1,0.71613,0.84496,0.06977,1,0.87317,1,1,b
150 | 1,0,0.90374,-0.01604,1,0.08021,1,0.01604,0.93048,0.00535,0.93583,-0.01604,1,0,1,0.06417,1,0.04813,0.91444,0.04278,0.96791,0.02139,0.98930,-0.01604,0.96257,0.05348,0.96974,0.04452,0.87701,0.01070,1,0.09091,0.97861,0.06417,g
151 | 1,0,-0.20500,0.28750,0.23000,0.10000,0.28250,0.31750,0.32250,0.35000,0.36285,-0.34617,0.09250,0.27500,-0.09500,0.21000,-0.08750,0.23500,-0.34187,0.31408,-0.48000,-0.08000,0.29908,0.33176,-0.58000,-0.24000,0.32190,-0.28475,-0.47000,0.18500,-0.27104,-0.31228,0.40445,0.03050,b
152 | 1,0,0.60000,0.03333,0.63333,0.06667,0.70000,0.06667,0.70000,0,0.63333,0,0.80000,0,0.73333,0,0.70000,0.10000,0.66667,0.10000,0.73333,-0.03333,0.76667,0,0.63333,0.13333,0.65932,0.10168,0.60000,0.13333,0.60000,0.16667,0.63333,0.16667,g
153 | 1,0,0.05866,-0.00838,0.06704,0.00838,0,-0.01117,0.00559,-0.03911,0.01676,-0.07542,-0.00559,0.05307,0.06425,-0.03352,0,0.09497,-0.06425,0.07542,-0.04749,0.02514,0.02793,-0.00559,0.00838,0.00559,0.10335,-0.00838,0.03073,-0.00279,0.04469,0,0.04749,-0.03352,b
154 | 1,0,0.94653,0.28713,0.72554,0.67248,0.47564,0.82455,0.01267,0.89109,-0.24871,0.84475,-0.47644,0.56079,-0.75881,0.41743,-0.66455,0.07208,-0.65426,-0.19525,-0.52475,-0.44000,-0.30851,-0.55089,-0.04119,-0.64792,0.16085,-0.56420,0.36752,-0.41901,0.46059,-0.22535,0.50376,-0.05980,g
155 | 1,0,0.05460,0.01437,-0.02586,0.04598,0.01437,0.04598,-0.07759,0.00862,0.01724,-0.06609,-0.03736,0.04310,-0.08333,-0.04598,-0.09483,0.08046,-0.04023,0.05172,0.02011,0.02299,-0.03736,-0.01149,0.03161,-0.00862,0.00862,0.01724,0.02586,0.01149,0.02586,0.01149,-0.04598,-0.00575,b
156 | 1,0,0.72414,-0.01084,0.79704,0.01084,0.80000,0.00197,0.79015,0.01084,0.78424,-0.00985,0.83350,0.03251,0.85123,0.01675,0.80099,-0.00788,0.79113,-0.02956,0.75961,0.03350,0.74778,0.05517,0.72611,-0.01478,0.78041,0.00612,0.74089,-0.05025,0.82956,0.02956,0.79015,0.00788,g
157 | 1,0,0.03852,0.02568,0.00428,0,0.01997,-0.01997,0.02140,-0.04993,-0.04850,-0.01284,0.01427,-0.02282,0,-0.03281,-0.04708,-0.02853,-0.01712,0.03566,0.02140,0.00428,0.05136,-0.02282,0.05136,0.01854,0.03994,0.01569,0.01997,0.00713,-0.02568,-0.01854,-0.01427,0.01997,b
158 | 1,0,0.47090,0.22751,0.42328,0.33598,0.25661,0.47619,0.01852,0.49471,-0.02116,0.53968,-0.34127,0.31217,-0.41270,0.32540,-0.51587,0.06878,-0.5,-0.11640,-0.14815,-0.14550,-0.14815,-0.38095,-0.23280,0.00265,0.03574,-0.31739,0.15873,-0.21693,0.24868,-0.24339,0.26720,0.04233,g
159 | 1,0,0.08696,0.00686,0.13959,-0.04119,0.10526,-0.08238,0.12586,-0.06178,0.23341,-0.01144,0.12357,0.07780,0.14645,-0.13501,0.29062,-0.04805,0.18993,0.07323,0.11670,0,0.11213,-0.00229,0.15103,-0.10297,0.08467,0.01373,0.11213,-0.06636,0.09611,-0.07323,0.11670,-0.06865,b
160 | 1,0,0.94333,0.38574,0.48263,0.64534,0.21572,0.77514,-0.55941,0.64899,-0.73675,0.42048,-0.76051,0,-0.62706,-0.31079,-0.38391,-0.62157,-0.12797,-0.69287,0.49909,-0.63620,0.71481,-0.37660,0.73857,-0.05484,0.60098,0.30384,0.45521,0.60512,0.02742,0.54479,-0.21572,0.50457,g
161 | 1,0,0.01975,0.00705,0.04090,-0.00846,0.02116,0.01128,0.01128,0.04372,0.00282,0.00141,0.01975,-0.03103,-0.01975,0.06065,-0.04090,0.02680,-0.02398,-0.00423,0.04372,-0.02539,0.01834,0,0,-0.01269,0.01834,-0.01128,0.00564,-0.01551,-0.01693,-0.02398,0.00705,0,b
162 | 1,0,0.85736,0.00075,0.81927,-0.05676,0.77521,-0.04182,0.84317,0.09037,0.86258,0.11949,0.88051,-0.06124,0.78342,0.03510,0.83719,-0.06796,0.83570,-0.14190,0.88125,0.01195,0.90515,0.02240,0.79686,-0.01942,0.82383,-0.03678,0.88125,-0.06423,0.73936,-0.01942,0.79089,-0.09186,g
163 | 1,0,1,-1,1,1,-1,1,1,-1,1,-1,-1,-1,-1,1,1,1,1,1,-1,1,1,-1,1,-1,1,1,1,1,-1,1,-1,1,b
164 | 1,0,0.85209,0.39252,0.38887,0.76432,0.08858,0.98903,-0.42625,0.88744,-0.76229,0.49980,-0.93092,0.10768,-0.85900,-0.31044,-0.66030,-0.55262,-0.19260,-0.86063,0.28444,-0.80496,0.64649,-0.35230,0.77814,-0.23324,0.71698,0.21343,0.37830,0.58310,0.19667,0.66315,-0.11215,0.64933,g
165 | 1,0,1,1,1,0.51250,0.62500,-1,1,1,0.02500,0.03125,1,1,0,0,1,-1,1,1,1,1,0.31250,1,1,1,1,1,1,1,-0.94375,1,0,0,b
166 | 1,0,1,0.54902,0.62745,1,0.01961,1,-0.49020,0.92157,-0.82353,0.58824,-1,0.11765,-0.96078,-0.33333,-0.64706,-0.68627,-0.23529,-0.86275,0.35294,-1,0.74510,-0.72549,0.92157,-0.21569,0.92874,0.21876,0.72549,0.56863,0.23529,0.90196,-0.11765,0.90196,g
167 | 1,0,0,0,-1,-1,-1,1,0,0,-1,1,1,1,1,-1,0,0,0,0,-1,-1,-1,1,1,0.43750,1,-1,0,0,-1,-1,-1,1,b
168 | 1,0,0.44444,0.44444,0.53695,0.90763,-0.22222,1,-0.33333,0.88889,-1,0.33333,-1,-0.11111,-1,-0.22222,-0.66667,-0.77778,0.55556,-1,-0.22222,-0.77778,0.77778,-0.22222,0.33333,0,0.92120,0.45019,0.57454,0.84353,0.22222,1,-0.55556,1,g
169 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,b
170 | 1,0,1,0,1,0,0.5,0.50000,0.75000,0,0.91201,0.12094,0.89067,0.14210,0.86922,0.16228,0.75000,0.25000,0.75000,0.5,0.75000,0,1,-0.25000,0.5,0.50000,0.73944,0.26388,0.75000,0.25000,0.69635,0.29074,0.67493,0.30293,g
171 | 0,0,-1,1,1,1,0,0,1,-1,1,-1,1,-1,-1,-1,0,0,-1,-1,0,0,0,0,-1,-1,1,-1,1,1,-1,-1,0,0,b
172 | 1,0,1,0,1,0,0.66667,0.11111,1,-0.11111,0.88889,-0.11111,1,-0.22222,0.77778,0,0.77778,0,1,-0.11111,0.77778,-0.11111,0.66667,-0.11111,0.66667,0,0.90347,-0.05352,1,0.11111,0.88889,-0.11111,1,0,g
173 | 0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,0,0,1,0.75000,0,0,0,0,-1,1,0,0,1,-1,-1,-1,1,1,0,0,b
174 | 1,0,1,0.45455,1,-0.45455,1,0.09091,1,-0.09091,1,0,1,-0.27273,1,-0.18182,1,0.09091,1,0,1,-0.36364,1,0.09091,1,-0.09091,1,-0.04914,1,0.45455,1,-0.27273,1,-0.18182,g
175 | 1,0,0.62121,-0.63636,0,0,0,0,0.34470,0.28788,0.42803,0.39394,-0.07576,0.51894,0.36364,0.31439,-0.53788,0.32955,0.12121,-0.14773,0.01894,-0.53409,-0.57576,0.17803,0.29167,-0.27273,0.25758,-0.57576,0.43182,0.24242,0.18182,-0.02273,0.17045,-0.41667,b
176 | 1,0,1,0.11765,1,0.23529,1,0.41176,1,0.05882,1,0.23529,1,0.11765,1,0.47059,1,-0.05882,1,-0.11765,1,0.35294,1,0.41176,1,-0.11765,1,0.20225,1,0.05882,1,0.35294,1,0.23529,g
177 | 1,0,0,0,-1,-0.62766,1,0.51064,0.07979,-0.23404,-1,-0.36170,0.12766,-0.59043,1,-1,0,0,0.82979,-0.07979,-0.25000,1,0.17021,-0.70745,0,0,-0.19149,-0.46809,-0.22340,-0.48936,0.74468,0.90426,-0.67553,0.45745,b
178 | 1,0,0.91667,0.29167,0.83333,-0.16667,0.70833,0.25000,0.87500,-0.08333,0.91667,0.04167,0.83333,0.12500,0.70833,0,0.87500,0.04167,1,0.08333,0.66667,-0.08333,0.75000,0.16667,0.83333,-0.12500,0.83796,0.05503,1,0.20833,0.70833,0,0.70833,0.04167,g
179 | 1,0,0.18590,-0.16667,0,0,0,0,0,0,0,0,0.11538,-0.19071,0,0,0,0,0,0,0,0,-0.05128,-0.06571,0.07853,0.08974,0.17308,-0.10897,0.12500,0.09615,0.02564,-0.04808,0.16827,0.19551,b
180 | 1,0,1,-0.08183,1,-0.11326,0.99246,-0.29802,1,-0.33075,0.96662,-0.34281,0.85788,-0.47265,0.91904,-0.48170,0.73084,-0.65224,0.68131,-0.63544,0.82450,-0.78316,0.58829,-0.74785,0.67033,-0.96296,0.48757,-0.85669,0.37941,-0.83893,0.24117,-0.88846,0.29221,-0.89621,g
181 | 1,0,1,1,-1,1,-1,-0.82456,0.34649,0.21053,0.46053,0.07018,0.22807,0.05702,0.35088,0.34649,0.72807,-0.03947,0.22807,0.53070,0,0,-0.29825,-0.16228,1,-0.66667,1,-1,1,-0.24561,0.35088,0.20175,0.82895,0.07895,b
182 | 1,0,1,0.24077,0.99815,0.00369,0.80244,-0.30133,0.89919,-0.23486,0.70643,-0.24077,0.73855,-0.30539,0.71492,-0.36078,0.47194,-0.61189,0.40473,-0.55059,0.61041,-0.39328,0.53176,-0.32681,0.23966,-0.52142,0.29208,-0.48390,0.12777,-0.39143,0.15657,-0.51329,0.18353,-0.46603,g
183 | 0,0,-1,1,1,-1,0,0,0,0,1,-1,1,1,0,0,1,-1,0,0,0,0,1,1,-1,1,1,-1,-1,1,-1,-1,0,0,b
184 | 1,0,0.92247,-0.19448,0.96419,-0.17674,0.87024,-0.22602,0.81702,-0.27070,0.79271,-0.28909,0.70302,-0.49639,0.63338,-0.49967,0.37254,-0.70729,0.27070,-0.72109,0.40506,-0.54172,0.33509,-0.59691,0.14750,-0.63601,0.09312,-0.59589,-0.07162,-0.54928,-0.01840,-0.54074,-0.07457,-0.47898,g
185 | 1,0,-1,-1,-0.50694,1,1,-1,1,0.53819,0,0,0.23958,-1,1,1,0,0,1,1,1,1,0,0,-0.71528,1,0.33333,-1,1,-1,0.69792,-1,0.47569,1,b
186 | 1,0,0.84177,0.43460,0.5,0.76160,0.09916,0.93460,-0.37764,0.88186,-0.72363,0.61181,-0.93882,0.19409,-0.86709,-0.25527,-0.62869,-0.65612,-0.25105,-0.85654,0.16245,-0.86498,0.51477,-0.66878,0.74895,-0.28903,0.77937,0.07933,0.64135,0.42827,0.31435,0.62447,-0.00422,0.69409,g
187 | 1,0,1,1,0,0,1,-1,-1,-1,1,1,1,-1,0,0,1,-1,1,1,0,0,1,-1,-1,-1,1,1,-1,1,-1,1,0,0,b
188 | 1,0,1,0.63548,1,1,0.77123,1,-0.33333,1,-1,1,0,1,-1,1,-1,0,-1,-0.66667,-1,-0.92536,-1,-0.33333,-0.33333,-1,0.19235,-1,1,-1,0,-1,1,-0.66667,g
189 | 0,0,-1,1,-1,-1,0,0,-1,1,1,-1,-1,-1,-1,1,0,0,-1,-1,-1,1,0,0,1,-1,1,1,1,-1,1,1,0,0,b
190 | 1,0,1,0.06843,1,0.14211,1,0.22108,1,-0.12500,1,0.39495,1,0.48981,1,0.58986,-0.37500,1,1,0,1,0.92001,1,1,1,1,1,1,1,0.25000,1,1,1,1,g
191 | 0,0,-1,-1,0,0,0,0,0,0,0,0,0,0,1,-1,0,0,-1,-1,0,0,1,1,1,-1,1,-1,0,0,0,0,0,0,b
192 | 1,0,0.64947,-0.07896,0.58264,-0.14380,-0.13129,-0.21384,0.29796,0.04403,0.38096,-0.26339,0.28931,-0.31997,0.03459,-0.18947,0.20269,-0.29441,0.15196,-0.29052,0.09513,-0.31525,0.06556,-0.26795,0.03004,-0.25124,-0.00046,-0.23210,-0.02612,-0.21129,-0.04717,-0.18950,0.01336,-0.27201,g
193 | 1,0,0,0,0,0,0,0,0,0,1,-0.33333,0.16667,0.26042,0,0,0,0,0,0,-0.19792,-0.21875,-0.16667,0.90625,-1,0.5,0.04167,0.75000,-0.22917,-1,-0.12500,-0.27083,-0.19792,-0.93750,b
194 | 1,0,1,0.05149,0.99363,0.10123,0.96142,0.14756,0.95513,-0.26496,0.66026,0.54701,0.80426,0.25283,0.73781,0.27380,0.66775,0.28714,0.59615,0.29304,0.52494,0.29200,0.45582,0.28476,0.39023,0.27226,0.32930,0.25553,0.27381,0.23568,0.22427,0.21378,0.18086,0.19083,g
195 | 1,0,1,-0.09524,-1,-1,-1,-1,1,0.31746,0.81349,0.76190,-1,-1,-1,1,0.47364,1,1,1,0.68839,-1,-1,-1,0.82937,0.36508,1,1,1,0.50794,-1,-0.32540,-1,0.72831,b
196 | 1,0,0.93669,-0.00190,0.60761,0.43204,0.92314,-0.40129,0.93123,0.16828,0.96197,0.09061,0.99676,0.08172,0.91586,0.05097,0.84628,-0.25324,0.87379,-0.14482,0.84871,0.26133,0.75081,-0.03641,0.84547,-0.02589,0.87293,-0.02302,0.98544,0.09385,0.78317,-0.10194,0.85841,-0.14725,g
197 | 1,0,1,-1,1,1,1,1,1,-0.5,1,1,1,1,1,1,0,0,1,1,1,1,1,-1,1,1,1,0.62500,1,-0.75000,-0.75000,1,1,1,b
198 | 1,0,1,0.23058,1,-0.78509,1,-0.10401,1,0.15414,1,0.27820,0.98120,-0.06861,1,0.06610,0.95802,-0.18954,0.83584,-0.15633,0.97400,0.03728,0.99624,0.09242,1,-0.01253,0.96238,-0.04597,0.91165,0.03885,1,-0.13722,0.96523,-0.11717,g
199 | 1,0,0.36876,-1,-1,-1,-0.07661,1,1,0.95041,0.74597,-0.38710,-1,-0.79313,-0.09677,1,0.48684,0.46502,0.31755,-0.27461,-0.14343,-0.20188,-0.11976,0.06895,0.03021,0.06639,0.03443,-0.01186,-0.00403,-0.01672,-0.00761,0.00108,0.00015,0.00325,b
200 | 1,0,0.79847,0.38265,0.80804,-0.16964,1,-0.07653,0.98151,-0.07398,0.70217,0.20663,0.99745,0.02105,0.98214,0.02487,1,-0.13074,0.95663,0.07717,1,0.00191,0.90306,0.30804,1,-0.14541,1,-0.00394,0.75638,0.07908,1,-0.18750,1,-0.05740,g
201 | 0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,-1,0,0,1,1,1,-1,1,1,1,0,1,1,1,-1,0,0,b
202 | 1,0,1,-0.28428,1,-0.25346,0.94623,-0.35094,1,-0.30566,0.92736,-0.49057,0.90818,-0.44119,0.75723,-0.58899,0.69748,-0.58019,0.59623,-0.57579,0.68459,-0.70975,0.54465,-0.87327,0.49214,-0.73333,0.35504,-0.76054,0.26352,-0.78239,0.16604,-0.73145,0.13994,-0.70000,g
203 | 1,0,0,0,0,0,0,0,-0.85000,-1,0,0,1,-1,0,0,-1,-1,-1,-1,1,-1,-0.60000,-1,1,1,-1,-0.20000,1,-1,0,1,0,0,b
204 | 1,0,1,0.09091,0.95455,-0.09091,0.77273,0,1,0,0.95455,0,1,0.04545,0.90909,-0.04545,1,0,1,0,0.86364,0.09091,0.77273,0.09091,0.90909,0.04545,0.91541,0.02897,0.95455,0.09091,0.86364,-0.09091,0.86364,0.04545,g
205 | 0,0,0,0,-1,1,1,1,-1,-1,0,0,-1,-1,-1,-0.31250,-1,-1,1,-1,1,-1,0,0,1,-1,-1,-1,0,0,1,-1,0,0,b
206 | 1,0,0.91176,-0.08824,0.97059,0.17647,0.82353,0.08824,0.91176,-0.02941,0.97059,-0.17647,0.97059,0.14706,0.94118,0.02941,1,0,1,0,0.76471,0.11765,0.88235,0.02941,0.85294,0.02941,0.92663,0.02600,0.94118,-0.11765,0.97059,0.05882,0.91176,0.05882,g
207 | 1,0,-1,1,-1,0.15244,0.28354,1,-1,1,-1,-1,1,1,-1,-0.23476,0.28301,-1,1,1,-0.31402,-1,-1,-1,1,-1,-1,-0.03578,1,-1,-1,-0.32317,0.14939,1,b
208 | 1,0,0.47368,-0.10526,0.83781,0.01756,0.83155,0.02615,0.68421,-0.05263,0.68421,0,0.79856,0.05028,0.78315,0.05756,0.84211,0.47368,1,0.05263,0.72550,0.07631,0.70301,0.08141,0.42105,0.21053,0.65419,0.08968,0.52632,-0.21053,0.60150,0.09534,0.57418,0.09719,g
209 | 1,0,-0.00641,-0.5,0,0,-0.01923,1,0,0,0,0,0,0,0,0,0,0,0.31410,0.92949,-0.35256,0.74359,-0.34615,-0.80769,0,0,-0.61538,-0.51282,0,0,0,0,0,0,b
210 | 1,0,1,0.45455,1,0.54545,0.81818,0.63636,1,-0.09091,1,0,0.81818,-0.45455,0.63636,0.27273,1,-0.63636,1,-0.27273,0.90909,-0.45455,1,0.07750,1,-0.09091,1,0.08867,1,0.36364,1,0.63636,0.72727,0.27273,g
211 | 0,0,-1,-1,1,-1,-1,1,0,0,1,-1,1,-1,0,0,0,0,0,0,-1,1,1,-1,-1,1,1,1,0,0,1,0.5,0,0,b
212 | 1,0,0.45455,0.09091,0.63636,0.09091,0.27273,0.18182,0.63636,0,0.36364,-0.09091,0.45455,-0.09091,0.48612,-0.01343,0.63636,-0.18182,0.45455,0,0.36364,-0.09091,0.27273,0.18182,0.36364,-0.09091,0.34442,-0.01768,0.27273,0,0.36364,0,0.28985,-0.01832,g
213 | 1,0,-1,-0.59677,0,0,-1,0.64516,-0.87097,1,0,0,0,0,0,0,0,0,0,0,-1,-1,0,0,0.29839,0.23387,1,0.51613,0,0,0,0,0,0,b
214 | 1,0,1,0.14286,1,0.71429,1,0.71429,1,-0.14286,0.85714,-0.14286,1,0.02534,1,0,0.42857,-0.14286,1,0.03617,1,-0.28571,1,0,0.28571,-0.28571,1,0.04891,1,0.05182,1,0.57143,1,0,g
215 | 0,0,1,1,1,-1,1,1,1,1,1,1,1,-1,1,1,1,-1,1,-1,1,1,1,1,1,-1,1,1,1,1,1,1,1,1,b
216 | 1,0,0.87032,0.46972,0.53945,0.82161,0.10380,0.95275,-0.38033,0.87916,-0.73939,0.58226,-0.92099,0.16731,-0.82417,-0.24942,-0.59383,-0.63342,-0.24012,-0.82881,0.18823,-0.78699,0.51557,-0.57430,0.69274,-0.24843,0.69097,0.10484,0.52798,0.39762,0.25974,0.56573,-0.06739,0.57552,g
217 | 0,0,1,-1,1,1,1,-1,1,1,1,-1,1,-1,1,-1,1,1,1,1,1,1,1,-1,1,1,1,1,1,1,1,1,1,-1,b
218 | 1,0,0.92657,0.04174,0.89266,0.15766,0.86098,0.19791,0.83675,0.36526,0.80619,0.40198,0.76221,0.40552,0.66586,0.48360,0.60101,0.51752,0.53392,0.52180,0.48435,0.54212,0.42546,0.55684,0.33340,0.55274,0.26978,0.54214,0.22307,0.53448,0.14312,0.49124,0.11573,0.46571,g
219 | 0,0,1,1,1,-1,1,-1,1,1,0,0,1,-1,0,0,0,0,0,0,-1,1,1,1,0,0,1,1,0,0,-1,-1,0,0,b
220 | 1,0,0.93537,0.13645,0.93716,0.25359,0.85705,0.38779,0.79039,0.47127,0.72352,0.59942,0.65260,0.75000,0.50830,0.73586,0.41629,0.82742,0.25539,0.85952,0.13712,0.85615,0.00494,0.88869,-0.07361,0.79780,-0.20995,0.78004,-0.33169,0.71454,-0.38532,0.64363,-0.47419,0.55835,g
221 | 0,0,1,-1,-1,1,-1,1,1,1,1,1,-1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,1,-1,1,-1,-1,1,-1,1,b
222 | 1,0,0.80627,0.13069,0.73061,0.24323,0.64615,0.19038,0.36923,0.45577,0.44793,0.46439,0.25000,0.57308,0.25192,0.37115,0.15215,0.51877,-0.09808,0.57500,-0.03462,0.42885,-0.08856,0.44424,-0.14943,0.40006,-0.19940,0.34976,-0.23832,0.29541,-0.26634,0.23896,-0.23846,0.31154,g
223 | 0,0,1,-1,1,1,1,-1,1,1,1,-1,1,1,1,-1,1,-1,1,1,1,1,1,-1,1,-1,1,-1,1,1,1,-1,1,1,b
224 | 1,0,0.97467,0.13082,0.94120,0.20036,0.88783,0.32248,0.89009,0.32711,0.85550,0.45217,0.72298,0.52284,0.69946,0.58820,0.58548,0.66893,0.48869,0.70398,0.44245,0.68159,0.35289,0.75622,0.26832,0.76210,0.16813,0.78541,0.07497,0.80439,-0.02962,0.77702,-0.10289,0.74242,g
225 | 0,0,0,0,1,1,0,0,1,1,0,0,1,-1,0,0,0,0,0,0,0,0,0,0,0,0,1,-1,0,0,-1,1,0,0,b
226 | 1,0,0.92308,0.15451,0.86399,0.29757,0.72582,0.36790,0.70588,0.56830,0.57449,0.62719,0.43270,0.74676,0.31705,0.67697,0.19128,0.76818,0.04686,0.76171,-0.12064,0.76969,-0.18479,0.71327,-0.29291,0.65708,-0.38798,0.58553,-0.46799,0.50131,-0.53146,0.40732,-0.56231,0.35095,g
227 | 0,0,0,0,1,1,1,1,0,0,0,0,-1,-1,0,0,-1,-1,0,0,0,0,1,1,0,0,1,1,0,0,-1,1,0,0,b
228 | 1,0,0.88804,0.38138,0.65926,0.69431,0.29148,0.87892,-0.06726,0.90135,-0.39597,0.80441,-0.64574,0.56502,-0.82960,0.26906,-0.78940,-0.08205,-0.62780,-0.30942,-0.46637,-0.55605,-0.16449,-0.64338,0.09562,-0.61055,0.30406,-0.48392,0.43227,-0.29838,0.47029,-0.09461,0.42152,0.12556,g
229 | 0,0,1,-1,1,1,1,1,1,1,1,1,1,-1,1,1,1,1,1,-1,1,-1,1,-1,1,-1,1,1,1,-1,1,1,1,1,b
230 | 1,0,0.73523,-0.38293,0.80151,0.10278,0.78826,0.15266,0.55580,0.05252,1,0.21225,0.71947,0.28954,0.68798,0.32925,0.49672,0.17287,0.64333,-0.02845,0.57399,0.42528,0.53120,0.44872,0.94530,0.57549,0.44174,0.48200,0.12473,1,0.35070,0.49721,0.30588,0.49831,g
231 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,b
232 | 1,0,0.94649,0.00892,0.97287,-0.00260,0.98922,0.00372,0.95801,0.01598,0.94054,0.03530,0.97213,0.04719,0.98625,0.01858,0.94277,0.07135,0.98551,-0.00706,0.97770,0.04980,0.96358,0.07098,0.93274,0.08101,0.95243,0.04356,0.97473,0.00818,0.97845,0.07061,1,-0.00260,g
233 | 0,0,1,1,-1,-1,-1,-1,0,0,0,0,-1,-1,0,0,0,0,0,0,-1,1,1,1,0,0,1,-1,0,0,-1,-1,-1,-1,b
234 | 1,0,0.50466,-0.16900,0.71442,0.01513,0.71063,0.02258,0.68065,0.01282,0.34615,0.05594,0.69050,0.04393,0.68101,0.05058,0.67023,0.05692,0.63403,-0.04662,0.64503,0.06856,0.63077,0.07381,0.84033,0.18065,0.59935,0.08304,0.38228,0.06760,0.56466,0.09046,0.54632,0.09346,g
235 | 1,0,0.68729,1,0.91973,-0.76087,0.81773,0.04348,0.76087,0.10702,0.86789,0.73746,0.70067,0.18227,0.75920,0.13712,0.93478,-0.25084,0.70736,0.18729,0.64883,0.24582,0.60201,0.77425,1,-0.53846,0.89262,0.22216,0.71070,0.53846,1,-0.06522,0.56522,0.23913,b
236 | 1,0,0.76296,-0.07778,1,-0.29630,1,-0.85741,0.80000,0.06111,0.45556,-0.42778,1,-0.12581,1,-0.83519,0.49259,0.01852,0.82222,-0.05926,0.98215,-0.19938,1,0.22037,0.69630,-0.26481,0.92148,-0.24549,0.78889,0.02037,0.87492,-0.27105,1,-0.57037,g
237 | 1,0,0.38521,0.15564,0.41245,0.07393,0.26459,0.24125,0.23346,0.13230,0.19455,0.25292,0.24514,0.36965,0.08949,0.22957,-0.03891,0.36965,0.05058,0.24903,0.24903,0.09728,0.07782,0.29961,-0.02494,0.28482,-0.06024,0.26256,-0.14786,0.14786,-0.09339,0.31128,-0.19066,0.28794,b
238 | 1,0,0.57540,-0.03175,0.75198,-0.05357,0.61508,-0.01190,0.53968,0.03373,0.61706,0.09921,0.59127,-0.02381,0.62698,0.01190,0.70833,0.02579,0.60317,0.01587,0.47817,-0.02778,0.59127,0.03770,0.5,0.03968,0.61291,-0.01237,0.61706,-0.13492,0.68849,-0.01389,0.62500,-0.03175,g
239 | 1,0,0.06404,-0.15271,-0.04433,0.05911,0.08374,-0.02463,-0.01478,0.18719,0.06404,0,0.12315,-0.09852,0.05911,0,0.01970,-0.02956,-0.12808,-0.20690,0.06897,0.01478,0.06897,0.02956,0.07882,0.16256,0.28079,-0.04926,-0.05911,-0.09360,0.04433,0.05419,0.07389,-0.10837,b
240 | 1,0,0.61857,0.10850,0.70694,-0.06935,0.70358,0.01678,0.74273,0.00224,0.71029,0.15772,0.71588,-0.00224,0.79754,0.06600,0.83669,-0.16555,0.68680,-0.09060,0.62528,-0.01342,0.60962,0.11745,0.71253,-0.09508,0.69845,-0.01673,0.63311,0.04810,0.78859,-0.05145,0.65213,-0.04698,g
241 | 1,0,0.25316,0.35949,0,0,-0.29620,-1,0,0,0.07595,-0.07342,0,0,0,0,0,0,0,0,0.00759,0.68101,-0.20000,0.33671,-0.10380,0.35696,0.05570,-1,0,0,0.06329,-1,0,0,b
242 | 1,0,0.88103,-0.00857,0.89818,-0.02465,0.94105,-0.01822,0.89175,-0.12755,0.82208,-0.10932,0.88853,0.01179,0.90782,-0.13719,0.87138,-0.06109,0.90782,-0.02358,0.87996,-0.14577,0.82851,-0.12433,0.90139,-0.19507,0.88245,-0.14903,0.84352,-0.12862,0.88424,-0.18542,0.91747,-0.16827,g
243 | 1,0,0.42708,-0.5,0,0,0,0,0.46458,0.51042,0.58958,0.02083,0,0,0,0,0.16458,-0.45417,0.59167,-0.18333,0,0,0,0,0.98750,-0.40833,-1,-1,-0.27917,-0.75625,0,0,0,0,b
244 | 1,0,0.88853,0.01631,0.92007,0.01305,0.92442,0.01359,0.89179,-0.10223,0.90103,-0.08428,0.93040,-0.01033,0.93094,-0.08918,0.86025,-0.05057,0.89451,-0.04024,0.88418,-0.12126,0.88907,-0.11909,0.82980,-0.14138,0.86453,-0.11808,0.85536,-0.13051,0.83524,-0.12452,0.86786,-0.12235,g
245 | 1,0,0,0,1,0.12889,0.88444,-0.02000,0,0,1,-0.42444,1,0.19556,1,-0.05333,1,-0.81556,0,0,1,-0.04000,1,-0.18667,0,0,1,-1,0,0,1,0.11778,0.90667,-0.09556,b
246 | 1,0,0.81143,0.03714,0.85143,-0.00143,0.79000,0.00714,0.79571,-0.04286,0.87571,0,0.85571,-0.06714,0.86429,0.00286,0.82857,-0.05429,0.81000,-0.11857,0.76857,-0.08429,0.84286,-0.05000,0.77000,-0.06857,0.81598,-0.08669,0.82571,-0.10429,0.81429,-0.05000,0.82143,-0.15143,g
247 | 1,0,0,0,0,0,0,0,0,0,0,0,-1,1,1,0.55172,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,b
248 | 1,0,0.49870,0.01818,0.43117,-0.09610,0.50649,-0.04156,0.50130,0.09610,0.44675,0.05974,0.55844,-0.11948,0.51688,-0.03636,0.52727,-0.05974,0.55325,-0.01039,0.48571,-0.03377,0.49091,-0.01039,0.59221,0,0.53215,-0.03280,0.43117,0.03377,0.54545,-0.05455,0.58961,-0.08571,g
249 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,-1,0,0,0,0,0,0,b
250 | 1,0,1,0.5,1,0.25000,0.25000,1,0.16851,0.91180,-0.13336,0.80454,-0.34107,0.60793,-0.43820,0.37856,-0.43663,0.16709,-0.36676,0.00678,-0.26477,-0.09025,-0.16178,-0.12964,-0.07782,-0.12744,-0.02089,-0.10242,0.01033,-0.07036,0.02224,-0.04142,0.02249,-0.02017,g
251 | 1,0,0,0,0,0,1,1,-1,-1,0,0,1,-0.11111,0,0,0,0,-1,1,1,1,1,-1,0,0,1,-1,0,0,0,0,1,1,b
252 | 1,0,0.87048,0.38027,0.64099,0.69212,0.31347,0.86625,-0.03933,0.90740,-0.42173,0.79346,-0.70561,0.51560,-0.81049,0.22735,-0.81136,-0.12539,-0.67474,-0.38102,-0.38334,-0.62861,-0.13013,-0.70762,0.15552,-0.66421,0.38544,-0.51568,0.52573,-0.29897,0.56239,-0.05938,0.51460,0.16645,g
253 | 1,0,0,0,0,0,0,0,-1,1,0,0,1,0.37333,-0.12000,-0.12000,0,0,-1,-1,0,0,1,-1,0,0,1,0.22667,0,0,0,0,0,0,b
254 | 1,0,0.88179,0.43491,0.59573,0.77655,0.19672,0.94537,-0.24103,0.92544,-0.62526,0.71257,-0.86443,0.33652,-0.92384,-0.05338,-0.77356,-0.44707,-0.46950,-0.73285,-0.10237,-0.82217,0.26384,-0.77570,0.55984,-0.55910,0.72147,-0.24433,0.72478,0.09599,0.58137,0.38915,0.34749,0.57656,g
255 | 1,0,0.32834,0.02520,0.15236,0.21278,0.14919,0.74003,-0.25706,0.92324,-0.10312,0.19380,-0.61352,0.25786,-0.94053,-0.05409,-0.13117,-0.14329,-0.30315,-0.44615,-0.11409,-0.85597,0.02668,-0.22786,0.27942,-0.06295,0.33737,-0.11876,0.27657,-0.11409,0.15078,0.13296,0.12197,0.20468,g
256 | 1,0,0.83427,0.39121,0.54040,0.78579,0.12326,0.89402,-0.33221,0.83578,-0.70086,0.59564,-0.86622,0.21909,-0.84442,-0.24164,-0.59714,-0.61894,-0.19354,-0.87787,0.12439,-0.89064,0.51109,-0.72454,0.79143,-0.27734,0.83008,0.08718,0.66592,0.49079,0.37542,0.70011,-0.03983,0.79444,g
257 | 1,0,0.62335,-0.03490,0.59085,0.00481,0.60409,-0.07461,0.63177,0.00963,0.62455,-0.07461,0.67028,0.07220,0.62936,-0.08424,0.67509,0.09146,0.67148,0,0.58965,0.10108,0.50060,0.03129,0.65945,0.14079,0.60463,0.02019,0.51384,0.04452,0.61733,-0.00963,0.61372,-0.09146,g
258 | 1,0,0.74449,-0.02390,0.70772,0.03309,0.72243,0.16912,0.79228,0.07721,0.81434,0.43934,0.63787,0.00551,0.70772,0.21691,1,0.06066,0.61029,0.05147,0.67463,0.04228,0.52022,-0.25000,0.72978,-0.15809,0.61727,0.07124,0.30882,0.08640,0.55916,0.07458,0.60294,0.21691,g
259 | 1,0,0.61538,0.18923,0.78157,0.01780,0.77486,0.02647,0.65077,-0.10308,0.77538,0.08000,0.73961,0.05060,0.72322,0.05776,0.68615,-0.08923,0.61692,0.16308,0.66233,0.07573,0.63878,0.08041,0.60154,-0.07231,0.58803,0.08767,0.55077,0.25692,0.53389,0.09207,0.50609,0.09322,g
260 | 1,0,0.68317,0.05375,0.84803,0.00202,0.84341,0.00301,0.84300,0.09901,0.75813,0.04102,0.81892,0.00585,0.80738,0.00673,0.80622,-0.12447,0.77935,-0.03536,0.76365,0.00909,0.74635,0.00978,0.79632,-0.04243,0.70824,0.01096,0.62235,0.11598,0.66624,0.01190,0.64407,0.01227,g
261 | 1,0,0.5,0,0.38696,0.10435,0.49130,0.06522,0.46957,-0.03913,0.35652,-0.12609,0.45652,0.04783,0.50435,0.02609,0.35652,0.19565,0.42174,0.14783,0.42174,-0.02609,0.32174,-0.11304,0.47391,-0.00870,0.41789,0.06908,0.38696,0.03913,0.35217,0.14783,0.44783,0.17391,g
262 | 1,0,0.79830,0.09417,0.78129,0.20656,0.71628,0.28068,0.69320,0.41252,0.65917,0.50122,0.57898,0.60814,0.49210,0.58445,0.33354,0.67861,0.29587,0.63548,0.09599,0.68104,0.02066,0.72236,-0.08748,0.63183,-0.11925,0.60696,-0.18226,0.56015,-0.25516,0.51701,-0.27339,0.42467,g
263 | 1,0,1,0.09802,1,0.25101,0.98390,0.33044,0.80365,0.53020,0.74977,0.60297,0.56937,0.71942,0.55311,0.74079,0.29452,0.82193,0.21137,0.79777,0.09709,0.82162,-0.01734,0.79870,-0.15144,0.75596,-0.22839,0.69187,-0.31713,0.60948,-0.40291,0.54522,-0.42815,0.44534,g
264 | 1,0,0.89410,0.13425,0.87001,0.31543,0.78896,0.43388,0.63388,0.59975,0.54003,0.71016,0.39699,0.76161,0.24266,0.79523,0.09134,0.79598,-0.09159,0.76261,-0.20201,0.66926,-0.30263,0.62610,-0.40552,0.50489,-0.46215,0.40753,-0.50314,0.27252,-0.52823,0.19172,-0.48808,0.05972,g
265 | 1,0,0.94631,0.17498,0.90946,0.33143,0.85096,0.49960,0.73678,0.63842,0.59215,0.73838,0.48698,0.83614,0.30459,0.90665,0.17959,0.93429,-0.00701,0.93109,-0.18880,0.89383,-0.33023,0.82492,-0.46534,0.76482,-0.58563,0.66335,-0.67929,0.52564,-0.75321,0.42488,-0.81210,0.26092,g
266 | 1,0,0.91767,0.18198,0.86090,0.35543,0.72873,0.45747,0.60425,0.69865,0.50376,0.74922,0.36100,0.81795,0.15664,0.83558,0.00396,0.85210,-0.16390,0.77853,-0.35996,0.76193,-0.43087,0.65385,-0.53140,0.53886,-0.60328,0.40972,-0.64511,0.27338,-0.65710,0.13667,-0.64056,0.05394,g
267 | 1,0,0.76627,0.21106,0.63935,0.38112,0.48409,0.52500,0.15000,0.22273,0.13753,0.59565,-0.07727,0.44545,0,0.48636,-0.27491,0.42014,-0.56136,0.36818,-0.36591,0.18864,-0.40533,0.07588,-0.38483,-0.03229,-0.33942,-0.12486,-0.27540,-0.19714,-0.19962,-0.24648,-0.11894,-0.27218,g
268 | 1,0,0.58940,-0.60927,0.85430,0.55298,0.81126,0.07285,0.56623,0.16225,0.32781,0.24172,0.50331,0.12252,0.63907,0.19868,0.71854,0.42715,0.54305,0.13907,0.65232,0.27815,0.68874,0.07285,0.51872,0.26653,0.49013,0.27687,0.46216,0.28574,0.43484,0.29324,0.40821,0.29942,g
269 | 1,0,1,0.11385,0.70019,-0.12144,0.81594,0.09677,0.71157,0.01139,0.56167,-0.07780,0.69070,0.12524,0.58634,0.03985,0.53131,-0.03416,0.69450,0.16888,0.72676,0.07211,0.32068,0.05882,0.53321,0.37381,0.49090,0.17951,0.15180,0.32448,0.44141,0.18897,0.56167,0.15180,g
270 | 1,0,0.84843,0.06794,0.80562,-0.02299,0.77031,-0.03299,0.66725,-0.06620,0.59582,-0.07666,0.67260,-0.05771,0.64260,-0.06438,0.39199,0.04530,0.71254,0.01394,0.55970,-0.08039,0.53430,-0.08453,0.47038,-0.22822,0.48659,-0.09128,0.52613,-0.08537,0.44277,-0.09621,0.42223,-0.09808,g
271 | 1,0,1,0.08013,0.96775,-0.00482,0.96683,-0.00722,0.87980,-0.03923,1,0.01419,0.96186,-0.01436,0.95947,-0.01671,0.98497,0.01002,0.91152,-0.08848,0.95016,-0.02364,0.94636,-0.02591,0.98164,0.02003,0.93772,-0.03034,1,-0.05843,0.92774,-0.03464,0.92226,-0.03673,g
272 | 1,0,0.47938,-0.12371,0.42784,-0.12371,0.70103,-0.39175,0.73196,0.07216,0.26289,-0.21649,0.49485,0.15979,0.45361,-0.11856,0.42268,0.06186,0.5,-0.27320,0.54639,0.18557,0.42268,0.08247,0.70619,0.19588,0.53396,-0.12447,0.15464,-0.26289,0.47423,0.04124,0.45361,-0.51546,g
273 | 1,0,0.63510,-0.04388,0.76530,0.02968,0.61432,0.36028,0.65358,-0.00462,0.64203,0.08314,0.79446,-0.43418,0.72517,0.54965,0.59584,0.13857,0.63510,0.21940,0.63279,-0.25404,0.70951,0.15359,0.64665,0.23095,0.68775,0.17704,0.61663,0.07621,0.66316,0.19841,0.69053,0.36721,g
274 | 1,0,0.50112,-0.03596,0.61124,0.01348,0.58876,0.01573,0.58876,0.02472,0.66742,-0.00449,0.71685,-0.04719,0.66517,0.00899,0.57303,0.02472,0.64719,-0.07416,0.56854,0.14157,0.57528,-0.03596,0.46517,0.04944,0.56588,0.00824,0.47640,-0.03596,0.54607,0.10562,0.60674,-0.08090,g
275 | 1,0,0.71521,-0.00647,0.66667,-0.04207,0.63107,-0.05178,0.77994,0.08091,0.67314,0.09709,0.64725,0.15858,0.60194,-0.01942,0.54369,-0.04531,0.46926,-0.10032,0.64725,0.14887,0.39159,0.21683,0.52427,-0.05502,0.45105,0.00040,0.31392,-0.06796,0.49191,-0.10680,0.30421,-0.05178,g
276 | 1,0,0.68148,0.10370,0.77037,0.03457,0.65185,0.08148,0.60988,-0.00494,0.79012,0.11852,0.59753,0.04938,0.62469,0.09630,0.78272,-0.17531,0.73827,-0.10864,0.48642,0.00988,0.60988,0.08148,0.66667,-0.12840,0.63773,-0.02451,0.76543,0.02222,0.61235,-0.07160,0.51358,-0.04691,g
277 | 1,0,0.60678,-0.02712,0.67119,0.04068,0.52881,-0.04407,0.50508,0.03729,0.70508,-0.07797,0.57966,-0.02034,0.53220,0.07797,0.64068,0.11864,0.56949,-0.02373,0.53220,0.00678,0.71525,-0.03390,0.52881,-0.03390,0.57262,0.00750,0.58644,-0.00339,0.58983,-0.02712,0.50169,0.06780,g
278 | 1,0,0.49515,0.09709,0.29612,0.05825,0.34951,0,0.57282,-0.02427,0.58252,0.02427,0.33495,0.04854,0.52427,0.00485,0.47087,-0.10680,0.43204,0.00485,0.34951,0.05825,0.18932,0.25728,0.31068,-0.15049,0.36547,0.03815,0.39320,0.17476,0.26214,0,0.37379,-0.01942,g
279 | 1,0,0.98822,0.02187,0.93102,0.34100,0.83904,0.35222,0.74706,0.48906,0.73584,0.51879,0.55076,0.60179,0.43130,0.66237,0.31800,0.70443,0.28379,0.68873,0.07515,0.73696,0.06338,0.71284,-0.16489,0.69714,-0.16556,0.60510,-0.16209,0.55805,-0.34717,0.44195,-0.33483,0.37465,g
280 | 1,0,0.97905,0.15810,0.90112,0.35237,0.82039,0.48561,0.71760,0.64888,0.58827,0.73743,0.40349,0.83156,0.25140,0.84804,0.04700,0.85475,-0.12193,0.79749,-0.26180,0.80754,-0.37835,0.71676,-0.51034,0.58324,-0.57587,0.46040,-0.61899,0.30796,-0.65754,0.18345,-0.64134,0.02968,g
281 | 1,0,0.99701,0.21677,0.91966,0.47030,0.76902,0.62415,0.53312,0.78120,0.36774,0.88291,0.10107,0.83312,-0.06827,0.89274,-0.28269,0.72073,-0.43707,0.61688,-0.55769,0.48120,-0.65000,0.35534,-0.64658,0.15908,-0.66651,0.02277,-0.64872,-0.13462,-0.54615,-0.22949,-0.47201,-0.35032,g
282 | 1,0,0.94331,0.19959,0.96132,0.40803,0.80514,0.56569,0.56687,0.70830,0.41836,0.83230,0.14939,0.89489,0.05167,0.93682,-0.24742,0.83939,-0.42811,0.75554,-0.50251,0.62563,-0.65515,0.50428,-0.68851,0.30912,-0.77097,0.15619,-0.75406,-0.04399,-0.75199,-0.17921,-0.66932,-0.34367,g
283 | 1,0,0.93972,0.28082,0.80486,0.52821,0.58167,0.73151,0.34961,0.80511,0.10797,0.90403,-0.20015,0.89335,-0.39730,0.82163,-0.58835,0.62867,-0.76305,0.40368,-0.81262,0.18888,-0.81317,-0.04284,-0.75273,-0.26883,-0.63237,-0.46438,-0.46422,-0.61446,-0.26389,-0.70835,-0.08937,-0.71273,g
284 | 1,0,0.89835,0.35157,0.67333,0.62233,0.43898,0.94353,-0.03643,0.80510,-0.22838,0.75334,-0.25137,0.48816,-0.57377,0.28415,-0.66750,0.10591,-0.47359,-0.06193,-0.81056,-0.06011,-0.33197,-0.47592,-0.12897,-0.53620,0.07158,-0.51925,0.24321,-0.43478,0.36586,-0.30057,0.42805,0.13297,g
285 | 1,0,0.29073,0.10025,0.23308,0.17293,0.03759,0.34336,0.12030,0.26316,0.06266,0.21303,-0.04725,0.12767,-0.06333,0.07907,-0.06328,0.04097,-0.05431,0.01408,-0.04166,-0.00280,-0.02876,-0.01176,-0.01755,-0.01505,-0.00886,-0.01475,-0.00280,-0.01250,0.00096,-0.00948,0.00290,-0.00647,g
286 | 1,0,0.58459,-0.35526,1,0.35338,0.75376,-0.00564,0.82519,0.19361,0.50188,-0.27632,0.65977,0.06391,0.69737,0.14662,0.72368,-0.42669,0.76128,0.04511,0.66917,0.20489,0.84774,-0.40977,0.64850,-0.04699,0.56836,-0.10571,0.52820,-0.13346,0.15602,-0.12218,0.44767,-0.10309,g
287 | 1,0,0.83609,0.13215,0.72171,0.06059,0.65829,0.08315,0.23888,0.12961,0.43837,0.20330,0.49418,0.12686,0.44747,0.13507,0.29352,0.02922,0.48158,0.15756,0.32835,0.14616,0.29495,0.14638,0.26436,0.14530,0.23641,0.14314,0.26429,0.16137,0.18767,0.13632,0.16655,0.13198,g
288 | 1,0,0.94080,0.11933,0.85738,0.01038,0.85124,0.01546,0.76966,-0.00278,0.84459,0.10916,0.83289,0.03027,0.82680,0.03506,0.74838,0.01943,0.80019,0.02405,0.80862,0.04901,0.80259,0.05352,0.77336,0.02220,0.79058,0.06235,0.85939,0.09251,0.77863,0.07090,0.77269,0.07508,g
289 | 1,0,0.87111,0.04326,0.79946,0.18297,0.99009,0.29292,0.89455,-0.08337,0.88598,-0.02028,0.90446,-0.26724,0.89410,0.19964,0.88644,-0.04642,0.84452,-0.00991,0.97882,-0.34024,0.78954,-0.25101,0.86661,-0.09193,0.85967,-0.02908,0.78774,-0.04101,0.75935,0.21812,0.88238,0.09193,g
290 | 1,0,0.74916,0.02549,0.98994,0.09792,0.75855,0.12877,0.74313,-0.09188,0.95842,0.02482,0.97921,-0.00469,0.96110,0.10195,0.91482,0.03756,0.71026,0.02683,0.81221,-0.08048,1,0,0.71764,-0.01207,0.82271,0.02552,0.72435,-0.01073,0.90409,0.11066,0.72837,0.02750,g
291 | 1,0,0.47337,0.19527,0.06213,-0.18343,0.62316,0.01006,0.45562,-0.04438,0.56509,0.01775,0.44675,0.27515,0.71598,-0.03846,0.55621,0.12426,0.41420,0.11538,0.52767,0.02842,0.51183,-0.10651,0.47929,-0.02367,0.46514,0.03259,0.53550,0.25148,0.31953,-0.14497,0.34615,-0.00296,g
292 | 1,0,0.59887,0.14689,0.69868,-0.13936,0.85122,-0.13936,0.80979,0.02448,0.50471,0.02825,0.67420,-0.04520,0.80791,-0.13748,0.51412,-0.24482,0.81544,-0.14313,0.70245,-0.00377,0.33333,0.06215,0.56121,-0.33145,0.61444,-0.16837,0.52731,-0.02072,0.53861,-0.31262,0.67420,-0.22034,g
293 | 1,0,0.84713,-0.03397,0.86412,-0.08493,0.81953,0,0.73673,-0.07643,0.71975,-0.13588,0.74947,-0.11677,0.77495,-0.18684,0.78132,-0.21231,0.61996,-0.10191,0.79193,-0.15711,0.89384,-0.03397,0.84926,-0.26115,0.74115,-0.23312,0.66242,-0.22293,0.72611,-0.37792,0.65817,-0.24841,g
294 | 1,0,0.87772,-0.08152,0.83424,0.07337,0.84783,0.04076,0.77174,-0.02174,0.77174,-0.05707,0.82337,-0.10598,0.67935,-0.00543,0.88043,-0.20924,0.83424,0.03261,0.86413,-0.05978,0.97283,-0.27989,0.85054,-0.18750,0.83705,-0.10211,0.85870,-0.03261,0.78533,-0.10870,0.79076,-0.00543,g
295 | 1,0,0.74704,-0.13241,0.53755,0.16996,0.72727,0.09486,0.69565,-0.11067,0.66798,-0.23518,0.87945,-0.19170,0.73715,0.04150,0.63043,-0.00395,0.63636,-0.11858,0.79249,-0.25296,0.66403,-0.28656,0.67194,-0.10474,0.61847,-0.12041,0.60079,-0.20949,0.37549,0.06917,0.61067,-0.01383,g
296 | 1,0,0.46785,0.11308,0.58980,0.00665,0.55432,0.06874,0.47894,-0.13969,0.52993,0.01330,0.63858,-0.16186,0.67849,-0.03326,0.54545,-0.13525,0.52993,-0.04656,0.47894,-0.19512,0.50776,-0.13525,0.41463,-0.20177,0.53930,-0.11455,0.59867,-0.02882,0.53659,-0.11752,0.56319,-0.04435,g
297 | 1,0,0.88116,0.27475,0.72125,0.42881,0.61559,0.63662,0.38825,0.90502,0.09831,0.96128,-0.20097,0.89200,-0.35737,0.77500,-0.65114,0.62210,-0.78768,0.45535,-0.81856,0.19095,-0.83943,-0.08079,-0.78334,-0.26356,-0.67557,-0.45511,-0.54732,-0.60858,-0.30512,-0.66700,-0.19312,-0.75597,g
298 | 1,0,0.93147,0.29282,0.79917,0.55756,0.59952,0.71596,0.26203,0.92651,0.04636,0.96748,-0.23237,0.95130,-0.55926,0.81018,-0.73329,0.62385,-0.90995,0.36200,-0.92254,0.06040,-0.93618,-0.19838,-0.83192,-0.46906,-0.65165,-0.69556,-0.41223,-0.85725,-0.13590,-0.93953,0.10007,-0.94823,g
299 | 1,0,0.88241,0.30634,0.73232,0.57816,0.34109,0.58527,0.05717,1,-0.09238,0.92118,-0.62403,0.71996,-0.69767,0.32558,-0.81422,0.41195,-1,-0.00775,-0.78973,-0.41085,-0.76901,-0.45478,-0.57242,-0.67605,-0.31610,-0.81876,-0.02979,-0.86841,0.25392,-0.82127,0.00194,-0.81686,g
300 | 1,0,0.83479,0.28993,0.69256,0.47702,0.49234,0.68381,0.21991,0.86761,-0.08096,0.85011,-0.35558,0.77681,-0.52735,0.58425,-0.70350,0.31291,-0.75821,0.03939,-0.71225,-0.15317,-0.58315,-0.39168,-0.37199,-0.52954,-0.16950,-0.60863,0.08425,-0.61488,0.25164,-0.48468,0.40591,-0.35339,g
301 | 1,0,0.92870,0.33164,0.76168,0.62349,0.49305,0.84266,0.21592,0.95193,-0.13956,0.96167,-0.47202,0.83590,-0.70747,0.65490,-0.87474,0.36750,-0.91814,0.05595,-0.89824,-0.26173,-0.73969,-0.54069,-0.50757,-0.74735,-0.22323,-0.86122,0.07810,-0.87159,0.36021,-0.78057,0.59407,-0.60270,g
302 | 1,0,0.83367,0.31456,0.65541,0.57671,0.34962,0.70677,0.17293,0.78947,-0.18976,0.79886,-0.41729,0.66541,-0.68421,0.47744,-0.74725,0.19492,-0.72180,-0.04887,-0.62030,-0.28195,-0.49165,-0.53463,-0.26577,-0.66014,-0.01530,-0.69706,0.22708,-0.64428,0.43100,-0.51206,0.64662,-0.30075,g
303 | 1,0,0.98455,-0.02736,0.98058,-0.04104,1,-0.07635,0.98720,0.01456,0.95278,-0.02604,0.98500,-0.07458,0.99382,-0.07149,0.97396,-0.09532,0.97264,-0.12224,0.99294,-0.05252,0.95278,-0.08914,0.97352,-0.08341,0.96653,-0.12912,0.93469,-0.14916,0.97132,-0.15755,0.96778,-0.18800,g
304 | 1,0,0.94052,-0.01531,0.94170,0.01001,0.94994,-0.01472,0.95878,-0.01060,0.94641,-0.03710,0.97173,-0.01767,0.97055,-0.03887,0.95465,-0.04064,0.95230,-0.04711,0.94229,-0.02179,0.92815,-0.04417,0.92049,-0.04476,0.92695,-0.05827,0.90342,-0.07479,0.91991,-0.07244,0.92049,-0.07420,g
305 | 1,0,0.97032,-0.14384,0.91324,-0.00228,0.96575,-0.17123,0.98630,0.18265,0.91781,0.00228,0.93607,-0.08447,0.91324,-0.00228,0.86758,-0.08676,0.97032,-0.21233,1,0.10274,0.92009,-0.05251,0.92466,0.06849,0.94043,-0.09252,0.97032,-0.20091,0.85388,-0.08676,0.96575,-0.21918,g
306 | 1,0,0.52542,-0.03390,0.94915,0.08475,0.52542,-0.16949,0.30508,-0.01695,0.50847,-0.13559,0.64407,0.28814,0.83051,-0.35593,0.54237,0.01695,0.55932,0.03390,0.59322,0.30508,0.86441,0.05085,0.40678,0.15254,0.67287,-0.00266,0.66102,-0.03390,0.83051,-0.15254,0.76271,-0.10169,g
307 | 1,0,0.33333,-0.25000,0.44444,0.22222,0.38889,0.16667,0.41667,0.13889,0.5,-0.11111,0.54911,-0.08443,0.58333,0.33333,0.55556,0.02778,0.25000,-0.19444,0.47222,-0.05556,0.52778,-0.02778,0.38889,0.08333,0.41543,-0.14256,0.19444,-0.13889,0.36924,-0.14809,0.08333,-0.5,g
308 | 1,0,0.51207,1,1,0.53810,0.71178,0.80833,0.45622,0.46427,0.33081,1,0.21249,1,-0.17416,1,-0.33081,0.98722,-0.61382,1,-0.52674,0.71699,-0.88500,0.47894,-1,0.35175,-1,0.09569,-1,-0.16713,-1,-0.42226,-0.91903,-0.65557,g
309 | 1,0,0.75564,0.49638,0.83550,0.54301,0.54916,0.72063,0.35225,0.70792,0.13469,0.94749,-0.09818,0.93778,-0.37604,0.82223,-0.52742,0.71161,-0.68358,0.67989,-0.70163,0.24956,-0.79147,0.02995,-0.98988,-0.29099,-0.70352,-0.32792,-0.63312,-0.19185,-0.34131,-0.60454,-0.19609,-0.62956,g
310 | 1,0,0.83789,0.42904,0.72113,0.58385,0.45625,0.78115,0.16470,0.82732,-0.13012,0.86947,-0.46177,0.78497,-0.59435,0.52070,-0.78470,0.26529,-0.84014,0.03928,-0.62041,-0.31351,-0.47412,-0.48905,-0.37298,-0.67796,-0.05054,-0.62691,0.14690,-0.45911,0.37093,-0.39167,0.48319,-0.24313,g
311 | 1,0,0.93658,0.35107,0.75254,0.65640,0.45571,0.88576,0.15323,0.95776,-0.21775,0.96301,-0.56535,0.83397,-0.78751,0.58045,-0.93104,0.26020,-0.93641,-0.06418,-0.87028,-0.40949,-0.65079,-0.67464,-0.36799,-0.84951,-0.04578,-0.91221,0.27330,-0.85762,0.54827,-0.69613,0.74828,-0.44173,g
312 | 1,0,0.92436,0.36924,0.71976,0.68420,0.29303,0.94078,-0.11108,0.76527,-0.31605,0.92453,-0.66616,0.78766,-0.92145,0.42314,-0.94315,0.09585,-1,0.03191,-0.66431,-0.66278,-0.46010,-0.78174,-0.13486,-0.88082,0.19765,-0.85137,0.48904,-0.70247,0.69886,-0.46048,0.76066,-0.13194,g
313 | 1,0,1,0.16195,1,-0.05558,1,0.01373,1,-0.12352,1,-0.01511,1,-0.01731,1,-0.06374,1,-0.07157,1,0.05900,1,-0.10108,1,-0.02685,1,-0.22978,1,-0.06823,1,0.08299,1,-0.14194,1,-0.07439,g
314 | 1,0,0.95559,-0.00155,0.86421,-0.13244,0.94982,-0.00461,0.82809,-0.51171,0.92441,0.10368,1,-0.14247,0.99264,-0.02542,0.95853,-0.15518,0.84013,0.61739,1,-0.16321,0.87492,-0.08495,0.85741,-0.01664,0.84132,-0.01769,0.82427,-0.01867,0.80634,-0.01957,0.78761,-0.02039,g
315 | 1,0,0.79378,0.29492,0.64064,0.52312,0.41319,0.68158,0.14177,0.83548,-0.16831,0.78772,-0.42911,0.72328,-0.57165,0.41471,-0.75436,0.16755,-0.69977,-0.09856,-0.57695,-0.23503,-0.40637,-0.38287,-0.17437,-0.52540,0.01523,-0.48707,0.19030,-0.38059,0.31008,-0.23199,0.34572,-0.08036,g
316 | 1,0,0.88085,0.35232,0.68389,0.65128,0.34816,0.79784,0.05832,0.90842,-0.29784,0.86490,-0.62635,0.69590,-0.77106,0.39309,-0.85803,0.08408,-0.81641,-0.24017,-0.64579,-0.50022,-0.39766,-0.68337,-0.11147,-0.75533,0.17041,-0.71504,0.40675,-0.57649,0.56626,-0.36765,0.62765,-0.13305,g
317 | 1,0,0.89589,0.39286,0.66129,0.71804,0.29521,0.90824,-0.04787,0.94415,-0.45725,0.84605,-0.77660,0.58511,-0.92819,0.25133,-0.92282,-0.15315,-0.76064,-0.48404,-0.50931,-0.76197,-0.14895,-0.88591,0.21581,-0.85703,0.53229,-0.68593,0.74846,-0.40656,0.83142,-0.07029,0.76862,0.27926,g
318 | 1,0,1,-0.24051,1,-0.20253,0.87342,-0.10127,0.88608,0.01266,1,0.11392,0.92405,0.06329,0.84810,-0.03797,0.63291,-0.36709,0.87342,-0.01266,0.93671,0.06329,1,0.25316,0.62025,-0.37975,0.84637,-0.05540,1,-0.06329,0.53165,0.02532,0.83544,-0.02532,g
319 | 1,0,0.74790,0.00840,0.83312,0.01659,0.82638,0.02469,0.86555,0.01681,0.60504,0.05882,0.79093,0.04731,0.77441,0.05407,0.64706,0.19328,0.84034,0.04202,0.71285,0.07122,0.68895,0.07577,0.66387,0.08403,0.63728,0.08296,0.61345,0.01681,0.58187,0.08757,0.55330,0.08891,g
320 | 1,0,0.85013,0.01809,0.92211,0.01456,0.92046,0.02180,0.92765,0.08010,0.87597,0.11370,0.91161,0.04320,0.90738,0.05018,0.87339,0.02842,0.95866,0,0.89097,0.07047,0.88430,0.07697,0.83721,0.10853,0.86923,0.08950,0.87597,0.08786,0.85198,0.10134,0.84258,0.10698,g
321 | 1,0,1,-0.01179,1,-0.00343,1,-0.01565,1,-0.01565,1,-0.02809,1,-0.02187,0.99828,-0.03087,0.99528,-0.03238,0.99314,-0.03452,1,-0.03881,1,-0.05039,1,-0.04931,0.99842,-0.05527,0.99400,-0.06304,0.99057,-0.06497,0.98971,-0.06668,g
322 | 1,0,0.89505,-0.03168,0.87525,0.05545,0.89505,0.01386,0.92871,0.02772,0.91287,-0.00990,0.94059,-0.01584,0.91881,0.03366,0.93663,0,0.94257,0.01386,0.90495,0.00792,0.88713,-0.01782,0.89307,0.02376,0.89002,0.01611,0.88119,0.00198,0.87327,0.04158,0.86733,0.02376,g
323 | 1,0,0.90071,0.01773,1,-0.01773,0.90071,0.00709,0.84752,0.05674,1,0.03546,0.97872,0.01064,0.97518,0.03546,1,-0.03191,0.89716,-0.03191,0.86170,0.07801,1,0.09220,0.90071,0.04610,0.94305,0.03247,0.94681,0.02482,1,0.01064,0.93617,0.02128,g
324 | 1,0,0.39394,-0.24242,0.62655,0.01270,0.45455,0.09091,0.63636,0.09091,0.21212,-0.21212,0.57576,0.15152,0.39394,0,0.56156,0.04561,0.51515,0.03030,0.78788,0.18182,0.30303,-0.15152,0.48526,0.05929,0.46362,0.06142,0.33333,-0.03030,0.41856,0.06410,0.39394,0.24242,g
325 | 1,0,0.86689,0.35950,0.72014,0.66667,0.37201,0.83049,0.08646,0.85893,-0.24118,0.86121,-0.51763,0.67577,-0.68714,0.41524,-0.77019,0.09898,-0.69397,-0.13652,-0.49488,-0.42207,-0.32537,-0.57679,-0.02844,-0.59954,0.15360,-0.53127,0.32309,-0.37088,0.46189,-0.19681,0.40956,0.01820,g
326 | 1,0,0.89563,0.37917,0.67311,0.69438,0.35916,0.88696,-0.04193,0.93345,-0.38875,0.84414,-0.67274,0.62078,-0.82680,0.30356,-0.86150,-0.05365,-0.73564,-0.34275,-0.51778,-0.62443,-0.23428,-0.73855,0.06911,-0.73856,0.33531,-0.62296,0.52414,-0.42086,0.61217,-0.17343,0.60073,0.08660,g
327 | 1,0,0.90547,0.41113,0.65354,0.74761,0.29921,0.95905,-0.13342,0.97820,-0.52236,0.83263,-0.79657,0.55086,-0.96631,0.15192,-0.93001,-0.25554,-0.71863,-0.59379,-0.41546,-0.85205,-0.02250,-0.93788,0.36318,-0.85368,0.67538,-0.61959,0.85977,-0.28123,0.88654,0.09800,0.75495,0.46301,g
328 | 1,0,1,1,0.36700,0.06158,0.12993,0.92713,-0.27586,0.93596,-0.31527,0.37685,-0.87192,0.36946,-0.92857,-0.08867,-0.38916,-0.34236,-0.46552,-0.82512,-0.05419,-0.93596,0.25616,-0.20443,0.73792,-0.45950,0.85471,-0.06831,1,1,0.38670,0.00246,0.17758,0.79790,g
329 | 1,0,1,0.51515,0.45455,0.33333,0.06061,0.36364,-0.32104,0.73062,-0.45455,0.48485,-0.57576,0,-0.57576,-0.12121,-0.33333,-0.48485,-0.09091,-0.84848,0.48485,-0.57576,0.57576,-0.42424,1,-0.39394,0.72961,0.12331,0.96970,0.57576,0.24242,0.36364,0.09091,0.33333,g
330 | 1,0,0.88110,0,0.94817,-0.02744,0.93598,-0.01220,0.90244,0.01829,0.90244,0.01829,0.93902,0.00915,0.95732,0.00305,1,0.02744,0.94207,-0.01220,0.90854,0.02439,0.91463,0.05488,0.99695,0.04878,0.89666,0.02226,0.90854,0.00915,1,0.05488,0.97561,-0.01220,g
331 | 1,0,0.82624,0.08156,0.79078,-0.08156,0.90426,-0.01773,0.92908,0.01064,0.80142,0.08865,0.94681,-0.00709,0.94326,0,0.93262,0.20213,0.95035,-0.00709,0.91489,0.00709,0.80496,0.07092,0.91135,0.15957,0.89527,0.08165,0.77660,0.06738,0.92553,0.18085,0.92553,0,g
332 | 1,0,0.74468,0.10638,0.88706,0.00982,0.88542,0.01471,0.87234,-0.01418,0.73050,0.10638,0.87657,0.02912,0.87235,0.03382,0.95745,0.07801,0.95035,0.04255,0.85597,0.04743,0.84931,0.05178,0.87234,0.11348,0.83429,0.06014,0.74468,-0.03546,0.81710,0.06800,0.80774,0.07173,g
333 | 1,0,0.87578,0.03727,0.89951,0.00343,0.89210,0.00510,0.86335,0,0.95031,0.07453,0.87021,0.00994,0.86303,0.01151,0.83851,-0.06211,0.85714,0.02484,0.84182,0.01603,0.83486,0.01749,0.79503,-0.04348,0.82111,0.02033,0.81988,0.08696,0.80757,0.02308,0.80088,0.02441,g
334 | 1,0,0.97513,0.00710,0.98579,0.01954,1,0.01954,0.99290,0.01599,0.95737,0.02309,0.97158,0.03552,1,0.03730,0.97869,0.02131,0.98579,0.05684,0.97158,0.04796,0.94494,0.05506,0.98401,0.03552,0.97540,0.06477,0.94849,0.08171,0.99112,0.06217,0.98934,0.09947,g
335 | 1,0,1,0.01105,1,0.01105,1,0.02320,0.99448,-0.01436,0.99448,-0.00221,0.98343,0.02320,1,0.00884,0.97569,0.00773,0.97901,0.01657,0.98011,0.00663,0.98122,0.02099,0.97127,-0.00663,0.98033,0.01600,0.97901,0.01547,0.98564,0.02099,0.98674,0.02762,g
336 | 1,0,1,-0.01342,1,0.01566,1,-0.00224,1,0.06264,0.97763,0.04474,0.95973,0.02908,1,0.06488,0.98881,0.03356,1,0.03579,0.99776,0.09396,0.95749,0.07383,1,0.10067,0.99989,0.08763,0.99105,0.08501,1,0.10067,1,0.10067,g
337 | 1,0,0.88420,0.36724,0.67123,0.67382,0.39613,0.86399,0.02424,0.93182,-0.35148,0.83713,-0.60316,0.58842,-0.78658,0.38778,-0.83285,-0.00642,-0.69318,-0.32963,-0.52504,-0.53924,-0.27377,-0.68126,0.00806,-0.69774,0.26028,-0.60678,0.44569,-0.43383,0.54209,-0.21542,0.56286,0.02823,g
338 | 1,0,0.90147,0.41786,0.64131,0.75725,0.30440,0.95148,-0.20449,0.96534,-0.55483,0.81191,-0.81857,0.50949,-0.96986,0.10345,-0.91456,-0.31412,-0.70163,-0.65461,-0.32354,-0.88999,0.05865,-0.94172,0.44483,-0.82154,0.74105,-0.55231,0.89415,-0.18725,0.87893,0.20359,0.70555,0.54852,g
339 | 1,0,0.32789,0.11042,0.15970,0.29308,0.14020,0.74485,-0.25131,0.91993,-0.16503,0.26664,-0.63714,0.24865,-0.97650,-0.00337,-0.23227,-0.19909,-0.30522,-0.48886,-0.14426,-0.89991,0.09345,-0.28916,0.28307,-0.18560,0.39599,-0.11498,0.31005,0.05614,0.21443,0.20540,0.13376,0.26422,g
340 | 1,0,0.65845,0.43617,0.44681,0.74804,0.05319,0.85106,-0.32027,0.82139,-0.68253,0.52408,-0.84211,0.07111,-0.82811,-0.28723,-0.47032,-0.71725,-0.04759,-0.86002,0.23292,-0.76316,0.56663,-0.52128,0.74300,-0.18645,0.74758,0.23713,0.45185,0.59071,0.20549,0.76764,-0.18533,0.74356,g
341 | 1,0,0.19466,0.05725,0.04198,0.25191,-0.10557,0.48866,-0.18321,-0.18321,-0.41985,0.06107,-0.45420,0.09160,-0.16412,-0.30534,-0.10305,-0.39695,0.18702,-0.17557,0.34012,-0.11953,0.28626,-0.16031,0.21645,0.24692,0.03913,0.31092,-0.03817,0.26336,-0.16794,0.16794,-0.30153,-0.33588,g
342 | 1,0,0.98002,0.00075,1,0,0.98982,-0.00075,0.94721,0.02394,0.97700,0.02130,0.97888,0.03073,0.99170,0.02338,0.93929,0.05713,0.93552,0.05279,0.97738,0.05524,1,0.06241,0.94155,0.08107,0.96709,0.07255,0.95701,0.08088,0.98190,0.08126,0.97247,0.08616,g
343 | 1,0,0.82254,-0.07572,0.80462,0.00231,0.87514,-0.01214,0.86821,-0.07514,0.72832,-0.11734,0.84624,0.05029,0.83121,-0.07399,0.74798,0.06705,0.78324,0.06358,0.86763,-0.02370,0.78844,-0.06012,0.74451,-0.02370,0.76717,-0.02731,0.74046,-0.07630,0.70058,-0.04220,0.78439,0.01214,g
344 | 1,0,0.35346,-0.13768,0.69387,-0.02423,0.68195,-0.03574,0.55717,-0.06119,0.61836,-0.10467,0.62099,-0.06527,0.59361,-0.07289,0.42271,-0.26409,0.58213,0.04992,0.49736,-0.08771,0.46241,-0.08989,0.45008,-0.00564,0.39146,-0.09038,0.35588,-0.10306,0.32232,-0.08637,0.28943,-0.08300,g
345 | 1,0,0.76046,0.01092,0.86335,0.00258,0.85821,0.00384,0.79988,0.02304,0.81504,0.12068,0.83096,0.00744,0.81815,0.00854,0.82777,-0.06974,0.76531,0.03881,0.76979,0.01148,0.75071,0.01232,0.77138,-0.00303,0.70886,0.01375,0.66161,0.00849,0.66298,0.01484,0.63887,0.01525,g
346 | 1,0,0.66667,-0.01366,0.97404,0.06831,0.49590,0.50137,0.75683,-0.00273,0.65164,-0.14071,0.40164,-0.48907,0.39208,0.58743,0.76776,0.31831,0.78552,0.11339,0.47541,-0.44945,1,0.00683,0.60656,0.06967,0.68656,0.17088,0.87568,0.07787,0.55328,0.24590,0.13934,0.48087,g
347 | 1,0,0.83508,0.08298,0.73739,-0.14706,0.84349,-0.05567,0.90441,-0.04622,0.89391,0.13130,0.81197,0.06723,0.79307,-0.08929,1,-0.02101,0.96639,0.06618,0.87605,0.01155,0.77521,0.06618,0.95378,-0.04202,0.83479,0.00123,1,0.12815,0.86660,-0.10714,0.90546,-0.04307,g
348 | 1,0,0.95113,0.00419,0.95183,-0.02723,0.93438,-0.01920,0.94590,0.01606,0.96510,0.03281,0.94171,0.07330,0.94625,-0.01326,0.97173,0.00140,0.94834,0.06038,0.92670,0.08412,0.93124,0.10087,0.94520,0.01361,0.93522,0.04925,0.93159,0.08168,0.94066,-0.00035,0.91483,0.04712,g
349 | 1,0,0.94701,-0.00034,0.93207,-0.03227,0.95177,-0.03431,0.95584,0.02446,0.94124,0.01766,0.92595,0.04688,0.93954,-0.01461,0.94837,0.02004,0.93784,0.01393,0.91406,0.07677,0.89470,0.06148,0.93988,0.03193,0.92489,0.02542,0.92120,0.02242,0.92459,0.00442,0.92697,-0.00577,g
350 | 1,0,0.90608,-0.01657,0.98122,-0.01989,0.95691,-0.03646,0.85746,0.00110,0.89724,-0.03315,0.89061,-0.01436,0.90608,-0.04530,0.91381,-0.00884,0.80773,-0.12928,0.88729,0.01215,0.92155,-0.02320,0.91050,-0.02099,0.89147,-0.07760,0.82983,-0.17238,0.96022,-0.03757,0.87403,-0.16243,g
351 | 1,0,0.84710,0.13533,0.73638,-0.06151,0.87873,0.08260,0.88928,-0.09139,0.78735,0.06678,0.80668,-0.00351,0.79262,-0.01054,0.85764,-0.04569,0.87170,-0.03515,0.81722,-0.09490,0.71002,0.04394,0.86467,-0.15114,0.81147,-0.04822,0.78207,-0.00703,0.75747,-0.06678,0.85764,-0.06151,g
352 |
--------------------------------------------------------------------------------
/mutual_information.py:
--------------------------------------------------------------------------------
1 | '''
2 | All functions used in this notebook were obtained from:
3 |
4 | Jundong Li, Kewei Cheng, Suhang Wang, Fred Morstatter, Robert P. Trevino, Jiliang Tang, and Huan Liu. 2017. Feature Selection: A Data Perspective. ACM Comput. Surv. 50, 6, Article 94 (January 2018), 45 pages. DOI:https://doi.org/10.1145/3136625
5 |
6 | '''
7 |
8 |
9 | import entropy_estimators as ee
10 |
11 |
def information_gain(f1, f2):
    """
    Compute the information gain of f1 with respect to f2, defined as
    ig(f1,f2) = H(f1) - H(f1|f2).
    Input
    -----
    f1: {numpy array}, shape (n_samples,)
    f2: {numpy array}, shape (n_samples,)
    Output
    ------
    ig: {float}
        information gain of f1 given f2
    """

    # Entropy of f1 minus the entropy of f1 remaining once f2 is known.
    return ee.entropyd(f1) - conditional_entropy(f1, f2)
26 |
27 |
def conditional_entropy(f1, f2):
    """
    Compute the conditional entropy of f1 given f2, using the identity
    H(f1|f2) = H(f1) - I(f1;f2).
    Input
    -----
    f1: {numpy array}, shape (n_samples,)
    f2: {numpy array}, shape (n_samples,)
    Output
    ------
    ce: {float}
        conditional entropy H(f1|f2)
    """

    entropy_f1 = ee.entropyd(f1)       # H(f1)
    mutual_info = ee.midd(f1, f2)      # I(f1;f2)
    return entropy_f1 - mutual_info
43 |
44 |
def su_calculation(f1, f2):
    """
    Compute the symmetrical uncertainty between two features, where
    su(f1,f2) = 2*IG(f1,f2)/(H(f1)+H(f2)).
    Input
    -----
    f1: {numpy array}, shape (n_samples,)
    f2: {numpy array}, shape (n_samples,)
    Output
    ------
    su: {float}
        su is the symmetrical uncertainty of f1 and f2; 0.0 when both
        features are constant (H(f1) = H(f2) = 0), where SU is undefined.
    """

    # calculate information gain of f1 and f2, t1 = ig(f1,f2)
    t1 = information_gain(f1, f2)
    # calculate entropy of f1, t2 = H(f1)
    t2 = ee.entropyd(f1)
    # calculate entropy of f2, t3 = H(f2)
    t3 = ee.entropyd(f2)
    denominator = t2 + t3
    # Guard against ZeroDivisionError: if both features carry no entropy
    # (e.g. constant columns scanned by CFS), treat them as sharing no
    # uncertainty and return 0 instead of crashing.
    if denominator == 0:
        return 0.0
    # su(f1,f2) = 2*t1/(t2+t3)
    su = 2.0 * t1 / denominator

    return su
--------------------------------------------------------------------------------
/penguins.csv:
--------------------------------------------------------------------------------
1 | ,bill_length,bill_depth,flipper_length,body_mass,species
2 | 0,39.1,18.7,181.0,3750.0,Adelie
3 | 1,39.5,17.4,186.0,3800.0,Adelie
4 | 2,40.3,18.0,195.0,3250.0,Adelie
5 | 3,36.7,19.3,193.0,3450.0,Adelie
6 | 4,39.3,20.6,190.0,3650.0,Adelie
7 | 5,38.9,17.8,181.0,3625.0,Adelie
8 | 6,39.2,19.6,195.0,4675.0,Adelie
9 | 7,41.1,17.6,182.0,3200.0,Adelie
10 | 8,38.6,21.2,191.0,3800.0,Adelie
11 | 9,34.6,21.1,198.0,4400.0,Adelie
12 | 10,36.6,17.8,185.0,3700.0,Adelie
13 | 11,38.7,19.0,195.0,3450.0,Adelie
14 | 12,42.5,20.7,197.0,4500.0,Adelie
15 | 13,34.4,18.4,184.0,3325.0,Adelie
16 | 14,46.0,21.5,194.0,4200.0,Adelie
17 | 15,37.8,18.3,174.0,3400.0,Adelie
18 | 16,37.7,18.7,180.0,3600.0,Adelie
19 | 17,35.9,19.2,189.0,3800.0,Adelie
20 | 18,38.2,18.1,185.0,3950.0,Adelie
21 | 19,38.8,17.2,180.0,3800.0,Adelie
22 | 20,35.3,18.9,187.0,3800.0,Adelie
23 | 21,40.6,18.6,183.0,3550.0,Adelie
24 | 22,40.5,17.9,187.0,3200.0,Adelie
25 | 23,37.9,18.6,172.0,3150.0,Adelie
26 | 24,40.5,18.9,180.0,3950.0,Adelie
27 | 25,39.5,16.7,178.0,3250.0,Adelie
28 | 26,37.2,18.1,178.0,3900.0,Adelie
29 | 27,39.5,17.8,188.0,3300.0,Adelie
30 | 28,40.9,18.9,184.0,3900.0,Adelie
31 | 29,36.4,17.0,195.0,3325.0,Adelie
32 | 30,39.2,21.1,196.0,4150.0,Adelie
33 | 31,38.8,20.0,190.0,3950.0,Adelie
34 | 32,42.2,18.5,180.0,3550.0,Adelie
35 | 33,37.6,19.3,181.0,3300.0,Adelie
36 | 34,39.8,19.1,184.0,4650.0,Adelie
37 | 35,36.5,18.0,182.0,3150.0,Adelie
38 | 36,40.8,18.4,195.0,3900.0,Adelie
39 | 37,36.0,18.5,186.0,3100.0,Adelie
40 | 38,44.1,19.7,196.0,4400.0,Adelie
41 | 39,37.0,16.9,185.0,3000.0,Adelie
42 | 40,39.6,18.8,190.0,4600.0,Adelie
43 | 41,41.1,19.0,182.0,3425.0,Adelie
44 | 42,36.0,17.9,190.0,3450.0,Adelie
45 | 43,42.3,21.2,191.0,4150.0,Adelie
46 | 44,39.6,17.7,186.0,3500.0,Adelie
47 | 45,40.1,18.9,188.0,4300.0,Adelie
48 | 46,35.0,17.9,190.0,3450.0,Adelie
49 | 47,42.0,19.5,200.0,4050.0,Adelie
50 | 48,34.5,18.1,187.0,2900.0,Adelie
51 | 49,41.4,18.6,191.0,3700.0,Adelie
52 | 50,39.0,17.5,186.0,3550.0,Adelie
53 | 51,40.6,18.8,193.0,3800.0,Adelie
54 | 52,36.5,16.6,181.0,2850.0,Adelie
55 | 53,37.6,19.1,194.0,3750.0,Adelie
56 | 54,35.7,16.9,185.0,3150.0,Adelie
57 | 55,41.3,21.1,195.0,4400.0,Adelie
58 | 56,37.6,17.0,185.0,3600.0,Adelie
59 | 57,41.1,18.2,192.0,4050.0,Adelie
60 | 58,36.4,17.1,184.0,2850.0,Adelie
61 | 59,41.6,18.0,192.0,3950.0,Adelie
62 | 60,35.5,16.2,195.0,3350.0,Adelie
63 | 61,41.1,19.1,188.0,4100.0,Adelie
64 | 62,35.9,16.6,190.0,3050.0,Adelie
65 | 63,41.8,19.4,198.0,4450.0,Adelie
66 | 64,33.5,19.0,190.0,3600.0,Adelie
67 | 65,39.7,18.4,190.0,3900.0,Adelie
68 | 66,39.6,17.2,196.0,3550.0,Adelie
69 | 67,45.8,18.9,197.0,4150.0,Adelie
70 | 68,35.5,17.5,190.0,3700.0,Adelie
71 | 69,42.8,18.5,195.0,4250.0,Adelie
72 | 70,40.9,16.8,191.0,3700.0,Adelie
73 | 71,37.2,19.4,184.0,3900.0,Adelie
74 | 72,36.2,16.1,187.0,3550.0,Adelie
75 | 73,42.1,19.1,195.0,4000.0,Adelie
76 | 74,34.6,17.2,189.0,3200.0,Adelie
77 | 75,42.9,17.6,196.0,4700.0,Adelie
78 | 76,36.7,18.8,187.0,3800.0,Adelie
79 | 77,35.1,19.4,193.0,4200.0,Adelie
80 | 78,37.3,17.8,191.0,3350.0,Adelie
81 | 79,41.3,20.3,194.0,3550.0,Adelie
82 | 80,36.3,19.5,190.0,3800.0,Adelie
83 | 81,36.9,18.6,189.0,3500.0,Adelie
84 | 82,38.3,19.2,189.0,3950.0,Adelie
85 | 83,38.9,18.8,190.0,3600.0,Adelie
86 | 84,35.7,18.0,202.0,3550.0,Adelie
87 | 85,41.1,18.1,205.0,4300.0,Adelie
88 | 86,34.0,17.1,185.0,3400.0,Adelie
89 | 87,39.6,18.1,186.0,4450.0,Adelie
90 | 88,36.2,17.3,187.0,3300.0,Adelie
91 | 89,40.8,18.9,208.0,4300.0,Adelie
92 | 90,38.1,18.6,190.0,3700.0,Adelie
93 | 91,40.3,18.5,196.0,4350.0,Adelie
94 | 92,33.1,16.1,178.0,2900.0,Adelie
95 | 93,43.2,18.5,192.0,4100.0,Adelie
96 | 94,35.0,17.9,192.0,3725.0,Adelie
97 | 95,41.0,20.0,203.0,4725.0,Adelie
98 | 96,37.7,16.0,183.0,3075.0,Adelie
99 | 97,37.8,20.0,190.0,4250.0,Adelie
100 | 98,37.9,18.6,193.0,2925.0,Adelie
101 | 99,39.7,18.9,184.0,3550.0,Adelie
102 | 100,38.6,17.2,199.0,3750.0,Adelie
103 | 101,38.2,20.0,190.0,3900.0,Adelie
104 | 102,38.1,17.0,181.0,3175.0,Adelie
105 | 103,43.2,19.0,197.0,4775.0,Adelie
106 | 104,38.1,16.5,198.0,3825.0,Adelie
107 | 105,45.6,20.3,191.0,4600.0,Adelie
108 | 106,39.7,17.7,193.0,3200.0,Adelie
109 | 107,42.2,19.5,197.0,4275.0,Adelie
110 | 108,39.6,20.7,191.0,3900.0,Adelie
111 | 109,42.7,18.3,196.0,4075.0,Adelie
112 | 110,38.6,17.0,188.0,2900.0,Adelie
113 | 111,37.3,20.5,199.0,3775.0,Adelie
114 | 112,35.7,17.0,189.0,3350.0,Adelie
115 | 113,41.1,18.6,189.0,3325.0,Adelie
116 | 114,36.2,17.2,187.0,3150.0,Adelie
117 | 115,37.7,19.8,198.0,3500.0,Adelie
118 | 116,40.2,17.0,176.0,3450.0,Adelie
119 | 117,41.4,18.5,202.0,3875.0,Adelie
120 | 118,35.2,15.9,186.0,3050.0,Adelie
121 | 119,40.6,19.0,199.0,4000.0,Adelie
122 | 120,38.8,17.6,191.0,3275.0,Adelie
123 | 121,41.5,18.3,195.0,4300.0,Adelie
124 | 122,39.0,17.1,191.0,3050.0,Adelie
125 | 123,44.1,18.0,210.0,4000.0,Adelie
126 | 124,38.5,17.9,190.0,3325.0,Adelie
127 | 125,43.1,19.2,197.0,3500.0,Adelie
128 | 126,36.8,18.5,193.0,3500.0,Adelie
129 | 127,37.5,18.5,199.0,4475.0,Adelie
130 | 128,38.1,17.6,187.0,3425.0,Adelie
131 | 129,41.1,17.5,190.0,3900.0,Adelie
132 | 130,35.6,17.5,191.0,3175.0,Adelie
133 | 131,40.2,20.1,200.0,3975.0,Adelie
134 | 132,37.0,16.5,185.0,3400.0,Adelie
135 | 133,39.7,17.9,193.0,4250.0,Adelie
136 | 134,40.2,17.1,193.0,3400.0,Adelie
137 | 135,40.6,17.2,187.0,3475.0,Adelie
138 | 136,32.1,15.5,188.0,3050.0,Adelie
139 | 137,40.7,17.0,190.0,3725.0,Adelie
140 | 138,37.3,16.8,192.0,3000.0,Adelie
141 | 139,39.0,18.7,185.0,3650.0,Adelie
142 | 140,39.2,18.6,190.0,4250.0,Adelie
143 | 141,36.6,18.4,184.0,3475.0,Adelie
144 | 142,36.0,17.8,195.0,3450.0,Adelie
145 | 143,37.8,18.1,193.0,3750.0,Adelie
146 | 144,36.0,17.1,187.0,3700.0,Adelie
147 | 145,41.5,18.5,201.0,4000.0,Adelie
148 | 146,46.1,13.2,211.0,4500.0,Gentoo
149 | 147,50.0,16.3,230.0,5700.0,Gentoo
150 | 148,48.7,14.1,210.0,4450.0,Gentoo
151 | 149,50.0,15.2,218.0,5700.0,Gentoo
152 | 150,47.6,14.5,215.0,5400.0,Gentoo
153 | 151,46.5,13.5,210.0,4550.0,Gentoo
154 | 152,45.4,14.6,211.0,4800.0,Gentoo
155 | 153,46.7,15.3,219.0,5200.0,Gentoo
156 | 154,43.3,13.4,209.0,4400.0,Gentoo
157 | 155,46.8,15.4,215.0,5150.0,Gentoo
158 | 156,40.9,13.7,214.0,4650.0,Gentoo
159 | 157,49.0,16.1,216.0,5550.0,Gentoo
160 | 158,45.5,13.7,214.0,4650.0,Gentoo
161 | 159,48.4,14.6,213.0,5850.0,Gentoo
162 | 160,45.8,14.6,210.0,4200.0,Gentoo
163 | 161,49.3,15.7,217.0,5850.0,Gentoo
164 | 162,42.0,13.5,210.0,4150.0,Gentoo
165 | 163,49.2,15.2,221.0,6300.0,Gentoo
166 | 164,46.2,14.5,209.0,4800.0,Gentoo
167 | 165,48.7,15.1,222.0,5350.0,Gentoo
168 | 166,50.2,14.3,218.0,5700.0,Gentoo
169 | 167,45.1,14.5,215.0,5000.0,Gentoo
170 | 168,46.5,14.5,213.0,4400.0,Gentoo
171 | 169,46.3,15.8,215.0,5050.0,Gentoo
172 | 170,42.9,13.1,215.0,5000.0,Gentoo
173 | 171,46.1,15.1,215.0,5100.0,Gentoo
174 | 172,47.8,15.0,215.0,5650.0,Gentoo
175 | 173,48.2,14.3,210.0,4600.0,Gentoo
176 | 174,50.0,15.3,220.0,5550.0,Gentoo
177 | 175,47.3,15.3,222.0,5250.0,Gentoo
178 | 176,42.8,14.2,209.0,4700.0,Gentoo
179 | 177,45.1,14.5,207.0,5050.0,Gentoo
180 | 178,59.6,17.0,230.0,6050.0,Gentoo
181 | 179,49.1,14.8,220.0,5150.0,Gentoo
182 | 180,48.4,16.3,220.0,5400.0,Gentoo
183 | 181,42.6,13.7,213.0,4950.0,Gentoo
184 | 182,44.4,17.3,219.0,5250.0,Gentoo
185 | 183,44.0,13.6,208.0,4350.0,Gentoo
186 | 184,48.7,15.7,208.0,5350.0,Gentoo
187 | 185,42.7,13.7,208.0,3950.0,Gentoo
188 | 186,49.6,16.0,225.0,5700.0,Gentoo
189 | 187,45.3,13.7,210.0,4300.0,Gentoo
190 | 188,49.6,15.0,216.0,4750.0,Gentoo
191 | 189,50.5,15.9,222.0,5550.0,Gentoo
192 | 190,43.6,13.9,217.0,4900.0,Gentoo
193 | 191,45.5,13.9,210.0,4200.0,Gentoo
194 | 192,50.5,15.9,225.0,5400.0,Gentoo
195 | 193,44.9,13.3,213.0,5100.0,Gentoo
196 | 194,45.2,15.8,215.0,5300.0,Gentoo
197 | 195,46.6,14.2,210.0,4850.0,Gentoo
198 | 196,48.5,14.1,220.0,5300.0,Gentoo
199 | 197,45.1,14.4,210.0,4400.0,Gentoo
200 | 198,50.1,15.0,225.0,5000.0,Gentoo
201 | 199,46.5,14.4,217.0,4900.0,Gentoo
202 | 200,45.0,15.4,220.0,5050.0,Gentoo
203 | 201,43.8,13.9,208.0,4300.0,Gentoo
204 | 202,45.5,15.0,220.0,5000.0,Gentoo
205 | 203,43.2,14.5,208.0,4450.0,Gentoo
206 | 204,50.4,15.3,224.0,5550.0,Gentoo
207 | 205,45.3,13.8,208.0,4200.0,Gentoo
208 | 206,46.2,14.9,221.0,5300.0,Gentoo
209 | 207,45.7,13.9,214.0,4400.0,Gentoo
210 | 208,54.3,15.7,231.0,5650.0,Gentoo
211 | 209,45.8,14.2,219.0,4700.0,Gentoo
212 | 210,49.8,16.8,230.0,5700.0,Gentoo
213 | 211,49.5,16.2,229.0,5800.0,Gentoo
214 | 212,43.5,14.2,220.0,4700.0,Gentoo
215 | 213,50.7,15.0,223.0,5550.0,Gentoo
216 | 214,47.7,15.0,216.0,4750.0,Gentoo
217 | 215,46.4,15.6,221.0,5000.0,Gentoo
218 | 216,48.2,15.6,221.0,5100.0,Gentoo
219 | 217,46.5,14.8,217.0,5200.0,Gentoo
220 | 218,46.4,15.0,216.0,4700.0,Gentoo
221 | 219,48.6,16.0,230.0,5800.0,Gentoo
222 | 220,47.5,14.2,209.0,4600.0,Gentoo
223 | 221,51.1,16.3,220.0,6000.0,Gentoo
224 | 222,45.2,13.8,215.0,4750.0,Gentoo
225 | 223,45.2,16.4,223.0,5950.0,Gentoo
226 | 224,49.1,14.5,212.0,4625.0,Gentoo
227 | 225,52.5,15.6,221.0,5450.0,Gentoo
228 | 226,47.4,14.6,212.0,4725.0,Gentoo
229 | 227,50.0,15.9,224.0,5350.0,Gentoo
230 | 228,44.9,13.8,212.0,4750.0,Gentoo
231 | 229,50.8,17.3,228.0,5600.0,Gentoo
232 | 230,43.4,14.4,218.0,4600.0,Gentoo
233 | 231,51.3,14.2,218.0,5300.0,Gentoo
234 | 232,47.5,14.0,212.0,4875.0,Gentoo
235 | 233,52.1,17.0,230.0,5550.0,Gentoo
236 | 234,47.5,15.0,218.0,4950.0,Gentoo
237 | 235,52.2,17.1,228.0,5400.0,Gentoo
238 | 236,45.5,14.5,212.0,4750.0,Gentoo
239 | 237,49.5,16.1,224.0,5650.0,Gentoo
240 | 238,44.5,14.7,214.0,4850.0,Gentoo
241 | 239,50.8,15.7,226.0,5200.0,Gentoo
242 | 240,49.4,15.8,216.0,4925.0,Gentoo
243 | 241,46.9,14.6,222.0,4875.0,Gentoo
244 | 242,48.4,14.4,203.0,4625.0,Gentoo
245 | 243,51.1,16.5,225.0,5250.0,Gentoo
246 | 244,48.5,15.0,219.0,4850.0,Gentoo
247 | 245,55.9,17.0,228.0,5600.0,Gentoo
248 | 246,47.2,15.5,215.0,4975.0,Gentoo
249 | 247,49.1,15.0,228.0,5500.0,Gentoo
250 | 248,46.8,16.1,215.0,5500.0,Gentoo
251 | 249,41.7,14.7,210.0,4700.0,Gentoo
252 | 250,53.4,15.8,219.0,5500.0,Gentoo
253 | 251,43.3,14.0,208.0,4575.0,Gentoo
254 | 252,48.1,15.1,209.0,5500.0,Gentoo
255 | 253,50.5,15.2,216.0,5000.0,Gentoo
256 | 254,49.8,15.9,229.0,5950.0,Gentoo
257 | 255,43.5,15.2,213.0,4650.0,Gentoo
258 | 256,51.5,16.3,230.0,5500.0,Gentoo
259 | 257,46.2,14.1,217.0,4375.0,Gentoo
260 | 258,55.1,16.0,230.0,5850.0,Gentoo
261 | 259,48.8,16.2,222.0,6000.0,Gentoo
262 | 260,47.2,13.7,214.0,4925.0,Gentoo
263 | 261,46.8,14.3,215.0,4850.0,Gentoo
264 | 262,50.4,15.7,222.0,5750.0,Gentoo
265 | 263,45.2,14.8,212.0,5200.0,Gentoo
266 | 264,49.9,16.1,213.0,5400.0,Gentoo
267 | 265,46.5,17.9,192.0,3500.0,Chinstrap
268 | 266,50.0,19.5,196.0,3900.0,Chinstrap
269 | 267,51.3,19.2,193.0,3650.0,Chinstrap
270 | 268,45.4,18.7,188.0,3525.0,Chinstrap
271 | 269,52.7,19.8,197.0,3725.0,Chinstrap
272 | 270,45.2,17.8,198.0,3950.0,Chinstrap
273 | 271,46.1,18.2,178.0,3250.0,Chinstrap
274 | 272,51.3,18.2,197.0,3750.0,Chinstrap
275 | 273,46.0,18.9,195.0,4150.0,Chinstrap
276 | 274,51.3,19.9,198.0,3700.0,Chinstrap
277 | 275,46.6,17.8,193.0,3800.0,Chinstrap
278 | 276,51.7,20.3,194.0,3775.0,Chinstrap
279 | 277,47.0,17.3,185.0,3700.0,Chinstrap
280 | 278,52.0,18.1,201.0,4050.0,Chinstrap
281 | 279,45.9,17.1,190.0,3575.0,Chinstrap
282 | 280,50.5,19.6,201.0,4050.0,Chinstrap
283 | 281,50.3,20.0,197.0,3300.0,Chinstrap
284 | 282,58.0,17.8,181.0,3700.0,Chinstrap
285 | 283,46.4,18.6,190.0,3450.0,Chinstrap
286 | 284,49.2,18.2,195.0,4400.0,Chinstrap
287 | 285,42.4,17.3,181.0,3600.0,Chinstrap
288 | 286,48.5,17.5,191.0,3400.0,Chinstrap
289 | 287,43.2,16.6,187.0,2900.0,Chinstrap
290 | 288,50.6,19.4,193.0,3800.0,Chinstrap
291 | 289,46.7,17.9,195.0,3300.0,Chinstrap
292 | 290,52.0,19.0,197.0,4150.0,Chinstrap
293 | 291,50.5,18.4,200.0,3400.0,Chinstrap
294 | 292,49.5,19.0,200.0,3800.0,Chinstrap
295 | 293,46.4,17.8,191.0,3700.0,Chinstrap
296 | 294,52.8,20.0,205.0,4550.0,Chinstrap
297 | 295,40.9,16.6,187.0,3200.0,Chinstrap
298 | 296,54.2,20.8,201.0,4300.0,Chinstrap
299 | 297,42.5,16.7,187.0,3350.0,Chinstrap
300 | 298,51.0,18.8,203.0,4100.0,Chinstrap
301 | 299,49.7,18.6,195.0,3600.0,Chinstrap
302 | 300,47.5,16.8,199.0,3900.0,Chinstrap
303 | 301,47.6,18.3,195.0,3850.0,Chinstrap
304 | 302,52.0,20.7,210.0,4800.0,Chinstrap
305 | 303,46.9,16.6,192.0,2700.0,Chinstrap
306 | 304,53.5,19.9,205.0,4500.0,Chinstrap
307 | 305,49.0,19.5,210.0,3950.0,Chinstrap
308 | 306,46.2,17.5,187.0,3650.0,Chinstrap
309 | 307,50.9,19.1,196.0,3550.0,Chinstrap
310 | 308,45.5,17.0,196.0,3500.0,Chinstrap
311 | 309,50.9,17.9,196.0,3675.0,Chinstrap
312 | 310,50.8,18.5,201.0,4450.0,Chinstrap
313 | 311,50.1,17.9,190.0,3400.0,Chinstrap
314 | 312,49.0,19.6,212.0,4300.0,Chinstrap
315 | 313,51.5,18.7,187.0,3250.0,Chinstrap
316 | 314,49.8,17.3,198.0,3675.0,Chinstrap
317 | 315,48.1,16.4,199.0,3325.0,Chinstrap
318 | 316,51.4,19.0,201.0,3950.0,Chinstrap
319 | 317,45.7,17.3,193.0,3600.0,Chinstrap
320 | 318,50.7,19.7,203.0,4050.0,Chinstrap
321 | 319,42.5,17.3,187.0,3350.0,Chinstrap
322 | 320,52.2,18.8,197.0,3450.0,Chinstrap
323 | 321,45.2,16.6,191.0,3250.0,Chinstrap
324 | 322,49.3,19.9,203.0,4050.0,Chinstrap
325 | 323,50.2,18.8,202.0,3800.0,Chinstrap
326 | 324,45.6,19.4,194.0,3525.0,Chinstrap
327 | 325,51.9,19.5,206.0,3950.0,Chinstrap
328 | 326,46.8,16.5,189.0,3650.0,Chinstrap
329 | 327,45.7,17.0,195.0,3650.0,Chinstrap
330 | 328,55.8,19.8,207.0,4000.0,Chinstrap
331 | 329,43.5,18.1,202.0,3400.0,Chinstrap
332 | 330,49.6,18.2,193.0,3775.0,Chinstrap
333 | 331,50.8,19.0,210.0,4100.0,Chinstrap
334 | 332,50.2,18.7,198.0,3775.0,Chinstrap
335 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | asttokens==2.4.1
2 | comm==0.2.2
3 | contourpy==1.3.1
4 | cycler==0.12.1
5 | debugpy==1.8.8
6 | decorator==5.1.1
7 | executing==2.1.0
8 | fonttools==4.55.0
9 | ipykernel==6.29.5
10 | ipython==8.29.0
11 | jedi==0.19.2
12 | joblib==1.4.2
13 | jupyter_client==8.6.3
14 | jupyter_core==5.7.2
15 | kiwisolver==1.4.7
16 | matplotlib==3.9.2
17 | matplotlib-inline==0.1.7
18 | mlxtend==0.23.3
19 | nest-asyncio==1.6.0
20 | numpy==2.1.3
21 | packaging==24.2
22 | pandas==2.2.3
23 | parso==0.8.4
24 | pexpect==4.9.0
25 | pillow==11.0.0
26 | platformdirs==4.3.6
27 | prompt_toolkit==3.0.48
28 | psutil==6.1.0
29 | ptyprocess==0.7.0
30 | pure_eval==0.2.3
31 | Pygments==2.18.0
32 | pyparsing==3.2.0
33 | python-dateutil==2.9.0.post0
34 | pytz==2024.2
35 | pyzmq==26.2.0
36 | scikit-learn==1.5.2
37 | scipy==1.14.1
38 | six==1.16.0
39 | skrebate==0.62
40 | stack-data==0.6.3
41 | threadpoolctl==3.5.0
42 | tornado==6.4.1
43 | traitlets==5.14.3
44 | typing_extensions==4.12.2
45 | tzdata==2024.2
46 | wcwidth==0.2.13
47 |
--------------------------------------------------------------------------------