├── .gitattributes ├── .gitignore ├── quiz1 ├── Question15.m ├── Question16.m ├── Question18.m ├── Question18_2.m ├── Question19.m ├── Question2.m ├── Question20.m ├── Question3.m ├── answer.pdf ├── features.test.txt ├── features.train.txt ├── quiz1.pdf └── quiz1.tex ├── quiz2 ├── Question12.m ├── Question18.m ├── Question7.m ├── answer.pdf ├── hw2_adaboost_test.dat ├── hw2_adaboost_train.dat ├── hw2_lssvm_all.dat ├── quiz2.pdf └── quiz2.tex ├── quiz3 ├── Dtree.m ├── Dtree_predict.m ├── Question15.m ├── Question16.m ├── Question17.m ├── Question18.m ├── Question19.m ├── Question20.m ├── answer.pdf ├── check_predict.m ├── impurity.m ├── pruned_Dtree_predict.m ├── quiz3.pdf └── quiz3.tex ├── quiz4 ├── Question11.m ├── Question12.m ├── Question13.m ├── Question14.m ├── Question19.m ├── Question20.m ├── answer.pdf ├── hw4_kmeans_train.dat ├── hw4_knn_test.dat ├── hw4_knn_train.dat ├── hw4_nnet_test.dat ├── hw4_nnet_train.dat ├── kmeans_ein.m ├── kmeans_train.m ├── knn_predict_check.m ├── nn_predict_check.m ├── predict_check_nnet.m ├── predict_check_nnet2.m ├── quiz4.pdf └── quiz4.tex └── readme.md /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.gitignore.io/api/tex,matlab 2 | # Edit at https://www.gitignore.io/?templates=tex,matlab 3 | 4 | ### MATLAB ### 5 | # Windows default autosave extension 6 | *.asv 7 | 8 | # OSX / *nix default autosave extension 9 | *.m~ 10 | 11 | # Compiled MEX binaries (all platforms) 12 | *.mex* 13 | 14 | # Packaged app and toolbox files 15 | *.mlappinstall 16 | *.mltbx 17 | 18 | # Generated helpsearch folders 19 | helpsearch*/ 20 | 21 | # Simulink code generation folders 22 | slprj/ 23 | sccprj/ 24 | 25 | # Matlab code generation folders 26 | codegen/ 27 | 28 | # Simulink autosave extension 29 | *.autosave 30 | 31 | # Octave session info 32 | octave-workspace 33 | 34 | ### TeX ### 35 | ## Core latex/pdflatex auxiliary files: 36 | *.aux 37 | *.lof 38 | *.log 39 | *.lot 40 | *.fls 41 | *.out 42 | *.toc 43 | *.fmt 44 | *.fot 45 | *.cb 46 | *.cb2 47 | .*.lb 48 | 49 | ## Intermediate documents: 50 | *.dvi 51 | *.xdv 52 | *-converted-to.* 53 | # these rules might exclude image files for figures etc. 
54 | # *.ps 55 | # *.eps 56 | # *.pdf 57 | 58 | ## Generated if empty string is given at "Please type another file name for output:" 59 | .pdf 60 | 61 | ## Bibliography auxiliary files (bibtex/biblatex/biber): 62 | *.bbl 63 | *.bcf 64 | *.blg 65 | *-blx.aux 66 | *-blx.bib 67 | *.run.xml 68 | 69 | ## Build tool auxiliary files: 70 | *.fdb_latexmk 71 | *.synctex 72 | *.synctex(busy) 73 | *.synctex.gz 74 | *.synctex.gz(busy) 75 | *.pdfsync 76 | 77 | ## Build tool directories for auxiliary files 78 | # latexrun 79 | latex.out/ 80 | 81 | ## Auxiliary and intermediate files from other packages: 82 | # algorithms 83 | *.alg 84 | *.loa 85 | 86 | # achemso 87 | acs-*.bib 88 | 89 | # amsthm 90 | *.thm 91 | 92 | # beamer 93 | *.nav 94 | *.pre 95 | *.snm 96 | *.vrb 97 | 98 | # changes 99 | *.soc 100 | 101 | # comment 102 | *.cut 103 | 104 | # cprotect 105 | *.cpt 106 | 107 | # elsarticle (documentclass of Elsevier journals) 108 | *.spl 109 | 110 | # endnotes 111 | *.ent 112 | 113 | # fixme 114 | *.lox 115 | 116 | # feynmf/feynmp 117 | *.mf 118 | *.mp 119 | *.t[1-9] 120 | *.t[1-9][0-9] 121 | *.tfm 122 | 123 | #(r)(e)ledmac/(r)(e)ledpar 124 | *.end 125 | *.?end 126 | *.[1-9] 127 | *.[1-9][0-9] 128 | *.[1-9][0-9][0-9] 129 | *.[1-9]R 130 | *.[1-9][0-9]R 131 | *.[1-9][0-9][0-9]R 132 | *.eledsec[1-9] 133 | *.eledsec[1-9]R 134 | *.eledsec[1-9][0-9] 135 | *.eledsec[1-9][0-9]R 136 | *.eledsec[1-9][0-9][0-9] 137 | *.eledsec[1-9][0-9][0-9]R 138 | 139 | # glossaries 140 | *.acn 141 | *.acr 142 | *.glg 143 | *.glo 144 | *.gls 145 | *.glsdefs 146 | 147 | # uncomment this for glossaries-extra (will ignore makeindex's style files!) 148 | # *.ist 149 | 150 | # gnuplottex 151 | *-gnuplottex-* 152 | 153 | # gregoriotex 154 | *.gaux 155 | *.gtex 156 | 157 | # htlatex 158 | *.4ct 159 | *.4tc 160 | *.idv 161 | *.lg 162 | *.trc 163 | *.xref 164 | 165 | # hyperref 166 | *.brf 167 | 168 | # knitr 169 | *-concordance.tex 170 | # TODO Comment the next line if you want to keep your tikz graphics files 171 | *.tikz 172 | *-tikzDictionary 173 | 174 | # listings 175 | *.lol 176 | 177 | # luatexja-ruby 178 | *.ltjruby 179 | 180 | # makeidx 181 | *.idx 182 | *.ilg 183 | *.ind 184 | 185 | # minitoc 186 | *.maf 187 | *.mlf 188 | *.mlt 189 | *.mtc[0-9]* 190 | *.slf[0-9]* 191 | *.slt[0-9]* 192 | *.stc[0-9]* 193 | 194 | # minted 195 | _minted* 196 | *.pyg 197 | 198 | # morewrites 199 | *.mw 200 | 201 | # nomencl 202 | *.nlg 203 | *.nlo 204 | *.nls 205 | 206 | # pax 207 | *.pax 208 | 209 | # pdfpcnotes 210 | *.pdfpc 211 | 212 | # sagetex 213 | *.sagetex.sage 214 | *.sagetex.py 215 | *.sagetex.scmd 216 | 217 | # scrwfile 218 | *.wrt 219 | 220 | # sympy 221 | *.sout 222 | *.sympy 223 | sympy-plots-for-*.tex/ 224 | 225 | # pdfcomment 226 | *.upa 227 | *.upb 228 | 229 | # pythontex 230 | *.pytxcode 231 | pythontex-files-*/ 232 | 233 | # tcolorbox 234 | *.listing 235 | 236 | # thmtools 237 | *.loe 238 | 239 | # TikZ & PGF 240 | *.dpth 241 | *.md5 242 | *.auxlock 243 | 244 | # todonotes 245 | *.tdo 246 | 247 | # vhistory 248 | *.hst 249 | *.ver 250 | 251 | # easy-todo 252 | *.lod 253 | 254 | # xcolor 255 | *.xcp 256 | 257 | # xmpincl 258 | *.xmpi 259 | 260 | # xindy 261 | *.xdy 262 | 263 | # xypic precompiled matrices 264 | *.xyc 265 | 266 | # endfloat 267 | *.ttt 268 | *.fff 269 | 270 | # Latexian 271 | TSWLatexianTemp* 272 | 273 | ## Editors: 274 | # WinEdt 275 | *.bak 276 | *.sav 277 | 278 | # Texpad 279 | .texpadtmp 280 | 281 | # LyX 282 | *.lyx~ 283 | 284 | # Kile 285 | *.backup 286 | 287 | # KBibTeX 288 | *~[0-9]* 289 | 290 | # auto folder 
when using emacs and auctex 291 | ./auto/* 292 | *.el 293 | 294 | # expex forward references with \gathertags 295 | *-tags.tex 296 | 297 | # standalone packages 298 | *.sta 299 | 300 | ### TeX Patch ### 301 | # glossaries 302 | *.glstex 303 | 304 | # End of https://www.gitignore.io/api/tex,matlab 305 | -------------------------------------------------------------------------------- /quiz1/Question15.m: -------------------------------------------------------------------------------- 1 | 2 | N=length(features_train); 3 | temp=features_train; 4 | for n=1:N 5 | if features_train(n,1)~=0 6 | temp(n,1)=-1; 7 | else 8 | temp(n,1)=1; 9 | end 10 | end 11 | 12 | model=svmtrain(temp(:,1),temp(:,2:3),'-t 0 -c 0.01'); 13 | w=model.SVs' * model.sv_coef; 14 | -------------------------------------------------------------------------------- /quiz1/Question16.m: -------------------------------------------------------------------------------- 1 | 2 | N=length(features_train); 3 | % '0' versus non'0' 4 | % for n=1:N 5 | % if features_train(n,1)~=0 6 | % features_train(n,1)=1; 7 | % end 8 | % end 9 | k=1; 10 | for i=[0,2,4,6,8] 11 | temp=features_train; 12 | for n=1:N 13 | if temp(n,1) == i 14 | temp(n,1)=1; 15 | else 16 | temp(n,1)=-1; 17 | end 18 | end 19 | model=svmtrain(temp(:,1),temp(:,2:3),'-t 1 -d 2 -r 1 -c 0.01 -g 1'); 20 | alpha(k,1)=i; 21 | alpha(k,2)=sum(abs(model.sv_coef)); 22 | [predict,accuracy(k,1:3),value]=svmpredict(temp(:,1),temp(:,2:3),model); 23 | accuracy(k,4)=i; 24 | k=k+1; 25 | end 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /quiz1/Question18.m: -------------------------------------------------------------------------------- 1 | N=length(features_train); 2 | temp=features_train; 3 | test=features_test; 4 | for n=1:N 5 | if temp(n,1) ==0 6 | temp(n,1)=1; 7 | else 8 | temp(n,1)=-1; 9 | end 10 | end 11 | 12 | for n=1:length(test) 13 | if test(n,1) ==0 14 | test(n,1)=1; 15 | else 16 | test(n,1)=-1; 17 | end 18 | end 19 | violation=[0,0,0,0,0]; 20 | model(1,1)=svmtrain(temp(:,1),temp(:,2:3),'-t 2 -g 100 -c 0.001'); 21 | model(1,2)=svmtrain(temp(:,1),temp(:,2:3),'-t 2 -g 100 -c 0.01'); 22 | model(1,3)=svmtrain(temp(:,1),temp(:,2:3),'-t 2 -g 100 -c 0.1'); 23 | model(1,4)=svmtrain(temp(:,1),temp(:,2:3),'-t 2 -g 100 -c 1'); 24 | model(1,5)=svmtrain(temp(:,1),temp(:,2:3),'-t 2 -g 100 -c 10'); 25 | c=[0.001,0.01,0.1,1,10]; 26 | for i=1:5 27 | [~,acc(i,1:3),~]=svmpredict(test(:,1),test(:,2:3),model(1,i)); 28 | number_of_svs(1,i)=model(1,i).totalSV; 29 | w=0; 30 | for n=1:model(1,i).totalSV 31 | if abs(model(1,i).sv_coef(n,1))==c(1,i); 32 | wz=0; 33 | for m=1:model(1,i).totalSV 34 | wz=wz+model(1,i).sv_coef(m,1)*exp(-100*norm(model(1,i).SVs(n,1:2)-model(1,i).SVs(m,1:2))^2); 35 | end 36 | s=1-sign(model(1,i).sv_coef(n,1))*(wz-model(1,i).rho); 37 | violation(n,i)=s; 38 | end 39 | end 40 | v(1,i)=sum(violation(:,i)) 41 | end 42 | 43 | -------------------------------------------------------------------------------- /quiz1/Question18_2.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz1/Question18_2.m -------------------------------------------------------------------------------- /quiz1/Question19.m: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz1/Question19.m -------------------------------------------------------------------------------- /quiz1/Question2.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz1/Question2.m -------------------------------------------------------------------------------- /quiz1/Question20.m: -------------------------------------------------------------------------------- 1 | N=length(features_train); 2 | temp=features_train; 3 | count=[0,0,0,0,0]; 4 | for n=1:N 5 | if temp(n,1) ==0 6 | temp(n,1)=1; 7 | else 8 | temp(n,1)=-1; 9 | end 10 | end 11 | for k=1:10 12 | clear train 13 | clear test 14 | clear acc 15 | shuffle=randperm(N); 16 | for n=1:1000 17 | test(n,:)=temp(shuffle(1,n),:); 18 | end 19 | for n=1001:N 20 | train(n,:)=temp(shuffle(1,n),:); 21 | end 22 | clear model 23 | model(1,1)=svmtrain(train(:,1),train(:,2:3),'-t 2 -g 1 -c 0.1 -h 0'); 24 | model(1,2)=svmtrain(train(:,1),train(:,2:3),'-t 2 -g 10 -c 0.1 -h 0'); 25 | model(1,3)=svmtrain(train(:,1),train(:,2:3),'-t 2 -g 100 -c 0.1 -h 0'); 26 | model(1,4)=svmtrain(train(:,1),train(:,2:3),'-t 2 -g 1000 -c 0.1 -h 0'); 27 | model(1,5)=svmtrain(train(:,1),train(:,2:3),'-t 2 -g 10000 -c 0.1 -h 0'); 28 | for i=1:5 29 | [~,acc(i,1:3),~]=svmpredict(test(:,1),test(:,2:3),model(1,i)); 30 | end 31 | [~,c]=max(acc(:,1)); 32 | count(1,c)=count(1,c)+1; 33 | end 34 | 35 | 36 | -------------------------------------------------------------------------------- /quiz1/Question3.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz1/Question3.m -------------------------------------------------------------------------------- /quiz1/answer.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz1/answer.pdf -------------------------------------------------------------------------------- /quiz1/quiz1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz1/quiz1.pdf -------------------------------------------------------------------------------- /quiz1/quiz1.tex: -------------------------------------------------------------------------------- 1 | \documentclass[a4paper,10pt]{exam} 2 | \usepackage{amsfonts} 3 | \usepackage{amsmath} 4 | \usepackage{latexsym} 5 | \usepackage{enumitem} 6 | \usepackage[colorlinks,linkcolor=blue,anchorcolor=blue,citecolor=green]{hyperref} 7 | \title{Homework of Machine Learning Techniques: Quiz 1} 8 | \date{} 9 | \author{} 10 | \lhead{} 11 | \chead{Quiz 1} 12 | \rhead{} 13 | \headrule 14 | % \printanswers 15 | 16 | \begin{document} 17 | \maketitle 18 | \begin{questions} 19 | \question Recall that $N$ is the size of the data set and $d$ is the dimensionality of the input space. 
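% Reasoning note for Question 1: the primal soft-margin SVM optimizes over
% w in R^d, the bias b, and one slack variable xi_n per example, i.e.
% d + 1 + N variables, with a quadratic objective and linear constraints,
% so it is a quadratic programming problem with N + d + 1 variables.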
The primal formulation of the linear soft-margin support vector machine problem, without going through the Lagrangian dual problem, is 20 | \begin{checkboxes} 21 | \choice a quadratic programming problem with $N$ variables 22 | \choice a quadratic programming problem with $d+1$ variables 23 | \choice none of the other choices 24 | \choice a quadratic programming problem with $2N$ variables 25 | \CorrectChoice a quadratic programming problem with $N+d+1$ variables 26 | \end{checkboxes} 27 | 28 | \question Consider the following training data set: 29 | \[\mathbf{x}_1 = (1, 0), y_1 = -1 \quad \quad \mathbf{x}_2 = (0, 1), y_2 = -1 \quad \quad \mathbf{x}_3 = (0,-1), y_3=-1\] 30 | \[\mathbf{x}_4 = (-1, 0), y_4 = +1 \quad \quad \mathbf{x}_5 = (0, 2), y_5 = +1 \quad \quad \mathbf{x}_6 = (0,-2), y_6=+1\] 31 | \[\mathbf{x}_7 = (-2, 0), y_7 = +1\] 32 | Use following nonlinear transformation of the input vector $\mathbf{x} = (x_1, x_2)$ to the transformed vector $\mathbf{z} = (\phi_1(\mathbf{x}), \phi_2(\mathbf{x}))$: 33 | \[\phi_1(\mathbf{x}) = x_2^2 - 2x_1 + 3 \quad \quad \phi_2(\mathbf{x}) = x_1^2 - 2 x_2 - 3\] 34 | What is the equation of the optimal separating ``hyperplane'' in the $\mathcal{Z}$ space? 35 | \begin{checkboxes} 36 | \choice $z_1 + z_2 = 4.5$ 37 | \choice $z_1 - z_2 = 4.5$ 38 | \CorrectChoice $z_1 = 4.5$ 39 | \choice $z_2 = 4.5$ 40 | \choice none of the other choices 41 | \end{checkboxes} 42 | 43 | \question Consider the same training data set as Question \ref{question@2}, but instead of explicitly transforming the input space $\mathcal{X}$ to $\mathcal{Z}$, apply the hard-margin support vector machine algorithm with the kernel function 44 | \[K(\mathbf{x}, \mathbf{x}') = (1 + \mathbf{x}^T \mathbf{x}')^2,\] 45 | which corresponds to a second-order polynomial transformation. Set up the optimization problem using $(\alpha_1, \cdots, \alpha_7)$ and numerically solve for them (you can use any package you want). Which of the followings are true about the optimal ${\boldsymbol\alpha}$? 46 | \begin{checkboxes} 47 | \CorrectChoice $\sum_{n=1}^7 \alpha_n \approx 2.8148$ 48 | \CorrectChoice $\min_{1 \le n \le 7} \alpha_n = \alpha_7$ 49 | \choice there are 6 nonzero $\alpha_n$ 50 | \choice none of the other choices 51 | \choice $\max_{1 \le n \le 7} \alpha_n = \alpha_7$ 52 | \end{checkboxes} 53 | 54 | \question Following Question \ref{question@3}, what is the corresponding nonlinear curve in the $\mathcal{X}$ space? 55 | \begin{checkboxes} 56 | \CorrectChoice $\frac{1}{9}(8x_1^2-16x_1+6x_2^2 - 15) = 0$ 57 | \choice none of the other choices 58 | \choice $\frac{1}{9}(8x_2^2-16x_2+6x_1^2 + 15) = 0$ 59 | \choice $\frac{1}{9}(8x_2^2-16x_2+6x_1^2 - 15) = 0$ 60 | \choice $\frac{1}{9}(8x_1^2-16x_1+6x_2^2 + 15) = 0$ 61 | \end{checkboxes} 62 | 63 | \question Compare the two nonlinear curves found in Questions \ref{question@2} and \ref{question@4}, which of the following is true? 
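% Worked check for Questions 2 and 5: applying the explicit transform
% phi(x) = (x_2^2 - 2 x_1 + 3, x_1^2 - 2 x_2 - 3) to the seven points gives
%   z_1 = (1,-2), z_2 = (4,-5), z_3 = (4,-1)              (y = -1)
%   z_4 = (5,-2), z_5 = (7,-7), z_6 = (7,1), z_7 = (7,1)  (y = +1)
% so the classes are separated by the first coordinate alone, and the
% largest-margin separator sits halfway between 4 and 5: z_1 = 4.5.
% The kernel (1 + x^T x')^2 of Question 3 corresponds to the full second-order
% polynomial transform, a different Z space, which is why the Question 4 curve
% differs from this one in the X space (Question 5).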
64 | \begin{checkboxes} 65 | \choice none of the other choices 66 | \choice The curves should be the same in the $\mathcal{X}$ space, because they are learned with respect to the same $\mathcal{Z}$ space 67 | \CorrectChoice The curves should be different in the $\mathcal{X}$ space, because they are learned with respect to different $\mathcal{Z}$ spaces 68 | \choice The curves should be different in the $\mathcal{X}$ space, because they are learned from different raw data $\{(\mathbf{x}_n, y_n)\}$ 69 | \choice The curves should be the same in the $\mathcal{X}$ space, because they are learned from the same raw data $\{(\mathbf{x}_n, y_n)\}$ 70 | \end{checkboxes} 71 | 72 | \question Recall that for support vector machines, $d_{vc}$ is upper bounded by $\frac{R^2}{\rho^2}$, where $\rho$ is the margin and $R$ is the radius of the minimum hypersphere that $\mathcal{X}$ resides in. In general, $R$ should come from our knowledge on the learning problem, but we can estimate it by looking at the minimum hypersphere that the training examples resides in. In particular, we want to seek for the optimal $R$ that solves \[(P) \; \; \; \min_{R \in \mathbb{R}, \mathbf{c} \in \mathbb{R}^d} \; \; \; R^2 \; \; \; \mbox{subject to } \|\mathbf{x}_n - \mathbf{c}\|^2 \le R^2 \mbox{ for } n = 1, 2, \cdots, N.\] 73 | Let $\lambda_n$ be the Lagrange multipliers for the n-th constraint above. Following the derivation of the dual support vector machine in class, write down $(P)$ as an equivalent optimization problem \[\min_{R \in \mathbb{R}, \mathbf{c} \in \mathbb{R}^d} \;\;\; \max_{\lambda_n \ge 0} \;\;\; L(R, \mathbf{c}, {\boldsymbol\lambda}).\] What is $L(R, \mathbf{c}, {\boldsymbol\lambda})?$ 74 | 75 | \begin{checkboxes} 76 | \CorrectChoice $R^2 + \sum_{n=1}^N \lambda_n( \|\mathbf{x}_n - \mathbf{c}\|^2 - R^2)$ 77 | \choice $R^2 - \sum_{n=1}^N \lambda_n( \|\mathbf{x}_n - \mathbf{c}\|^2 - R^2)$ 78 | \choice $R^2 + \sum_{n=1}^N \lambda_n( \|\mathbf{x}_n - \mathbf{c}\|^2 + R^2)$ 79 | \choice $R^2 - \sum_{n=1}^N \lambda_n( \|\mathbf{x}_n - \mathbf{c}\|^2 + R^2)$ 80 | \choice none of the other choices 81 | \end{checkboxes} 82 | 83 | \question Using (assuming) strong duality, the solution to $(P)$ in Question 6 would be the same as the Lagrange dual problem 84 | \[(D) \; \; \; \max_{\lambda_n \ge 0} \;\;\; \min_{R \in \mathbb{R}, \mathbf{c} \in \mathbb{R}^d} \;\;\; L(R, \mathbf{c}, {\boldsymbol\lambda}).\] 85 | Which of the following can be derived from the KKT conditions of $(P)$ and $(D)$ at the optimal $(R, \mathbf{c}, {\boldsymbol\lambda})$? 86 | \begin{checkboxes} 87 | \CorrectChoice if $\sum_{n=1}^N \lambda_n \neq 0$, then $\mathbf{c} = \left(\sum_{n=1}^N \lambda_n \mathbf{x}_n\right) \Big/ \left(\sum_{n=1}^N \lambda_n\right)$ 88 | \choice if $\lambda_n = 0$, then $\|\mathbf{x}_n - \mathbf{c}\|^2 - R^2 = 0$ 89 | \CorrectChoice if $R \neq 0$, then $R \neq 0$ 90 | \choice none of the other choices 91 | \choice if $\|\mathbf{x}_n - \mathbf{c}\|^2 - R^2 < 0$, then $\lambda_n = 0$ 92 | \end{checkboxes} 93 | 94 | \question Continue from Question \ref{question@3} and assume that all the $\mathbf{x}_n$ are different, which implies that the optimal $R>0$. Using the KKT conditions to simplify the Lagrange dual problem, and obtain a dual problem that involves only $\lambda_n$. 
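% Derivation linking Questions 6-8 (using the Question 8 assumption R > 0):
%   dL/dR = 2R (1 - sum_n lambda_n) = 0        =>  sum_n lambda_n = 1
%   dL/dc = -2 sum_n lambda_n (x_n - c) = 0    =>  c = sum_n lambda_n x_n
% Substituting both back into L(R, c, lambda) cancels the R^2 terms and leaves
%   Objective(lambda) = sum_n lambda_n || x_n - sum_m lambda_m x_m ||^2,
% to be maximized over lambda_n >= 0 subject to sum_n lambda_n = 1.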
One form of the dual problem should look like 95 | \[(D') \; \; \; \max_{\lambda_n \ge 0} \; \; \; \mbox{Objective}(\boldsymbol\lambda) \; \; \; \mbox{subject to } \sum_{n=1}^N \lambda_n = \mbox{constant}\] 96 | Which of the following is $\mbox{Objective}(\boldsymbol\lambda)$? 97 | 98 | \begin{checkboxes} 99 | \choice $\sum_{n=1}^N \lambda_n (\|\mathbf{x}_n - \sum_{m=1}^N \lambda_m \mathbf{x}_m \|^2) + 2(\sum_{n=1}^N \lambda_n \mathbf{x}_n)^2$ 100 | \choice $\sum_{n=1}^N \lambda_n (\|\mathbf{x}_n + \sum_{m=1}^N \lambda_m \mathbf{x}_m \|^2) + 2(\sum_{n=1}^N \lambda_n \mathbf{x}_n)^2$ 101 | \choice $\sum_{n=1}^N \lambda_n (\|\mathbf{x}_n + \sum_{m=1}^N \lambda_m \mathbf{x}_m \|^2)$ 102 | \choice none of the other choices 103 | \CorrectChoice $\sum_{n=1}^N \lambda_n (\|\mathbf{x}_n - \sum_{m=1}^N \lambda_m \mathbf{x}_m \|^2)$ 104 | \end{checkboxes} 105 | 106 | \question Continue from Question \ref{question@8} and consider using $\mathbf{z}_n = {\boldsymbol\phi}(\mathbf{x}_n)$ instead of $\mathbf{x}_n$ while assuming that all the $\mathbf{z}_n$ are different. Then, write down the optimization problem that uses $K(\mathbf{x}_n, \mathbf{x}_m)$ to replace $\mathbf{z}_n^T \mathbf{z}_m$---that is, the kernel trick. Which of the following is $\mbox{Objective}(\boldsymbol\lambda)$ of $(D^\prime)$ after applying the kernel trick? 107 | \begin{checkboxes} 108 | \choice $\sum_{n=1}^N \lambda_n K(\mathbf{x}_n, \mathbf{x}_n) + 3\sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m)$ 109 | \CorrectChoice $\sum_{n=1}^N \lambda_n K(\mathbf{x}_n, \mathbf{x}_n) - 1\sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m$ 110 | \choice $\sum_{n=1}^N \lambda_n K(\mathbf{x}_n, \mathbf{x}_n) - 3\sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m)$ 111 | \choice none of the other choices 112 | \choice $\sum_{n=1}^N \lambda_n K(\mathbf{x}_n, \mathbf{x}_n) + 1\sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m$ 113 | \end{checkboxes} 114 | 115 | \question Continue from Question \ref{question@9} and solve the $(D^\prime)$ that involves the kernel $K$, which of the following formula evaluates the optimal $R$? 116 | 117 | \begin{checkboxes} 118 | \choice Pick some $i$ with $\lambda_i > 0$, and $R = \sqrt{K(\mathbf{x}_i, \mathbf{x}_i) + 2 \sum_{m=1}^N \lambda_m K(\mathbf{x}_i, \mathbf{x}_m) + \sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m)}$ 119 | \choice none of the other choices 120 | \choice Pick some $i$ with $\lambda_i = 0$, and $R = \sqrt{K(\mathbf{x}_i, \mathbf{x}_i) - 2 \sum_{m=1}^N \lambda_m K(\mathbf{x}_i, \mathbf{x}_m) + \sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m)}$ 121 | \choice Pick some $i$ with $\lambda_i = 0$, and $R = \sqrt{K(\mathbf{x}_i, \mathbf{x}_i) + 2 \sum_{m=1}^N \lambda_m K(\mathbf{x}_i, \mathbf{x}_m) + \sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m)}$ 122 | \CorrectChoice Pick some $i$ with $\lambda_i > 0$, and $R = \sqrt{K(\mathbf{x}_i, \mathbf{x}_i) - 2 \sum_{m=1}^N \lambda_m K(\mathbf{x}_i, \mathbf{x}_m) + \sum_{n=1}^N \sum_{m=1}^N \lambda_n \lambda_m K(\mathbf{x}_n, \mathbf{x}_m)}$ 123 | \end{checkboxes} 124 | 125 | \question In the class, we taught the soft-margin support vector machine as follows. 
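% Worked note for Questions 9-10: with sum_n lambda_n = 1, expanding
% sum_n lambda_n || z_n - sum_m lambda_m z_m ||^2 and replacing every inner
% product z_n^T z_m by K(x_n, x_m) gives
%   sum_n lambda_n K(x_n,x_n) - sum_n sum_m lambda_n lambda_m K(x_n,x_m).
% Complementary slackness forces || z_i - c ||^2 = R^2 whenever lambda_i > 0,
% so R is recovered from any such i as
%   R = sqrt( K(x_i,x_i) - 2 sum_m lambda_m K(x_i,x_m)
%             + sum_n sum_m lambda_n lambda_m K(x_n,x_m) ).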
126 | \begin{eqnarray*} 127 | (P_1) \; \; \; \min_{\mathbf{w}, b, \boldsymbol\xi} && \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N \xi_n\\ 128 | \mbox{subject to} && y_n \Bigl( {\mathbf{w}^T \mathbf{x}_n} +b \Bigr) \ge 1 - \xi_n,\\ 129 | && \xi_n \ge 0. 130 | \end{eqnarray*} 131 | The support vector machine (called $\ell_1$ loss) penalizes the margin violation linearly. Another popular formulation (called $\ell_2$ loss) penalizes the margin violation quadratically. In this problem, we show one simple approach for deriving the dual of such a formulation. The formulation is as follows. 132 | \begin{eqnarray*} 133 | (P_2') \; \; \; \min_{\mathbf{w}, b, \boldsymbol\xi} && \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N \xi_n^2\\ 134 | \mbox{subject to} && y_n \Bigl( {\mathbf{w}^T \mathbf{x}_n} +b \Bigr) \ge 1 - \xi_n,\\ 135 | && \xi_n \ge 0. 136 | \end{eqnarray*} 137 | 138 | It is not hard to see that the constraints $\xi_n \ge 0$ are not necessary for the new formulation. In other words, the formulation $(P_2')$ is equivalent to the following optimization problem. 139 | \begin{eqnarray*} 140 | (P_2) \; \; \; \min_{\mathbf{w}, b, \boldsymbol\xi} && \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N \xi_n^2\\ 141 | \mbox{subject to} && y_n \Bigl( {\mathbf{w}^T \mathbf{x}_n} +b \Bigr) \ge 1 - \xi_n. 142 | \end{eqnarray*} 143 | Problem $(P_2)$ is equivalent to a linear hard-margin support vector machine (primal problem) that takes examples $(\tilde{\mathbf{x}}_n, y_n)$ instead of $(\mathbf{x}_n, y_n)$. That is, the hard-margin dual problem that involves $\tilde{\mathbf{x}}_n$ is simply the dual problem of $(P_2)$. Use $[[ \cdot ]]$ to denote the boolean function which evaluates to 1 if and only iff the inner condition is true. Which of the following is $\tilde{\mathbf{x}}_n$? (Hint: $\tilde{\mathbf{w}} = (\mathbf{w}, \textrm{constant} \cdot \boldsymbol\xi)$) 144 | 145 | \begin{checkboxes} 146 | \CorrectChoice $\tilde{\mathbf{x}}_n = (\mathbf{x}_n, v_1, v_2, \cdots, v_N)$, where $v_i = \frac{1}{\sqrt{2C}} [[ i = n ]]$ 147 | \choice $\tilde{\mathbf{x}}_n = (\mathbf{x}_n, v, v, \cdots, v)$, where there are $N$ components of $v = \frac{1}{\sqrt{2C}}$ 148 | \choice $\tilde{\mathbf{x}}_n = (\mathbf{x}_n, v_1, v_2, \cdots, v_N)$, where $v_i = \frac{1}{\sqrt{C}} [[ i = n ]]$ 149 | \choice $\tilde{\mathbf{x}}_n = (\mathbf{x}_n, v, v, \cdots, v)$, where there are $N$ components of $v = \frac{1}{\sqrt{C}}$ 150 | \choice none of the other choices 151 | \end{checkboxes} 152 | 153 | \question Let $K_1(\mathbf{x}, \mathbf{x}') = {\boldsymbol\phi}_1(\mathbf{x}) ^T {\boldsymbol\phi}_1(\mathbf{x}')$ and $K_2(\mathbf{x}, \mathbf{x}') = {\boldsymbol\phi}_2(\mathbf{x}) ^T {\boldsymbol\phi}_2(\mathbf{x}')$ be two valid kernels. Which of the followings are always valid kernels, assuming that $K_2(\mathbf{x}, \mathbf{x}') \neq 0$ for all $x$ and $\mathbf{x}'$? 
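% Reasoning note for Question 12: K_1 + K_2 is the kernel of the concatenated
% feature map (phi_1, phi_2) and K_1 * K_2 is the kernel of the tensor-product
% map, so both stay valid.  The difference is not: taking K_2 = 2 K_1 (still a
% valid kernel), K_1 - K_2 = -K_1 fails to be positive semidefinite unless K_1
% is identically zero, and the ratio K_1 / K_2 can fail in the same way.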
154 | \begin{checkboxes} 155 | \CorrectChoice $K(\mathbf{x}, \mathbf{x}') = K_1(\mathbf{x}, \mathbf{x}') \cdot K_2(\mathbf{x}, \mathbf{x}')$ 156 | \choice none of the other choices 157 | \choice $K(\mathbf{x}, \mathbf{x}') = K_1(\mathbf{x}, \mathbf{x}') - K_2(\mathbf{x}, \mathbf{x}')$ 158 | \CorrectChoice $K(\mathbf{x}, \mathbf{x}') = K_1(\mathbf{x}, \mathbf{x}') + K_2(\mathbf{x}, \mathbf{x}')$ 159 | \choice $K(\mathbf{x}, \mathbf{x}') = K_1(\mathbf{x}, \mathbf{x}') / K_2(\mathbf{x}, \mathbf{x}')$ 160 | \end{checkboxes} 161 | 162 | \question Let $K_1(\mathbf{x}, \mathbf{x}') = {\boldsymbol\phi}_1(\mathbf{x}) ^T {\boldsymbol\phi}_1(\mathbf{x}')$ be a valid kernel. Which of the followings are always valid kernels? 163 | \begin{checkboxes} 164 | \choice $K(\mathbf{x}, \mathbf{x}') = (1 - K_1(\mathbf{x}, \mathbf{x}'))^{2}$ 165 | \choice none of the other choices 166 | \choice $K(\mathbf{x}, \mathbf{x}') = \exp(-K_1(\mathbf{x}, \mathbf{x}'))$ 167 | \CorrectChoice $K(\mathbf{x}, \mathbf{x}') = 1126 \cdot K_1(\mathbf{x}, \mathbf{x}')$ 168 | \CorrectChoice $K(\mathbf{x}, \mathbf{x}') = (1 - K_1(\mathbf{x}, \mathbf{x}'))^{-1}$, assuming that $0 < K_1(\mathbf{x}, \mathbf{x}') < 1$ 169 | \end{checkboxes} 170 | 171 | \question For a given valid kernel $K$, consider a new kernel $\tilde{K}(\mathbf{x}, \mathbf{x}') = p K(\mathbf{x}, \mathbf{x}') + q$ for some $p>0$ and $q>0$. Which of the following statement is true? 172 | \begin{checkboxes} 173 | \choice For the dual of soft-margin support vector machine, using $\tilde{K}$ along with a new $\tilde{C} = pC + q$ instead of $K$ with the original $C$ leads to an equivalent $g_{SVM}$ classifier. 174 | \choice For the dual of soft-margin support vector machine, using $\tilde{K}$ along with a new $\tilde{C} = \frac{C}{p} + q$ instead of $K$ with the original $C$ leads to an equivalent $g_{SVM}$ classifier. 175 | \choice none of the other choices 176 | \choice For the dual of soft-margin support vector machine, using $\tilde{K}$ along with a new $\tilde{C} = pC$ instead of $K$ with the original $C$ leads to an equivalent $g_{SVM}$ classifier. 177 | \choice For the dual of soft-margin support vector machine, using $\tilde{K}$ along with a new $\tilde{C} = \frac{C}{p} + q$ instead of $K$ with the original $C$ leads to an equivalent $g_{SVM}$ classifier. 178 | \CorrectChoice For the dual of soft-margin support vector machine, using $\tilde{K}$ along with a new $\tilde{C} = \frac{C}{p}$ instead of $K$ with the original $C$ leads to an equivalent $g_{SVM}$\\ classifier. 179 | \end{checkboxes} 180 | 181 | \question For Questions \ref{question@15} to \ref{question@20}, we are going to experiment with a real-world data set. Download the processed US Postal Service Zip Code data set with extracted features of intensity and symmetry for training and testing: 182 | 183 | \url{http://www.amlbook.com/data/zip/features.train} 184 | 185 | \url{http://www.amlbook.com/data/zip/features.test} 186 | 187 | The format of each row is 188 | 189 | digit intensity symmetry 190 | 191 | We will consider binary classification problems of the form "one of the digits" (as the positive class) versus "other digits" (as the negative class). 192 | 193 | The training set contains thousands of examples, and some quadratic programming packages cannot handle this size. 
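% Practical note for Questions 15-20: the accompanying scripts (quiz1/Question15.m
% and friends) build the "digit d versus not d" labels from the first column of
% features.train / features.test; an equivalent vectorized form of their loops is
%   y = 2 * (features_train(:,1) == d) - 1;   % +1 for digit d, -1 otherwise
% after which y and columns 2:3 (intensity, symmetry) are passed to LIBSVM's
% svmtrain / svmpredict.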
We recommend that you consider the LIBSVM package 194 | 195 | \url{http://www.csie.ntu.edu.tw/~cjlin/libsvm/} 196 | 197 | Regardless of the package that you choose to use, please read the manual of the package carefully to make sure that you are indeed solving the soft-margin support vector machine taught in class like the dual formulation below: 198 | \begin{eqnarray*} 199 | \min_{\boldsymbol\alpha} && 200 | \frac{1}{2} \sum_{n=1}^N \sum_{m=1}^N \alpha_n \alpha_m y_n y_m K(\mathbf{x}_n, \mathbf{x}_m) 201 | - \sum_{n=1}^N \alpha_n \\ 202 | \mathrm{s.t.} && \sum_{n=1}^N y_n \alpha_n = 0 \\ 203 | && 0 \leq \alpha_n \leq C \ \ \ n=1,\cdots,N 204 | \end{eqnarray*} 205 | In the following questions, please use the 0/1 error for evaulating $E_{\rm in}$, $E_{\rm val}$ and $E_{\rm out}$ (through the test set). Some practical remarks include 206 | \begin{enumerate}[label=(\roman*)] 207 | \item Please tell your chosen package to \textbf {not} automatically scale the data for you, lest you should change the effective kernel and get different results. 208 | \item It is your responsibility to check whether your chosen package solves the designated formulation with enough numerical precision. Please read the manual of your chosen package for software parameters whose values affect the outcome---any ML practitioner needs to deal with this kind of added uncertainty. 209 | Consider the linear soft-margin SVM. That is, either solve the primal formulation of soft-margin SVM with the given $\mathbf{x}_n$, or take the linear kernel $K(\mathbf{x}_n,\mathbf{x}_m) = \mathbf{x}_n^{\mathrm{T}} \mathbf{x}_m$ in the dual formulation. With $C=0.01$, and the binary classification problem of ``0'' versus ``not 0'', which of the following numbers is closest to $\|\mathbf{w}\|$ after solving the linear soft-margin SVM? 210 | \end{enumerate} 211 | \begin{checkboxes} 212 | \choice 0.2 213 | \CorrectChoice 0.6 214 | \choice 1.4 215 | \choice 1.8 216 | \choice 1.0 217 | \end{checkboxes} 218 | 219 | \question Consider the polynomial kernel $K(\mathbf{x}_n,\mathbf{x}_m) = (1+ \mathbf{x}_n^{\mathrm{T}} \mathbf{x}_m)^Q$, where $Q$ is the degree of the polynomial. With $C=0.01$, $Q=2$, which of the following soft-margin SVM classifiers reaches the lowest $E_{\rm in}$? 220 | \begin{checkboxes} 221 | \choice ''0'' versus ''not 0'' 222 | \choice ''2'' versus ''not 2'' 223 | \choice ''4'' versus ''not 4'' 224 | \choice ''6'' versus ''not 6'' 225 | \CorrectChoice ''8'' versus ''not 8'' 226 | \end{checkboxes} 227 | 228 | \question Following Question \ref{question@16}, which of the following numbers is closest to the maximum $\sum_{n=1}^N \alpha_n$ within those five soft-margin SVM classifiers? 229 | \begin{checkboxes} 230 | \choice 10.0 231 | \choice 25.0 232 | \CorrectChoice 20.0 233 | \choice 15.0 234 | \choice 5.0 235 | \end{checkboxes} 236 | 237 | \question Consider the Gaussian kernel $K(\mathbf{x}_n,\mathbf{x}_m)= \exp\left(-\gamma ||\mathbf{x}_n - \mathbf{x}_m||^2 \right)$. With $\gamma = 100$, and the binary classification problem of ''0'' versus ''not 0''. Consider values of $C$ within $\{0.001, 0.01, 0.1, 1, 10\}$. Which of the following properties of the soft-margin SVM classifier strictly decreases with those five $C$? 
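% Reasoning behind the marked choices in Questions 15 and 18: for the linear
% kernel, Question15.m recovers the weights as w = model.SVs' * model.sv_coef
% and compares norm(w) with the listed values.  For Question 18, raising C
% penalizes violations more heavily, so sum_n xi_n shrinks and the margin
% 1/||w|| (the distance of any free support vector to the hyperplane in the
% Z space) shrinks with it; and since the dual formulation given above is a
% minimization over 0 <= alpha_n <= C, enlarging C enlarges the feasible
% region, so the optimal dual objective value also decreases.  Neither the
% number of support vectors nor E_out needs to behave monotonically.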
238 | \begin{checkboxes} 239 | \CorrectChoice the distance of any free support vector to the hyperplane in the (infinite-dimensional) $\mathcal{Z}$ space 240 | \CorrectChoice $\sum_{n=1}^N \xi_n$ 241 | \choice $E_{\rm out}$ 242 | \choice number of support vectors 243 | \CorrectChoice objective value of the dual problem 244 | \end{checkboxes} 245 | 246 | \question Following Question 18, when fixing $C=0.1$, which of the following values of $\gamma$ results in the lowest $E_{\rm out}$? 247 | \begin{checkboxes} 248 | \choice 10000 249 | \choice 1 250 | \choice 100 251 | \CorrectChoice 10 252 | \choice 1000 253 | \end{checkboxes} 254 | 255 | \question Following Question 18 and consider a validation procedure that randomly samples 1000 examples from the training set for validation and leaves the other examples for training $g^{-}_{SVM}$. Fix $C=0.1$ and use the validation procedure to choose the best $\gamma$ among $\{1, 10, 100, 1000, 10000\}$ according to $E_{\rm val}$. If there is a tie of $E_{\rm val}$, choose the smallest $\gamma$. Repeat the procedure 100 times. Which of the following values of $\gamma$ is selected the most number of times? 256 | 257 | \begin{checkboxes} 258 | \choice 1000 259 | \CorrectChoice 10 260 | \choice 100 261 | \choice 10000 262 | \choice 1 263 | \end{checkboxes} 264 | \end{questions} 265 | \clearpage 266 | \end{document} -------------------------------------------------------------------------------- /quiz2/Question12.m: -------------------------------------------------------------------------------- 1 | train1=sortrows(train_set,1); 2 | train2=sortrows(train_set,2); 3 | N=length(train_set); 4 | theta1=-Inf; 5 | theta2=-Inf; 6 | for n=2:N 7 | theta1(n,1)=train1(n-1,1)+train1(n,1); 8 | theta2(n,1)=train2(n-1,2)+train2(n,1); 9 | end 10 | theta=[theta1;theta1;theta2;theta2]; 11 | s=[[ones(N,1),zeros(N,1)]; 12 | [-1*ones(N,1),zeros(N,1)]; 13 | [zeros(N,1),ones(N,1)]; 14 | [zeros(N,1),-1*ones(N,1)];]; 15 | 16 | for n=1:4*N 17 | predict(n,:)=abs(sign(train_set(:,3)'-sign(s(n,1)*(train_set(:,1)'-theta(n,1)')+s(n,2)*(train_set(:,2)'-theta(n,1)')))); 18 | end 19 | t=1/N; 20 | u=ones(1,N)*t; 21 | for T=1:300 22 | U(T,1)=sum(u); 23 | search=(predict*u')'; 24 | [inc,pos(T,1)]=min(search); 25 | et=inc/sum(u); 26 | etm(T,1)=et; 27 | t(T,1)=sqrt((1-et)/et); 28 | mul=predict(pos(T,1),:)*t(T,1)+abs(predict(pos(T,1),:)-ones(1,N))/t(T,1); 29 | u=u.*mul; 30 | end 31 | 32 | 33 | for T=1:300 34 | n=pos(T,1); 35 | pred(T,:)=sign(s(n,1)*(train_set(:,1)'-theta(n,1)')+s(n,2)*(train_set(:,2)'-theta(n,1)')); 36 | end 37 | 38 | Gpred=sign(log(t)'*pred); 39 | 40 | g1pretest=sign(s(pos(1,1),1)*(test_set(:,1)'-theta(pos(1,1),1)')+s(pos(1,1),2)*(test_set(:,2)'-theta(pos(1,1),1)')); 41 | 42 | for T=1:300 43 | n=pos(T,1); 44 | Gpredt(T,:)=sign(s(n,1)*(test_set(:,1)'-theta(n,1)')+s(n,2)*(test_set(:,2)'-theta(n,1)')); 45 | end 46 | 47 | Gpredtest=sign(log(t)'*Gpredt); 48 | -------------------------------------------------------------------------------- /quiz2/Question18.m: -------------------------------------------------------------------------------- 1 | gamma=[32,2,0.125]; 2 | lambda=[0.001,1,1000]; 3 | N=length(lssvm_all); 4 | 5 | train=lssvm_all(1:400,:); 6 | test=lssvm_all(401:N,:); 7 | 8 | for i=1:3 9 | for n=1:400 10 | for m=1:400 11 | K(n,m)=exp(-gamma(1,i)*norm(train(n,1:10)-train(m,1:10))^2); 12 | end 13 | for m=1:100 14 | Kout(m,n)=exp(-gamma(1,i)*norm(train(n,1:10)-test(m,1:10))^2); 15 | end 16 | end 17 | for j=1:3 18 | beta=inv(lambda(1,j)*eye(400)+K)*train(:,11); 19 | 
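% Note on the step below: beta solves the regularized kernel system
% (lambda*I + K) * beta = y, i.e. the LS-SVM / kernel ridge solution; using
% the backslash operator, beta = (lambda(1,j)*eye(400) + K) \ train(:,11),
% would be numerically preferable to inv(...).  The following lines score the
% split: in-sample predictions are sign(K*beta), out-of-sample predictions are
% sign(Kout*beta), and the 0/1 error rates are stored in ein(i,j) and eout(i,j).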
in=sign(K*beta); 20 | ein(i,j)=sum(abs(sign(in-train(:,11))))/400; 21 | out=sign(Kout*beta); 22 | eout(i,j)=sum(abs(sign(out-test(:,11))))/100; 23 | end 24 | end 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /quiz2/Question7.m: -------------------------------------------------------------------------------- 1 | for i=1:1000 2 | x=rand(1,2); 3 | y=x.^2; 4 | p(i,:)=polyfit(x,y,1); 5 | end 6 | 7 | coef=mean(p); -------------------------------------------------------------------------------- /quiz2/answer.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz2/answer.pdf -------------------------------------------------------------------------------- /quiz2/hw2_adaboost_test.dat: -------------------------------------------------------------------------------- 1 | 0.98425 0.71261 -1 2 | 0.901491 0.462824 -1 3 | 0.872418 0.365547 -1 4 | 0.810913 0.0583381 -1 5 | 0.57723 0.203007 -1 6 | 0.397929 0.56668 1 7 | 0.120766 0.440112 1 8 | 0.62189 0.905915 1 9 | 0.870767 0.0534751 -1 10 | 0.260869 0.471334 1 11 | 0.848156 0.510922 -1 12 | 0.0672175 0.177358 1 13 | 0.359852 0.5639 1 14 | 0.254761 0.8645 1 15 | 0.145275 0.238116 1 16 | 0.64945 0.147829 -1 17 | 0.350556 0.0583089 -1 18 | 0.961156 0.62802 -1 19 | 0.52374 0.0176725 -1 20 | 0.751678 0.276548 -1 21 | 0.0121179 0.975133 1 22 | 0.575086 0.776408 1 23 | 0.597629 0.2315 -1 24 | 0.81701 0.64801 -1 25 | 0.67222 0.130962 -1 26 | 0.884317 0.598321 -1 27 | 0.907254 0.814164 -1 28 | 0.144621 0.904808 1 29 | 0.182951 0.38095 1 30 | 0.801427 0.461995 -1 31 | 0.0576698 0.695542 1 32 | 0.370336 0.718916 1 33 | 0.284968 0.48784 1 34 | 0.747471 0.346059 -1 35 | 0.0989988 0.316252 1 36 | 0.802947 0.0723942 -1 37 | 0.793592 0.422394 -1 38 | 0.555778 0.157421 -1 39 | 0.240685 0.428812 1 40 | 0.0674069 0.924555 1 41 | 0.644513 0.485768 -1 42 | 0.0234474 0.918435 1 43 | 0.0589404 0.137337 1 44 | 0.123131 0.993385 1 45 | 0.478083 0.0916119 -1 46 | 0.688719 0.29749 -1 47 | 0.863305 0.746106 -1 48 | 0.473523 0.62228 1 49 | 0.0463453 0.492613 1 50 | 0.348141 0.283225 -1 51 | 0.65955 0.126514 1 52 | 0.797067 0.280259 1 53 | 0.324693 0.158296 1 54 | 0.522975 0.301249 1 55 | 0.827513 0.104348 1 56 | 0.565372 0.840484 -1 57 | 0.509603 0.973013 -1 58 | 0.364329 0.207358 1 59 | 0.391106 0.720774 -1 60 | 0.804306 0.037063 1 61 | 0.326048 0.63548 -1 62 | 0.439476 0.685896 -1 63 | 0.399432 0.0353415 1 64 | 0.695592 0.00316783 1 65 | 0.58939 0.135341 1 66 | 0.619188 0.869285 -1 67 | 0.516558 0.262236 1 68 | 0.378535 0.555107 -1 69 | 0.45361 0.54611 -1 70 | 0.0632582 0.589237 -1 71 | 0.440477 0.316381 1 72 | 0.970637 0.392746 1 73 | 0.791685 0.393631 1 74 | 0.692567 0.984726 -1 75 | 0.777368 0.0548523 1 76 | 0.768752 0.376716 1 77 | 0.851107 0.751755 1 78 | 0.781649 0.564476 1 79 | 0.979728 0.294085 1 80 | 0.105791 0.906713 -1 81 | 0.426273 0.0884706 1 82 | 0.517458 0.303076 1 83 | 0.24519 0.892614 -1 84 | 0.0351179 0.67456 -1 85 | 0.975662 0.650291 1 86 | 0.740105 0.450251 1 87 | 0.190567 0.134182 1 88 | 0.1405 0.953379 -1 89 | 0.669583 0.888495 -1 90 | 0.361944 0.465567 -1 91 | 0.0725909 0.843209 -1 92 | 0.440298 0.308886 1 93 | 0.135841 0.399257 -1 94 | 0.808974 0.755097 1 95 | 0.933736 0.0943433 1 96 | 0.0044835 0.954192 -1 97 | 0.667539 0.00140023 1 98 | 0.694606 0.280626 1 99 | 0.290942 0.0460318 1 100 | 0.851022 0.420603 1 101 | 0.279261 0.800354 -1 102 | 0.0255174 
0.917845 -1 103 | 0.0886418 0.556592 -1 104 | 0.757991 0.813873 -1 105 | 0.725832 0.534527 1 106 | 0.794236 0.713333 1 107 | 0.642727 0.00876884 1 108 | 0.834145 0.452946 1 109 | 0.0332715 0.287813 -1 110 | 0.435201 0.292522 1 111 | 0.237185 0.968602 -1 112 | 0.506852 0.198364 1 113 | 0.886797 0.494452 1 114 | 0.316651 0.214269 1 115 | 0.619185 0.538838 1 116 | 0.0249148 0.998255 -1 117 | 0.197439 0.125272 1 118 | 0.792215 0.956943 -1 119 | 0.235149 0.117684 1 120 | 0.838299 0.176926 1 121 | 0.321171 0.664178 -1 122 | 0.687561 0.121761 1 123 | 0.049748 0.127254 -1 124 | 0.747594 0.626266 1 125 | 0.524508 0.0260289 1 126 | 0.896135 0.985921 -1 127 | 0.275284 0.562364 -1 128 | 0.407732 0.355143 1 129 | 0.108068 0.721398 -1 130 | 0.966608 0.900041 1 131 | 0.460886 0.363163 1 132 | 0.080335 0.708263 -1 133 | 0.730843 0.571968 1 134 | 0.481448 0.786495 -1 135 | 0.311683 0.612242 -1 136 | 0.765848 0.357371 1 137 | 0.794206 0.858117 -1 138 | 0.291031 0.214448 1 139 | 0.507704 0.80711 -1 140 | 0.413407 0.997037 -1 141 | 0.149819 0.317245 -1 142 | 0.570457 0.724862 -1 143 | 0.940578 0.63141 1 144 | 0.102371 0.444094 -1 145 | 0.904982 0.473356 1 146 | 0.522942 0.951667 -1 147 | 0.405989 0.955523 -1 148 | 0.144762 0.746277 -1 149 | 0.235169 0.415446 -1 150 | 0.475 0.0380786 1 151 | 0.506144 0.603787 -1 152 | 0.336919 0.586477 -1 153 | 0.784459 0.337423 1 154 | 0.887597 0.836191 1 155 | 0.747387 0.593304 1 156 | 0.386111 0.642815 -1 157 | 0.62397 0.280954 1 158 | 0.65905 0.0709459 1 159 | 0.121093 0.329021 -1 160 | 0.521382 0.223003 1 161 | 0.294946 0.378603 -1 162 | 0.549646 0.856066 -1 163 | 0.946428 0.152346 1 164 | 0.455593 0.149971 1 165 | 0.031472 0.533226 -1 166 | 0.932962 0.573136 1 167 | 0.928733 0.153327 1 168 | 0.340392 0.653682 -1 169 | 0.24699 0.851744 -1 170 | 0.797517 0.743156 1 171 | 0.536003 0.251338 1 172 | 0.584401 0.298964 1 173 | 0.700554 0.174036 1 174 | 0.0877768 0.4751 -1 175 | 0.87895 0.310345 1 176 | 0.945791 0.265825 1 177 | 0.0917645 0.935176 -1 178 | 0.853588 0.379708 1 179 | 0.913926 0.360452 1 180 | 0.0107741 0.613204 -1 181 | 0.547499 0.0650814 1 182 | 0.667828 0.194976 1 183 | 0.365888 0.737437 -1 184 | 0.666837 0.54639 1 185 | 0.774711 0.190221 1 186 | 0.546902 0.851306 -1 187 | 0.650323 0.402544 1 188 | 0.264294 0.567513 -1 189 | 0.847845 0.501569 1 190 | 0.455493 0.175852 1 191 | 0.650041 0.0776052 1 192 | 0.841794 0.440889 1 193 | 0.306676 0.029027 1 194 | 0.380761 0.463235 -1 195 | 0.526542 0.579972 -1 196 | 0.421672 0.656843 -1 197 | 0.135865 0.0325065 1 198 | 0.757162 0.674405 1 199 | 0.0309847 0.210858 -1 200 | 0.408255 0.613713 -1 201 | 0.0353992 0.627662 -1 202 | 0.977008 0.733742 1 203 | 0.546223 0.0534599 1 204 | 0.914148 0.811989 1 205 | 0.364086 0.41507 -1 206 | 0.89023 0.379638 1 207 | 0.242267 0.359147 -1 208 | 0.913597 0.527621 1 209 | 0.899832 0.280649 1 210 | 0.428591 0.347814 1 211 | 0.643769 0.331596 1 212 | 0.539217 0.648713 -1 213 | 0.899786 0.63122 1 214 | 0.203458 0.992492 -1 215 | 0.0036662 0.947143 -1 216 | 0.886552 0.33283 1 217 | 0.273366 0.183348 1 218 | 0.232621 0.923597 -1 219 | 0.658871 0.274257 1 220 | 0.289298 0.573316 -1 221 | 0.78434 0.923358 -1 222 | 0.889504 0.0855461 1 223 | 0.948104 0.244811 1 224 | 0.371256 0.455696 -1 225 | 0.811254 0.548837 1 226 | 0.149258 0.904545 -1 227 | 0.643907 0.207314 1 228 | 0.664036 0.96868 -1 229 | 0.0815755 0.462282 -1 230 | 0.504621 0.795581 -1 231 | 0.894138 0.498972 1 232 | 0.250483 0.364713 -1 233 | 0.223046 0.460014 -1 234 | 0.847674 0.682007 1 235 | 0.470763 0.920401 -1 236 | 0.144748 
0.895527 -1 237 | 0.26923 0.127971 1 238 | 0.675056 0.113917 1 239 | 0.534801 0.89878 -1 240 | 0.277237 0.670431 -1 241 | 0.64119 0.0382642 1 242 | 0.73011 0.47948 1 243 | 0.266694 0.899731 -1 244 | 0.44166 0.366442 1 245 | 0.394095 0.277449 1 246 | 0.792322 0.222335 1 247 | 0.128713 0.240901 -1 248 | 0.906966 0.361291 1 249 | 0.275154 0.615051 -1 250 | 0.52458 0.254967 1 251 | 0.794411 0.591321 1 252 | 0.643209 0.814339 -1 253 | 0.192321 0.0987727 1 254 | 0.661789 0.742778 -1 255 | 0.122562 0.666161 -1 256 | 0.948649 0.450922 1 257 | 0.842091 0.936267 -1 258 | 0.539254 0.0916215 1 259 | 0.695438 0.569152 1 260 | 0.977954 0.221886 1 261 | 0.169187 0.0307119 1 262 | 0.764631 0.550866 1 263 | 0.785699 0.850088 -1 264 | 0.792359 0.529052 1 265 | 0.181339 0.847889 -1 266 | 0.457209 0.194705 1 267 | 0.0137339 0.640192 -1 268 | 0.359021 0.16532 1 269 | 0.127967 0.325063 -1 270 | 0.477992 0.0175778 1 271 | 0.543661 0.716422 -1 272 | 0.752422 0.954487 -1 273 | 0.39875 0.761559 -1 274 | 0.50591 0.768891 -1 275 | 0.929419 0.514985 1 276 | 0.904128 0.26796 1 277 | 0.567592 0.232631 1 278 | 0.906068 0.272002 1 279 | 0.0492354 0.964978 -1 280 | 0.818172 0.219161 1 281 | 0.318159 0.560455 -1 282 | 0.267249 0.194758 1 283 | 0.831282 0.412339 1 284 | 0.380823 0.135101 1 285 | 0.157197 0.406805 -1 286 | 0.0267269 0.803883 -1 287 | 0.957601 0.772175 1 288 | 0.646276 0.908601 -1 289 | 0.867534 0.513357 1 290 | 0.942139 0.77697 1 291 | 0.646184 0.722996 -1 292 | 0.921553 0.0410484 1 293 | 0.293857 0.138649 1 294 | 0.891337 0.221669 1 295 | 0.924041 0.234366 1 296 | 0.33506 0.161769 1 297 | 0.0697214 0.273304 -1 298 | 0.779029 0.261688 1 299 | 0.0807046 0.954026 -1 300 | 0.742326 0.329337 1 301 | 0.475495 0.282456 1 302 | 0.668686 0.384205 1 303 | 0.161027 0.337126 -1 304 | 0.504877 0.769446 -1 305 | 0.652111 0.341757 1 306 | 0.0133912 0.168655 -1 307 | 0.446853 0.636323 -1 308 | 0.89201 0.337743 1 309 | 0.108327 0.394791 -1 310 | 0.470414 0.0650639 1 311 | 0.62657 0.873804 -1 312 | 0.280034 0.485001 -1 313 | 0.95285 0.0737346 1 314 | 0.659572 0.909675 -1 315 | 0.196466 0.317611 -1 316 | 0.527024 0.633184 -1 317 | 0.885315 0.628765 1 318 | 0.579965 0.190023 1 319 | 0.714717 0.961987 -1 320 | 0.900156 0.631851 1 321 | 0.849752 0.514235 1 322 | 0.124314 0.87552 -1 323 | 0.179696 0.579195 -1 324 | 0.195958 0.0203282 1 325 | 0.027457 0.508547 -1 326 | 0.00863065 0.817586 -1 327 | 0.566629 0.749209 -1 328 | 0.628597 0.543689 1 329 | 0.922977 0.00853561 1 330 | 0.746896 0.682366 1 331 | 0.28609 0.476955 -1 332 | 0.470796 0.676636 -1 333 | 0.245255 0.765548 -1 334 | 0.696212 0.814502 -1 335 | 0.160923 0.0276978 1 336 | 0.708739 0.790826 -1 337 | 0.8193 0.538983 1 338 | 0.757972 0.0723108 1 339 | 0.370286 0.239041 1 340 | 0.605109 0.126782 1 341 | 0.445504 0.58625 -1 342 | 0.0829018 0.563932 -1 343 | 0.837282 0.0654754 1 344 | 0.651647 0.989009 -1 345 | 0.303959 0.238541 1 346 | 0.971009 0.299699 1 347 | 0.174585 0.335624 -1 348 | 0.334944 0.0399371 1 349 | 0.390626 0.551506 -1 350 | 0.224665 0.293221 -1 351 | 0.185303 0.438274 -1 352 | 0.67011 0.950137 -1 353 | 0.171521 0.923317 -1 354 | 0.546216 0.115181 1 355 | 0.281363 0.612348 -1 356 | 0.489643 0.34167 1 357 | 0.928971 0.683342 1 358 | 0.205095 0.547374 -1 359 | 0.000958435 0.297801 -1 360 | 0.911276 0.468425 1 361 | 0.64604 0.863044 -1 362 | 0.489204 0.738186 -1 363 | 0.545965 0.189155 1 364 | 0.0977727 0.199422 -1 365 | 0.997036 0.351952 1 366 | 0.34102 0.869158 -1 367 | 0.59398 0.700524 -1 368 | 0.373698 0.652488 -1 369 | 0.00727932 0.184257 -1 370 | 0.568021 
0.0753371 1 371 | 0.483655 0.882338 -1 372 | 0.23444 0.449407 -1 373 | 0.0431255 0.51328 -1 374 | 0.849643 0.437309 1 375 | 0.0458374 0.539893 -1 376 | 0.350504 0.258395 1 377 | 0.794248 0.220001 1 378 | 0.97378 0.854557 1 379 | 0.557141 0.913262 -1 380 | 0.789142 0.573461 1 381 | 0.741313 0.346975 1 382 | 0.668437 0.568661 1 383 | 0.631384 0.685063 -1 384 | 0.941841 0.849836 1 385 | 0.558327 0.0829687 1 386 | 0.378518 0.56206 -1 387 | 0.686935 0.407027 1 388 | 0.418508 0.636296 -1 389 | 0.12623 0.580932 -1 390 | 0.467244 0.214558 1 391 | 0.570261 0.0992936 1 392 | 0.42538 0.00947877 1 393 | 0.428358 0.0561956 1 394 | 0.763781 0.30652 1 395 | 0.496233 0.790503 -1 396 | 0.515911 0.318705 1 397 | 0.084245 0.63859 -1 398 | 0.238309 0.993241 -1 399 | 0.425181 0.665605 -1 400 | 0.656063 0.512787 1 401 | 0.723973 0.220642 1 402 | 0.992989 0.702841 1 403 | 0.935585 0.844543 1 404 | 0.087618 0.866817 -1 405 | 0.955863 0.567208 1 406 | 0.154656 0.672015 -1 407 | 0.971932 0.869263 1 408 | 0.617156 0.411524 1 409 | 0.228216 0.325273 -1 410 | 0.0667804 0.650288 -1 411 | 0.335039 0.280651 1 412 | 0.822868 0.590927 1 413 | 0.122874 0.0451575 1 414 | 0.336749 0.994879 -1 415 | 0.12541 0.246426 -1 416 | 0.150271 0.626996 -1 417 | 0.0590397 0.380249 -1 418 | 0.666751 0.73185 -1 419 | 0.132158 0.0265936 1 420 | 0.635948 0.0634997 1 421 | 0.165083 0.930711 -1 422 | 0.668831 0.0833832 1 423 | 0.523762 0.315779 1 424 | 0.270285 0.549872 -1 425 | 0.839032 0.23431 1 426 | 0.827442 0.515754 1 427 | 0.941261 0.741824 1 428 | 0.000309792 0.545788 -1 429 | 0.60837 0.935352 -1 430 | 0.00698791 0.189756 -1 431 | 0.1011 0.445927 -1 432 | 0.101455 0.316278 -1 433 | 0.348754 0.618195 -1 434 | 0.277627 0.0346511 1 435 | 0.378833 0.0605022 1 436 | 0.237139 0.356738 -1 437 | 0.417711 0.0934596 1 438 | 0.449986 0.161542 1 439 | 0.658073 0.352606 1 440 | 0.798175 0.0377735 1 441 | 0.256495 0.723779 -1 442 | 0.851783 0.609274 1 443 | 0.753445 0.165358 1 444 | 0.834797 0.0590443 1 445 | 0.173083 0.436326 -1 446 | 0.0621644 0.48991 -1 447 | 0.629833 0.192541 1 448 | 0.666475 0.876072 -1 449 | 0.106037 0.618043 -1 450 | 0.141033 0.0218629 1 451 | 0.567942 0.904455 -1 452 | 0.821621 0.430299 1 453 | 0.823849 0.0551249 1 454 | 0.0849651 0.150956 -1 455 | 0.694715 0.998504 -1 456 | 0.908664 0.823011 1 457 | 0.42962 0.634446 -1 458 | 0.117153 0.0255973 1 459 | 0.229214 0.356397 -1 460 | 0.745272 0.440587 1 461 | 0.038467 0.144066 -1 462 | 0.403297 0.592439 -1 463 | 0.563204 0.309581 1 464 | 0.956136 0.427968 1 465 | 0.19846 0.598952 -1 466 | 0.160515 0.0711021 1 467 | 0.663909 0.0544755 1 468 | 0.102991 0.335909 -1 469 | 0.984322 0.495635 1 470 | 0.235927 0.583786 -1 471 | 0.626038 0.229477 1 472 | 0.775861 0.917084 -1 473 | 0.838552 0.784448 1 474 | 0.514587 0.422319 1 475 | 0.722692 0.982959 -1 476 | 0.919353 0.549354 1 477 | 0.905926 0.222702 1 478 | 0.203526 0.820061 -1 479 | 0.569708 0.42871 1 480 | 0.795474 0.237471 1 481 | 0.184719 0.957666 -1 482 | 0.759545 0.135518 1 483 | 0.45178 0.536935 -1 484 | 0.554096 0.973329 -1 485 | 0.247521 0.665905 -1 486 | 0.0805813 0.633937 -1 487 | 0.283226 0.496168 -1 488 | 0.921273 0.548892 1 489 | 0.446992 0.915748 -1 490 | 0.255994 0.407409 -1 491 | 0.774822 0.217327 1 492 | 0.783563 0.402349 1 493 | 0.0202346 0.309916 -1 494 | 0.885987 0.422338 1 495 | 0.808262 0.596139 1 496 | 0.307957 0.557909 -1 497 | 0.685592 0.36617 1 498 | 0.281479 0.879778 -1 499 | 0.933454 0.285331 1 500 | 0.267862 0.430028 -1 501 | 0.160436 0.541837 -1 502 | 0.757908 0.0587335 1 503 | 0.503278 0.219799 1 504 | 
0.670772 0.112562 1 505 | 0.968247 0.093841 1 506 | 0.297376 0.895762 -1 507 | 0.122617 0.0317196 1 508 | 0.751844 0.847083 -1 509 | 0.347115 0.531903 -1 510 | 0.605844 0.108031 1 511 | 0.17417 0.515923 -1 512 | 0.073073 0.61947 -1 513 | 0.19598 0.0857053 1 514 | 0.785532 0.979313 -1 515 | 0.285909 0.670368 -1 516 | 0.36469 0.488076 -1 517 | 0.618211 0.87057 -1 518 | 0.826502 0.249688 1 519 | 0.727953 0.832765 -1 520 | 0.405467 0.312527 1 521 | 0.180369 0.944236 -1 522 | 0.664177 0.231903 1 523 | 0.00342504 0.959255 -1 524 | 0.268744 0.535284 -1 525 | 0.656671 0.536351 1 526 | 0.562471 0.99403 -1 527 | 0.309025 0.782435 -1 528 | 0.989504 0.298112 1 529 | 0.167074 0.054979 1 530 | 0.749083 0.0418701 1 531 | 0.148786 0.822297 -1 532 | 0.462915 0.39147 1 533 | 0.840631 0.364955 1 534 | 0.341914 0.54697 -1 535 | 0.0724261 0.897542 -1 536 | 0.579079 0.385625 1 537 | 0.260234 0.624169 -1 538 | 0.175452 0.0900634 1 539 | 0.714672 0.319182 1 540 | 0.51161 0.1376 1 541 | 0.17358 0.282637 -1 542 | 0.0961538 0.410023 -1 543 | 0.191505 0.306804 -1 544 | 0.128009 0.254819 -1 545 | 0.3076 0.652045 -1 546 | 0.953018 0.75973 1 547 | 0.555524 0.198088 1 548 | 0.235301 0.901443 -1 549 | 0.415412 0.758424 -1 550 | 0.685633 0.509567 1 551 | 0.520311 0.971467 -1 552 | 0.628602 0.536639 1 553 | 0.869978 0.206805 1 554 | 0.361661 0.864875 -1 555 | 0.711508 0.868899 -1 556 | 0.148862 0.347637 -1 557 | 0.0552176 0.360635 -1 558 | 0.322722 0.848013 -1 559 | 0.826318 0.467906 1 560 | 0.40867 0.0493074 1 561 | 0.973975 0.739117 1 562 | 0.625053 0.405339 1 563 | 0.371324 0.265964 1 564 | 0.931585 0.316833 1 565 | 0.903503 0.348593 1 566 | 0.858171 0.755053 1 567 | 0.293952 0.0367562 1 568 | 0.271221 0.740336 -1 569 | 0.74095 0.87905 -1 570 | 0.716325 0.271524 1 571 | 0.467886 0.674007 -1 572 | 0.969284 0.220292 1 573 | 0.452353 0.640856 -1 574 | 0.99516 0.0713676 1 575 | 0.46605 0.968418 -1 576 | 0.478281 0.629471 -1 577 | 0.780334 0.364529 1 578 | 0.41868 0.0290413 1 579 | 0.430596 0.864687 -1 580 | 0.126211 0.623366 -1 581 | 0.720581 0.855448 -1 582 | 0.586975 0.479385 1 583 | 0.666205 0.0075153 1 584 | 0.758155 0.92312 -1 585 | 0.209426 0.44017 -1 586 | 0.864996 0.695747 1 587 | 0.909492 0.14355 1 588 | 0.277419 0.824472 -1 589 | 0.689576 0.914122 -1 590 | 0.322474 0.789045 -1 591 | 0.259047 0.769267 -1 592 | 0.161761 0.434938 -1 593 | 0.630817 0.716806 -1 594 | 0.509059 0.587188 -1 595 | 0.837654 0.5555 1 596 | 0.253994 0.377669 -1 597 | 0.167348 0.719022 -1 598 | 0.459627 0.15282 1 599 | 0.501872 0.00143824 1 600 | 0.79596 0.533901 1 601 | 0.124119 0.56916 -1 602 | 0.0487879 0.573286 -1 603 | 0.596131 0.70981 -1 604 | 0.0276332 0.931119 -1 605 | 0.129385 0.947 -1 606 | 0.659828 0.9081 -1 607 | 0.489895 0.623901 -1 608 | 0.515453 0.929899 -1 609 | 0.818059 0.111099 1 610 | 0.804002 0.323761 1 611 | 0.526774 0.0348939 1 612 | 0.2628 0.561451 -1 613 | 0.113682 0.750367 -1 614 | 0.892268 0.172161 1 615 | 0.971775 0.404934 1 616 | 0.9454 0.0932596 1 617 | 0.656222 0.28806 1 618 | 0.0860606 0.791116 -1 619 | 0.0709727 0.560112 -1 620 | 0.173328 0.780618 -1 621 | 0.0301033 0.656996 -1 622 | 0.688689 0.898491 -1 623 | 0.186951 0.0576559 1 624 | 0.65413 0.166983 1 625 | 0.800355 0.395515 1 626 | 0.53794 0.752802 -1 627 | 0.358543 0.898307 -1 628 | 0.830507 0.0245606 1 629 | 0.339637 0.130685 1 630 | 0.303404 0.452883 -1 631 | 0.795731 0.726338 1 632 | 0.47855 0.548215 -1 633 | 0.247947 0.766757 -1 634 | 0.745852 0.161309 1 635 | 0.154802 0.689091 -1 636 | 0.925021 0.58772 1 637 | 0.261112 0.00650378 1 638 | 0.0794283 
0.753214 -1 639 | 0.0700165 0.992149 -1 640 | 0.781074 0.121772 1 641 | 0.826793 0.342554 1 642 | 0.753404 0.18152 1 643 | 0.770336 0.678678 1 644 | 0.963135 0.168974 1 645 | 0.443104 0.987506 -1 646 | 0.797647 0.0161492 1 647 | 0.45144 0.0378066 1 648 | 0.946543 0.764114 1 649 | 0.00124868 0.866803 -1 650 | 0.982205 0.53794 1 651 | 0.541684 0.766407 -1 652 | 0.329147 0.543539 -1 653 | 0.529628 0.606609 -1 654 | 0.334795 0.181196 1 655 | 0.151119 0.528269 -1 656 | 0.681993 0.0483689 1 657 | 0.699993 0.053291 1 658 | 0.802789 0.318438 1 659 | 0.331193 0.64936 -1 660 | 0.237571 0.897379 -1 661 | 0.327753 0.689727 -1 662 | 0.312201 0.701004 -1 663 | 0.931111 0.660958 1 664 | 0.718269 0.80984 -1 665 | 0.273827 0.677575 -1 666 | 0.625871 0.49394 1 667 | 0.777728 0.31515 1 668 | 0.803845 0.937464 -1 669 | 0.906882 0.234153 1 670 | 0.615922 0.485646 1 671 | 0.0651093 0.753302 -1 672 | 0.207141 0.29146 -1 673 | 0.875938 0.803306 1 674 | 0.393507 0.561247 -1 675 | 0.628149 0.914629 -1 676 | 0.794311 0.333171 1 677 | 0.235492 0.938559 -1 678 | 0.526704 0.931333 -1 679 | 0.218106 0.608315 -1 680 | 0.00656713 0.146674 -1 681 | 0.778307 0.348532 1 682 | 0.0207416 0.312053 -1 683 | 0.423448 0.252793 1 684 | 0.289913 0.215212 1 685 | 0.244006 0.00125474 1 686 | 0.629165 0.898357 -1 687 | 0.919005 0.430823 1 688 | 0.00655742 0.5995 -1 689 | 0.759102 0.299825 1 690 | 0.759687 0.994316 -1 691 | 0.862409 0.984698 -1 692 | 0.975845 0.91632 1 693 | 0.11387 0.682952 -1 694 | 0.493021 0.73985 -1 695 | 0.830876 0.31734 1 696 | 0.655172 0.496634 1 697 | 0.956215 0.259687 1 698 | 0.94958 0.68246 1 699 | 0.968275 0.739117 1 700 | 0.063977 0.127011 -1 701 | 0.465658 0.210501 1 702 | 0.354798 0.193762 1 703 | 0.127349 0.642894 -1 704 | 0.127856 0.738945 -1 705 | 0.76565 0.842646 -1 706 | 0.548526 0.0179506 1 707 | 0.947044 0.192012 1 708 | 0.988923 0.445638 1 709 | 0.439399 0.359475 1 710 | 0.402628 0.323374 1 711 | 0.265511 0.0606586 1 712 | 0.423729 0.773269 -1 713 | 0.25291 0.432529 -1 714 | 0.436142 0.690575 -1 715 | 0.41126 0.573522 -1 716 | 0.296642 0.0488843 1 717 | 0.127029 0.342121 -1 718 | 0.398267 0.967173 -1 719 | 0.481517 0.934901 -1 720 | 0.308108 0.409125 -1 721 | 0.700454 0.586787 1 722 | 0.420939 0.584407 -1 723 | 0.41388 0.839033 -1 724 | 0.753372 0.959814 -1 725 | 0.824779 0.0717304 1 726 | 0.714507 0.893623 -1 727 | 0.799793 0.534325 1 728 | 0.526204 0.184637 1 729 | 0.577423 0.21038 1 730 | 0.599212 0.720475 -1 731 | 0.0930897 0.616923 -1 732 | 0.233392 0.606981 -1 733 | 0.584939 0.970069 -1 734 | 0.289712 0.602656 -1 735 | 0.778619 0.0948103 1 736 | 0.303868 0.158273 1 737 | 0.037531 0.146989 -1 738 | 0.331734 0.138935 1 739 | 0.528547 0.687987 -1 740 | 0.970621 0.607861 1 741 | 0.704668 0.353814 1 742 | 0.731987 0.154729 1 743 | 0.581078 0.370043 1 744 | 0.506686 0.809978 -1 745 | 0.881938 0.379804 1 746 | 0.373444 0.767363 -1 747 | 0.485164 0.28045 1 748 | 0.197833 0.658006 -1 749 | 0.71941 0.890861 -1 750 | 0.569357 0.199161 1 751 | 0.339519 0.506339 -1 752 | 0.715948 0.107338 1 753 | 0.103761 0.175751 -1 754 | 0.00628023 0.110802 -1 755 | 0.617437 0.0753325 1 756 | 0.676217 0.595606 1 757 | 0.601006 0.390767 1 758 | 0.914454 0.60666 1 759 | 0.604206 0.863729 -1 760 | 0.499283 0.775396 -1 761 | 0.246642 0.977352 -1 762 | 0.0486123 0.79246 -1 763 | 0.695353 0.938804 -1 764 | 0.156077 0.752195 -1 765 | 0.316377 0.240939 1 766 | 0.246456 0.733288 -1 767 | 0.519741 0.708837 -1 768 | 0.514205 0.784653 -1 769 | 0.798708 0.709856 1 770 | 0.876125 0.302464 1 771 | 0.798164 0.484909 1 772 | 0.970956 
0.734413 1 773 | 0.171445 0.594175 -1 774 | 0.884069 0.956158 -1 775 | 0.0428943 0.212994 -1 776 | 0.228614 0.911873 -1 777 | 0.00841228 0.764608 -1 778 | 0.688014 0.271866 1 779 | 0.909573 0.59604 1 780 | 0.593795 0.439687 1 781 | 0.197688 0.122967 1 782 | 0.477249 0.571915 -1 783 | 0.539346 0.971034 -1 784 | 0.178858 0.470033 -1 785 | 0.307987 0.611416 -1 786 | 0.730235 0.105422 1 787 | 0.575282 0.18986 1 788 | 0.135168 0.204816 -1 789 | 0.411345 0.0350336 1 790 | 0.426741 0.259088 1 791 | 0.597905 0.218627 1 792 | 0.0184314 0.390617 -1 793 | 0.598701 0.127626 1 794 | 0.885586 0.998665 -1 795 | 0.101529 0.537845 -1 796 | 0.745542 0.368454 1 797 | 0.971997 0.23736 1 798 | 0.628441 0.346515 1 799 | 0.733682 0.379105 1 800 | 0.287615 0.929986 -1 801 | 0.436071 0.520699 -1 802 | 0.0456272 0.628728 -1 803 | 0.416084 0.00456804 1 804 | 0.343631 0.608342 -1 805 | 0.273205 0.788589 -1 806 | 0.825877 0.624324 1 807 | 0.428002 0.551899 -1 808 | 0.766301 0.834485 -1 809 | 0.750664 0.964312 -1 810 | 0.26881 0.34805 -1 811 | 0.0648938 0.807231 -1 812 | 0.0521842 0.320917 -1 813 | 0.655647 0.0141501 1 814 | 0.453071 0.0894423 1 815 | 0.910255 0.312287 1 816 | 0.446455 0.290288 1 817 | 0.196355 0.017121 1 818 | 0.562999 0.154097 1 819 | 0.940237 0.42179 1 820 | 0.627907 0.778432 -1 821 | 0.374925 0.697193 -1 822 | 0.246041 0.72881 -1 823 | 0.855517 0.436792 1 824 | 0.947803 0.873305 1 825 | 0.35815 0.989496 -1 826 | 0.0639162 0.935971 -1 827 | 0.172503 0.488447 -1 828 | 0.54696 0.909676 -1 829 | 0.176591 0.417129 -1 830 | 0.276876 0.622866 -1 831 | 0.438581 0.582303 -1 832 | 0.329629 0.853548 -1 833 | 0.352257 0.270565 1 834 | 0.436919 0.680153 -1 835 | 0.2241 0.622145 -1 836 | 0.0263644 0.67364 -1 837 | 0.675531 0.294437 1 838 | 0.150863 0.365062 -1 839 | 0.812529 0.178787 1 840 | 0.438151 0.0956492 1 841 | 0.361022 0.272236 1 842 | 0.758714 0.337085 1 843 | 0.759034 0.508974 1 844 | 0.165257 0.991291 -1 845 | 0.161668 0.0267692 1 846 | 0.165336 0.724055 -1 847 | 0.596039 0.909899 -1 848 | 0.799796 0.872538 -1 849 | 0.534579 0.113211 1 850 | 0.157476 0.957511 -1 851 | 0.72859 0.418856 1 852 | 0.911685 0.831972 1 853 | 0.207611 0.871704 -1 854 | 0.877413 0.337393 1 855 | 0.201244 0.649589 -1 856 | 0.794832 0.871322 -1 857 | 0.866337 0.0199421 1 858 | 0.547209 0.0729792 1 859 | 0.503495 0.357879 1 860 | 0.895389 0.141734 1 861 | 0.111822 0.0323921 1 862 | 0.483983 0.793137 -1 863 | 0.639758 0.310883 1 864 | 0.696889 0.826603 -1 865 | 0.268496 0.164938 1 866 | 0.0207783 0.692312 -1 867 | 0.132039 0.88182 -1 868 | 0.781032 0.719298 1 869 | 0.971277 0.218144 1 870 | 0.417455 0.340996 1 871 | 0.993756 0.345006 1 872 | 0.360074 0.254071 1 873 | 0.130303 0.370438 -1 874 | 0.415516 0.0826126 1 875 | 0.584504 0.428982 1 876 | 0.652144 0.431931 1 877 | 0.564526 0.0275837 1 878 | 0.299244 0.655247 -1 879 | 0.20136 0.0564858 1 880 | 0.144985 0.210736 -1 881 | 0.130103 0.700446 -1 882 | 0.724313 0.526839 1 883 | 0.491621 0.00573404 1 884 | 0.107608 0.323681 -1 885 | 0.715835 0.357781 1 886 | 0.220987 0.139168 1 887 | 0.813485 0.0708069 1 888 | 0.538258 0.437706 1 889 | 0.571016 0.351875 1 890 | 0.197453 0.83333 -1 891 | 0.795359 0.510659 1 892 | 0.500165 0.82664 -1 893 | 0.592472 0.437362 1 894 | 0.450939 0.0133321 1 895 | 0.735098 0.00473404 1 896 | 0.0694814 0.207915 -1 897 | 0.615119 0.347642 1 898 | 0.868878 0.6823 1 899 | 0.476361 0.151452 1 900 | 0.669949 0.264833 1 901 | 0.288299 0.662692 -1 902 | 0.627068 0.272844 1 903 | 0.264957 0.355834 -1 904 | 0.809584 0.147646 1 905 | 0.806361 0.36828 1 906 | 0.956383 
0.850946 1 907 | 0.422121 0.893379 -1 908 | 0.484039 0.373933 1 909 | 0.569826 0.691685 -1 910 | 0.850491 0.239603 1 911 | 0.0296404 0.952488 -1 912 | 0.177907 0.643448 -1 913 | 0.946415 0.196601 1 914 | 0.950526 0.104172 1 915 | 0.430335 0.587557 -1 916 | 0.820291 0.260617 1 917 | 0.932435 0.067607 1 918 | 0.689081 0.150527 1 919 | 0.403716 0.0470443 1 920 | 0.385411 0.69684 -1 921 | 0.416883 0.792668 -1 922 | 0.644717 0.816997 -1 923 | 0.491817 0.251801 1 924 | 0.955601 0.851104 1 925 | 0.336963 0.627587 -1 926 | 0.37207 0.0136808 1 927 | 0.901067 0.326827 1 928 | 0.568163 0.506981 1 929 | 0.76182 0.596374 1 930 | 0.118633 0.4744 -1 931 | 0.430573 0.82055 -1 932 | 0.071012 0.187587 -1 933 | 0.630174 0.976088 -1 934 | 0.668469 0.842072 -1 935 | 0.923003 0.323096 1 936 | 0.677068 0.585427 1 937 | 0.7056 0.288568 1 938 | 0.861037 0.0847578 1 939 | 0.444507 0.54545 -1 940 | 0.435555 0.834027 -1 941 | 0.815799 0.72573 1 942 | 0.783529 0.950257 -1 943 | 0.180914 0.36687 -1 944 | 0.0243895 0.420251 -1 945 | 0.0539742 0.192892 -1 946 | 0.135789 0.367019 -1 947 | 0.162848 0.575995 -1 948 | 0.644553 0.522779 1 949 | 0.0280394 0.2024 -1 950 | 0.498716 0.111235 1 951 | 0.694521 0.378623 1 952 | 0.10905 0.839814 -1 953 | 0.274261 0.175751 1 954 | 0.274627 0.525746 -1 955 | 0.214118 0.119999 1 956 | 0.307521 0.871397 -1 957 | 0.811055 0.728313 1 958 | 0.284652 0.586136 -1 959 | 0.786844 0.370091 1 960 | 0.575132 0.465631 1 961 | 0.526957 0.892559 -1 962 | 0.694115 0.909796 -1 963 | 0.784944 0.47347 1 964 | 0.905805 0.502766 1 965 | 0.131259 0.914216 -1 966 | 0.804373 0.549669 1 967 | 0.179943 0.786996 -1 968 | 0.0275257 0.678325 -1 969 | 0.121018 0.496174 -1 970 | 0.127049 0.258779 -1 971 | 0.121895 0.62246 -1 972 | 0.938497 0.175046 1 973 | 0.309569 0.187634 1 974 | 0.945545 0.818893 1 975 | 0.656243 0.119367 1 976 | 0.878904 0.613301 1 977 | 0.34872 0.465452 -1 978 | 0.224392 0.631975 -1 979 | 0.177952 0.750608 -1 980 | 0.568367 0.288115 1 981 | 0.292361 0.0408193 1 982 | 0.290532 0.843732 -1 983 | 0.0147712 0.400843 -1 984 | 0.558114 0.678272 -1 985 | 0.96443 0.850934 1 986 | 0.857355 0.260394 1 987 | 0.00754328 0.998004 -1 988 | 0.602324 0.842636 -1 989 | 0.341327 0.801777 -1 990 | 0.108357 0.673782 -1 991 | 0.513777 0.566489 -1 992 | 0.4456 0.367347 1 993 | 0.491244 0.83112 -1 994 | 0.891817 0.565568 1 995 | 0.141926 0.792559 -1 996 | 0.412943 0.988291 -1 997 | 0.0560869 0.798841 -1 998 | 0.361979 0.860275 -1 999 | 0.736251 0.461332 1 1000 | 0.0903633 0.880068 -1 1001 | -------------------------------------------------------------------------------- /quiz2/hw2_adaboost_train.dat: -------------------------------------------------------------------------------- 1 | 0.757222 0.633831 -1 2 | 0.847382 0.281581 -1 3 | 0.24931 0.618635 +1 4 | 0.538526 0.144259 -1 5 | 0.474435 0.414558 -1 6 | 0.374151 0.0120482 1 7 | 0.847185 0.217572 1 8 | 0.983368 0.250496 1 9 | 0.645141 0.485816 1 10 | 0.172211 0.254331 -1 11 | 0.116866 0.378804 -1 12 | 0.55097 0.760426 -1 13 | 0.312109 0.442938 -1 14 | 0.304777 0.0529649 1 15 | 0.572727 0.370527 1 16 | 0.171491 0.50076 -1 17 | 0.644567 0.834055 -1 18 | 0.0529041 0.338461 -1 19 | 0.0323543 0.830701 -1 20 | 0.272193 0.587396 -1 21 | 0.123521 0.0516625 1 22 | 0.905544 0.247013 1 23 | 0.854276 0.559648 1 24 | 0.375914 0.505747 -1 25 | 0.160755 0.238718 -1 26 | 0.45893 0.227062 1 27 | 0.395407 0.791184 -1 28 | 0.742325 0.586444 1 29 | 0.43615 0.136922 1 30 | 0.954217 0.680325 1 31 | 0.916386 0.381431 1 32 | 0.953844 0.439266 1 33 | 0.328701 0.721918 -1 34 | 0.275732 
0.43115 -1 35 | 0.892366 0.0136661 1 36 | 0.249529 0.0709084 1 37 | 0.124333 0.611515 -1 38 | 0.54449 0.423701 1 39 | 0.86019 0.93029 -1 40 | 0.432404 0.0901487 1 41 | 0.204973 0.406648 -1 42 | 0.0748025 0.568699 -1 43 | 0.936407 0.106094 1 44 | 0.572728 0.90924 -1 45 | 0.358618 0.651613 -1 46 | 0.631685 0.910141 -1 47 | 0.802581 0.599025 1 48 | 0.366818 0.0135169 1 49 | 0.708026 0.300654 1 50 | 0.243625 0.106277 1 51 | 0.960778 0.59799 1 52 | 0.726241 0.057674 1 53 | 0.158561 0.690295 -1 54 | 0.420638 0.503567 -1 55 | 0.651344 0.290269 1 56 | 0.933469 0.490516 1 57 | 0.502864 0.721677 -1 58 | 0.595151 0.82293 -1 59 | 0.696778 0.300018 1 60 | 0.927038 0.295737 1 61 | 0.145192 0.377728 -1 62 | 0.385435 0.68299 -1 63 | 0.296852 0.868018 -1 64 | 0.659204 0.77369 -1 65 | 0.896153 0.832046 1 66 | 0.466137 0.877674 -1 67 | 0.815532 0.164151 1 68 | 0.310117 0.857713 -1 69 | 0.522385 0.961609 -1 70 | 0.369345 0.781697 -1 71 | 0.901988 0.831265 1 72 | 0.692314 0.0640428 1 73 | 0.836977 0.614453 1 74 | 0.104584 0.357892 -1 75 | 0.265266 0.65833 -1 76 | 0.729254 0.885763 -1 77 | 0.205254 0.404956 -1 78 | 0.032359 0.778401 -1 79 | 0.464724 0.159682 1 80 | 0.940021 0.493738 1 81 | 0.248985 0.646083 -1 82 | 0.541258 0.728218 -1 83 | 0.391575 0.291076 1 84 | 0.0254967 0.300503 -1 85 | 0.475398 0.920203 -1 86 | 0.835664 0.584283 1 87 | 0.296033 0.0885163 1 88 | 0.0435908 0.646312 -1 89 | 0.284148 0.182427 1 90 | 0.627696 0.788116 -1 91 | 0.312939 0.871275 -1 92 | 0.676521 0.316903 1 93 | 0.0123539 0.178643 -1 94 | 0.682164 0.777194 -1 95 | 0.421563 0.302683 1 96 | 0.03183 0.289761 -1 97 | 0.435715 0.190071 1 98 | 0.730492 0.0655594 1 99 | 0.92527 0.524315 1 100 | 0.984815 0.383621 1 101 | -------------------------------------------------------------------------------- /quiz2/hw2_lssvm_all.dat: -------------------------------------------------------------------------------- 1 | 4.115 5.020 -7.879 -11.780 2.004 -0.353 -0.735 3.561 2.441 -9.822 +1 2 | -3.557 0.997 2.932 7.672 5.430 -0.137 1.635 -5.190 -0.394 -7.667 +1 3 | 6.417 5.878 5.066 -7.209 -6.953 7.639 -2.937 -1.023 3.963 -11.069 +1 4 | -2.247 6.532 6.437 2.293 6.302 2.187 3.429 -3.453 9.172 -4.548 +1 5 | 3.708 5.834 3.676 -4.403 -5.296 9.080 -3.110 -3.294 3.189 -8.510 +1 6 | -1.586 1.960 -5.506 -8.767 7.871 0.613 -4.693 4.302 -1.219 -8.478 -1 7 | -4.181 -6.797 -4.187 8.622 0.771 5.851 -3.893 3.779 4.470 -9.433 +1 8 | -5.589 -7.010 -5.297 7.329 1.872 3.953 -3.425 3.097 1.677 -11.320 +1 9 | 8.124 4.813 3.519 -7.539 -4.723 8.129 -5.165 -3.411 3.552 -9.192 -1 10 | -7.536 -0.448 -10.633 3.777 0.728 -1.386 -7.756 8.166 -2.979 0.629 -1 11 | 2.963 3.801 0.763 4.595 -4.334 -4.267 -0.809 -5.677 6.073 6.625 -1 12 | 2.009 4.874 -0.200 3.469 -6.495 -5.364 -1.560 -7.233 6.703 8.024 +1 13 | 1.117 -4.978 4.810 0.904 -0.160 1.036 -4.530 -0.242 -9.777 -7.043 -1 14 | 2.500 -5.283 2.861 2.062 -1.050 -1.513 -3.469 -0.396 -9.007 -6.550 +1 15 | 4.308 5.177 -7.454 -8.630 0.831 1.020 2.347 4.464 3.176 -6.249 +1 16 | 2.139 5.880 0.415 2.731 -7.112 -9.191 -2.697 -8.236 6.473 5.862 +1 17 | 1.320 -5.006 4.664 2.597 -0.844 -3.269 -4.123 -1.583 -8.507 -6.115 +1 18 | 0.900 -4.961 5.714 3.565 -0.242 0.251 -5.140 -1.287 -8.431 -6.103 +1 19 | -8.505 -1.866 0.366 3.879 -8.945 -8.063 -0.133 7.214 4.570 -1.740 +1 20 | -3.385 -1.143 -5.207 -6.163 9.599 -1.535 -6.582 2.827 -0.883 -6.998 +1 21 | -9.586 -0.540 -9.988 2.891 0.859 -2.123 -6.347 8.064 -3.331 -3.336 +1 22 | -8.433 -1.523 -3.372 3.808 -6.863 -5.143 1.558 7.131 5.446 -1.187 +1 23 | 4.298 3.835 -8.633 -9.632 3.692 -0.160 -0.957 
3.330 4.571 -6.648 +1 24 | -1.017 5.133 11.423 0.284 6.290 0.421 5.239 -3.398 9.082 -1.691 +1 25 | -10.169 -2.113 -2.671 5.976 -6.393 -4.706 1.480 6.266 4.243 -0.733 +1 26 | 5.881 4.734 4.417 -4.664 -4.853 11.036 -3.709 -3.193 2.504 -9.894 -1 27 | -3.249 -0.915 -6.549 -5.577 10.211 -0.041 -3.965 5.207 -0.911 -7.836 +1 28 | 5.191 6.167 -7.047 -9.895 0.665 0.546 1.352 5.773 -0.029 -5.556 +1 29 | -2.959 -9.453 -6.481 8.026 -0.529 4.413 -5.293 3.165 2.229 -11.325 +1 30 | 3.626 6.115 1.207 4.291 -6.699 -7.621 -1.535 -7.210 4.938 5.219 +1 31 | -4.650 0.542 2.459 7.701 5.423 0.505 1.665 -8.834 -0.976 -7.722 -1 32 | -3.955 2.397 0.266 9.077 3.636 -1.677 1.649 -6.768 -2.330 -8.549 -1 33 | -8.819 -1.605 -4.140 4.988 -6.863 -3.568 0.397 4.664 5.731 -2.408 +1 34 | 4.916 6.910 -8.017 -9.443 1.800 0.296 0.339 4.603 1.225 -4.997 -1 35 | -10.791 0.246 -10.659 4.901 1.168 -1.746 -5.982 8.414 -3.182 -0.838 +1 36 | -7.895 0.639 -10.262 2.478 -0.674 -3.111 -6.627 8.107 -4.353 -1.397 -1 37 | -0.770 5.931 7.932 1.505 7.155 -0.092 7.127 -2.159 10.771 -3.074 -1 38 | -5.626 2.599 0.068 8.718 4.228 -0.674 1.307 -7.047 -1.415 -9.168 +1 39 | 0.891 -5.484 6.637 2.720 -1.019 -3.543 -3.921 -0.897 -9.728 -4.806 -1 40 | 8.535 6.167 3.994 -5.313 -5.637 9.061 -2.521 -2.764 2.235 -12.641 +1 41 | 4.280 5.218 3.566 -5.915 -4.272 8.280 -3.916 -3.052 3.816 -8.616 +1 42 | -4.759 2.619 -1.060 7.965 5.833 -4.019 1.570 -7.312 -0.249 -7.866 -1 43 | 3.132 6.571 1.362 1.703 -6.748 -7.654 -1.628 -6.511 4.822 6.550 -1 44 | 6.782 6.938 5.336 -4.787 -4.603 7.569 -4.233 -0.615 3.159 -10.304 -1 45 | -0.583 6.900 7.912 3.730 7.027 1.934 5.372 -3.433 10.812 -2.200 -1 46 | 4.840 7.431 -7.744 -9.574 0.965 0.240 0.591 4.692 3.116 -6.623 -1 47 | -4.420 1.819 2.501 8.220 4.367 -2.095 0.769 -7.181 -0.567 -6.660 -1 48 | -0.795 -1.118 -7.311 -5.878 8.168 0.979 -3.316 4.087 -0.988 -8.003 +1 49 | 5.659 4.140 -6.471 -10.457 1.824 -0.334 0.099 2.734 1.224 -6.261 -1 50 | 7.974 5.180 4.776 -7.408 -5.707 7.341 -5.293 -1.798 3.771 -8.300 +1 51 | -2.679 0.729 -4.449 -5.961 8.414 1.169 -5.426 4.117 -1.174 -7.767 +1 52 | -0.427 5.178 9.514 0.148 6.685 0.719 4.482 -2.980 8.438 -3.062 +1 53 | -1.967 -0.026 -6.229 -5.985 9.445 1.692 -3.345 3.132 -1.094 -8.401 +1 54 | 3.131 -5.369 2.640 2.675 -0.893 -2.043 -3.904 -0.563 -10.155 -5.159 +1 55 | 2.332 5.231 1.006 3.158 -6.437 -7.038 0.362 -6.965 4.354 8.201 -1 56 | -7.347 -1.180 -10.294 5.292 1.245 -2.057 -6.797 8.518 -0.452 0.238 -1 57 | -3.133 -8.588 -3.362 8.571 0.257 4.284 -4.388 4.049 0.243 -10.081 +1 58 | -8.778 -1.450 -9.743 6.791 2.364 -1.851 -5.703 7.911 -4.481 -0.180 +1 59 | -9.865 -0.119 -7.887 4.480 3.787 -2.222 -6.296 7.716 -4.788 1.710 -1 60 | -1.084 5.909 8.700 1.866 6.083 0.312 4.922 -2.853 9.602 -2.491 +1 61 | -0.652 6.175 8.262 3.616 7.318 0.034 6.869 -3.246 8.962 -3.962 +1 62 | -9.141 -2.324 -1.272 6.891 -7.169 -5.010 1.881 5.505 5.182 -1.413 +1 63 | 3.826 6.812 -7.423 -9.002 5.015 1.454 0.677 4.932 -0.557 -7.331 +1 64 | 2.853 -6.336 6.125 4.319 -0.066 -2.087 -3.725 -1.454 -9.157 -5.885 -1 65 | 5.411 5.431 -6.787 -9.601 2.823 -0.167 0.352 5.524 1.936 -7.719 +1 66 | 7.854 5.357 -6.846 -9.678 4.764 -0.284 0.754 3.434 3.365 -6.610 +1 67 | 6.532 5.439 -6.322 -9.672 2.431 -0.004 0.321 3.740 2.993 -6.737 -1 68 | -4.732 -6.321 -4.680 8.078 -0.217 2.704 -4.915 2.855 1.712 -9.608 +1 69 | 0.064 5.604 6.813 1.402 5.402 0.742 4.310 -2.100 10.459 -2.290 -1 70 | -8.589 -1.728 -2.947 5.245 -7.021 -3.864 -1.232 6.449 5.548 -0.650 +1 71 | 6.578 4.353 -7.620 -9.929 4.814 0.504 0.980 4.633 1.697 -5.866 +1 72 | 1.573 
4.288 -0.448 -0.420 -6.488 -3.770 -3.418 -4.929 6.407 5.994 -1 73 | -4.954 1.338 0.702 8.589 5.902 0.026 1.801 -6.844 -1.330 -8.314 -1 74 | -2.014 5.196 9.231 -0.425 6.895 -1.635 6.785 -2.817 10.101 -0.401 +1 75 | -0.010 -4.977 4.682 3.681 1.037 -1.826 -3.418 -1.740 -8.810 -6.386 +1 76 | -8.138 -0.487 -9.604 3.637 0.569 -1.899 -5.892 8.035 -3.460 -0.107 +1 77 | 5.045 6.222 -7.199 -9.585 3.124 0.082 0.817 3.374 1.952 -6.567 +1 78 | -4.948 0.852 0.183 8.077 4.751 0.701 2.380 -6.599 1.134 -9.094 -1 79 | -8.740 -0.320 -2.992 4.478 -6.718 -5.079 2.312 7.159 4.796 -2.332 +1 80 | 2.682 7.610 -7.683 -10.276 3.099 -1.305 1.073 4.170 0.236 -7.140 +1 81 | 3.917 3.505 0.877 5.224 -6.773 -3.140 -1.477 -5.889 6.423 7.597 +1 82 | -8.157 -1.596 -4.363 6.037 -7.769 -6.663 0.969 7.091 3.800 -1.250 +1 83 | -9.408 -1.456 -2.715 5.103 -9.686 -6.715 0.728 7.079 4.798 0.251 +1 84 | -3.641 -7.252 -3.941 7.945 2.431 4.446 -3.981 3.368 2.845 -9.809 -1 85 | 4.812 2.337 -0.376 2.483 -7.404 -5.820 -0.191 -6.761 5.570 5.013 +1 86 | -5.500 3.374 1.294 7.357 5.001 -1.921 0.990 -7.808 -1.066 -10.102 -1 87 | 2.575 2.171 2.455 5.891 -7.370 -7.409 -0.026 -7.493 7.370 6.980 -1 88 | -2.153 0.232 -7.624 -5.311 7.312 -0.809 -5.160 4.234 1.726 -6.618 +1 89 | 6.913 4.938 4.720 -5.212 -4.812 8.178 -3.251 -2.130 2.856 -8.222 +1 90 | -0.411 -4.961 6.381 1.425 0.344 -1.397 -2.604 -0.851 -9.382 -6.602 -1 91 | -2.061 -0.285 -7.629 -4.463 6.703 0.323 -2.274 3.681 -0.086 -7.429 +1 92 | -6.239 -6.682 -5.617 6.685 2.339 2.792 -3.935 3.833 1.666 -10.045 +1 93 | 4.211 7.745 -8.556 -9.207 2.470 0.298 2.335 2.817 2.374 -6.429 +1 94 | 6.645 6.441 3.767 -5.465 -5.385 6.789 -4.180 -1.660 2.092 -8.242 +1 95 | 6.610 5.905 3.981 -7.357 -5.514 8.624 -3.720 0.211 1.955 -8.851 +1 96 | -3.248 -6.134 -3.858 7.747 2.178 8.041 -4.551 4.052 2.187 -10.626 +1 97 | -2.236 -1.124 -5.879 -6.315 9.215 -1.938 -3.013 2.736 -0.864 -7.738 +1 98 | -4.392 -0.150 -10.347 5.617 1.046 -0.959 -6.793 8.693 -4.790 0.188 +1 99 | 6.872 5.701 4.602 -6.117 -4.664 10.661 -3.108 -3.640 2.973 -10.674 -1 100 | -1.755 5.482 6.142 1.632 7.135 1.635 6.834 -1.858 11.811 -2.227 +1 101 | -6.256 0.388 -10.079 3.799 2.087 -2.341 -7.282 8.459 -3.679 0.412 -1 102 | -1.276 6.384 6.348 -0.051 6.124 1.466 4.638 -3.209 10.863 -2.389 -1 103 | 5.926 6.444 3.945 -4.608 -3.603 10.439 -2.594 -2.331 2.904 -10.766 +1 104 | -8.550 -0.834 -10.248 4.996 0.687 -0.991 -7.671 8.352 -3.443 0.178 +1 105 | -1.155 5.956 7.357 0.185 7.220 1.703 6.063 -3.769 9.618 -1.495 +1 106 | 6.997 5.400 5.012 -5.799 -4.046 10.910 -2.945 -2.216 2.397 -6.441 +1 107 | -5.065 2.099 0.535 7.544 4.438 -4.473 1.442 -6.602 -0.941 -7.318 -1 108 | -1.420 6.415 8.903 1.041 7.208 1.580 5.844 -4.193 10.135 -1.924 +1 109 | 6.961 6.230 -7.001 -9.252 2.884 0.693 1.736 4.750 3.741 -5.074 -1 110 | 1.351 -5.987 3.332 2.113 0.213 -2.929 -3.772 -0.380 -7.765 -5.345 +1 111 | -1.127 5.672 9.248 1.263 7.302 1.712 6.765 -2.418 8.386 -2.086 +1 112 | -2.630 -0.051 -4.637 -5.768 8.618 -0.928 -4.783 3.790 -1.481 -6.270 -1 113 | -8.283 -1.071 -2.014 3.774 -6.100 -5.396 -0.413 4.315 4.053 0.214 +1 114 | -2.992 1.554 3.113 8.568 4.967 -3.031 2.420 -7.111 -2.040 -7.763 -1 115 | -7.837 0.615 -8.344 4.497 -1.297 -2.420 -4.729 8.042 -5.784 1.819 +1 116 | -0.243 -5.611 6.342 1.267 -1.072 -0.712 -4.068 -1.872 -8.461 -7.084 +1 117 | -6.084 1.042 -0.040 7.162 4.647 -0.347 0.560 -8.214 -1.619 -7.271 -1 118 | -3.540 -8.169 -4.066 8.252 2.373 4.306 -3.494 2.693 0.979 -9.578 +1 119 | 6.916 6.541 -6.625 -10.379 3.221 -0.509 1.724 4.288 1.466 -6.120 +1 120 | -4.577 1.145 
0.018 8.636 4.904 -1.771 2.728 -7.585 -0.765 -8.252 -1 121 | -2.088 5.420 8.673 2.062 6.167 2.134 4.895 -3.544 9.727 -2.450 +1 122 | -8.332 -2.237 -1.628 4.356 -6.523 -5.951 4.414 6.667 4.411 -2.119 +1 123 | 4.118 6.508 -8.193 -10.069 0.924 0.272 0.214 2.981 2.223 -7.489 -1 124 | -3.778 2.341 -0.524 8.103 4.319 -2.082 1.083 -8.321 -1.278 -8.798 -1 125 | -7.499 -1.619 -5.535 5.272 -7.705 -4.910 1.819 6.528 4.041 -1.326 +1 126 | -4.127 -8.291 -5.695 8.201 0.255 4.760 -4.044 3.385 2.516 -9.594 +1 127 | 6.700 6.428 3.379 -5.159 -6.715 8.083 -3.434 -2.235 2.929 -9.637 +1 128 | -8.896 -0.723 -10.147 5.064 2.771 -0.994 -7.294 8.339 -3.044 1.959 +1 129 | -2.081 5.609 9.552 0.565 6.738 1.296 7.947 -2.530 8.325 -3.514 +1 130 | -4.579 1.811 0.125 8.145 4.920 -1.556 1.678 -7.921 -1.772 -7.744 -1 131 | 3.338 3.986 1.646 3.777 -6.921 -4.542 0.766 -6.226 6.967 6.841 -1 132 | -3.681 -2.079 -6.780 -5.467 9.234 -1.248 -3.114 6.309 -1.221 -8.676 -1 133 | -7.764 0.769 -10.165 4.007 3.064 -2.622 -5.701 9.786 -4.234 -0.198 -1 134 | -0.103 4.958 7.498 -0.883 6.983 0.276 6.726 -3.949 10.369 -2.753 +1 135 | -1.282 5.773 5.468 0.110 7.565 0.387 4.910 -3.717 9.294 -2.532 -1 136 | -3.644 2.526 1.554 7.596 4.930 -1.231 0.818 -7.979 -1.956 -7.666 +1 137 | 2.139 5.779 1.790 4.460 -6.577 -6.135 0.490 -6.959 7.422 5.758 -1 138 | -1.808 0.114 -7.154 -6.344 9.181 -1.016 -4.927 3.439 -0.777 -7.513 +1 139 | 2.854 -5.731 5.084 1.759 -1.098 -3.791 -2.922 -0.496 -8.222 -6.198 -1 140 | 6.471 6.798 4.050 -4.488 -5.648 11.293 -4.320 -2.142 1.470 -9.715 +1 141 | -5.485 -0.627 -9.876 3.538 1.541 -2.476 -4.259 7.926 -4.939 2.149 +1 142 | 0.916 -5.387 5.192 0.723 -1.094 -0.188 -1.759 -1.139 -8.333 -5.284 +1 143 | -5.212 1.695 0.940 8.174 4.717 -1.298 1.289 -5.919 -0.446 -8.292 +1 144 | 8.011 6.266 2.979 -4.359 -5.176 13.096 -4.099 -3.256 2.077 -9.859 +1 145 | -3.767 3.225 -2.110 7.279 3.848 -1.321 1.645 -8.115 0.427 -8.156 -1 146 | 1.888 -5.258 7.152 0.850 -1.702 -1.501 -3.574 -0.168 -8.261 -5.569 -1 147 | 9.239 5.532 4.031 -5.931 -4.377 10.680 -3.779 -2.304 2.012 -10.972 +1 148 | -2.322 6.891 6.215 1.403 6.963 0.839 6.151 -1.961 9.993 -3.200 +1 149 | -1.789 0.868 -5.404 -6.718 7.999 -0.144 -5.168 4.204 -1.345 -6.917 -1 150 | -7.952 0.002 -9.189 5.021 -0.484 -2.360 -6.115 8.219 -6.282 -1.265 +1 151 | -0.618 5.592 6.800 1.547 6.485 2.449 6.361 -2.989 10.185 -2.822 -1 152 | 7.707 5.983 4.693 -6.202 -4.484 8.751 -2.613 0.363 1.905 -8.644 -1 153 | -9.597 -0.306 -9.028 4.716 -0.849 -1.641 -6.569 8.838 -2.019 1.103 -1 154 | 7.175 5.759 -7.950 -8.947 2.314 -0.005 1.227 5.692 2.410 -5.556 +1 155 | 4.042 3.199 2.770 5.799 -6.211 -8.208 0.108 -6.156 5.386 6.490 +1 156 | -6.662 0.013 -9.189 2.850 1.247 -1.528 -6.067 7.850 -3.120 -1.400 +1 157 | 1.866 -5.467 5.208 3.228 -1.233 -6.272 -2.764 -0.942 -9.654 -5.125 -1 158 | 6.178 6.683 -6.793 -10.433 2.630 1.573 3.203 3.560 1.192 -6.760 +1 159 | 4.404 6.471 -8.628 -10.263 0.546 -0.785 -1.379 4.971 0.945 -6.517 -1 160 | 2.830 5.288 2.452 2.067 -7.073 -4.866 0.739 -6.474 5.704 5.364 -1 161 | -7.967 -2.479 -3.869 3.800 -4.500 -6.947 0.394 6.505 5.023 -0.365 -1 162 | -3.186 -5.401 -5.584 8.341 0.273 5.109 -3.846 3.769 3.690 -8.670 -1 163 | 3.042 -5.875 4.841 2.752 -0.467 -1.516 -4.943 -1.005 -9.157 -6.150 +1 164 | 0.539 -4.784 5.733 4.097 0.145 -1.556 -2.958 -1.512 -7.560 -4.535 -1 165 | 4.625 5.233 -8.120 -9.934 0.489 -0.455 -1.659 3.412 1.378 -5.683 +1 166 | -8.733 -1.835 -1.650 4.976 -6.945 -4.674 2.375 4.609 5.639 -2.290 +1 167 | -0.466 6.559 8.754 -0.231 7.224 2.998 6.898 -3.430 8.565 -1.060 -1 168 | 
-0.865 5.144 8.207 1.192 6.084 1.155 5.331 -3.126 9.157 -3.207 +1 169 | -0.022 5.730 8.387 1.255 7.384 1.017 4.400 -2.846 10.586 -3.489 +1 170 | 5.439 6.092 -7.100 -10.325 3.498 0.833 -0.281 3.430 1.834 -7.245 -1 171 | -7.569 0.194 -10.366 6.283 2.071 -1.023 -6.174 8.408 -4.831 -1.185 +1 172 | -1.750 0.687 -5.114 -4.529 8.394 0.770 -4.963 4.571 -0.727 -7.298 +1 173 | 4.597 5.546 -8.520 -9.138 2.789 -0.273 0.360 4.965 2.472 -5.244 +1 174 | -8.323 0.669 -11.349 2.705 1.221 -2.074 -6.373 8.807 -3.238 -1.817 +1 175 | -2.184 5.352 8.065 3.459 6.565 0.444 7.074 -4.236 9.396 -2.711 +1 176 | 3.767 3.291 -0.242 3.792 -6.161 -3.967 -0.817 -6.280 4.807 5.923 -1 177 | 4.795 3.925 6.631 -5.293 -5.935 7.521 -3.511 -3.955 3.555 -10.096 +1 178 | -9.241 -0.362 -2.991 4.013 -8.989 -6.874 3.217 4.960 5.205 -1.888 -1 179 | 0.429 4.442 8.563 0.293 6.011 0.603 5.592 -2.659 10.290 -3.131 +1 180 | 3.476 5.109 -1.095 2.931 -6.582 -6.347 -0.097 -6.013 4.376 5.678 +1 181 | 2.144 5.270 1.413 1.263 -6.007 -6.181 0.071 -6.618 5.838 6.503 -1 182 | 3.277 6.351 -0.028 3.541 -8.034 -8.531 -2.265 -6.717 6.439 6.543 -1 183 | 5.013 5.591 4.512 -5.542 -5.802 5.731 -4.263 -2.472 2.052 -9.905 -1 184 | -2.232 0.579 -5.879 -4.579 7.947 -2.397 -3.095 3.271 -3.134 -8.320 +1 185 | -2.758 -0.066 -5.145 -5.348 9.777 -1.301 -3.477 2.773 -0.874 -8.199 -1 186 | -5.129 0.227 1.746 8.025 5.438 -1.946 1.220 -6.180 0.906 -8.899 +1 187 | 4.198 5.374 4.346 -7.673 -5.763 8.559 -3.556 -1.363 2.789 -7.626 +1 188 | -9.323 0.107 -10.142 4.542 2.629 -1.358 -8.702 8.454 -4.786 -0.555 +1 189 | 2.362 -4.501 5.237 3.684 -1.139 -1.001 -2.275 -1.158 -8.785 -5.817 +1 190 | -0.764 6.681 9.425 -0.636 6.870 2.624 6.030 -2.238 12.248 -2.721 +1 191 | 2.923 3.025 1.335 3.820 -6.625 -5.580 -4.995 -7.714 6.252 5.988 +1 192 | 0.693 -5.281 5.864 2.641 -1.042 -0.757 -3.794 -1.036 -9.183 -5.905 +1 193 | -5.274 1.798 1.497 8.571 4.555 -4.900 1.830 -5.855 -0.732 -6.855 +1 194 | 1.733 -5.583 6.956 2.121 -1.803 -0.135 -5.146 -1.563 -8.930 -7.657 +1 195 | -1.927 7.339 9.159 3.224 6.826 0.894 7.225 -1.962 9.118 -2.038 +1 196 | 5.760 5.009 -8.217 -9.673 4.005 -0.549 -0.374 5.807 1.055 -6.332 +1 197 | -0.705 1.045 -6.571 -6.344 9.610 -0.464 -4.253 3.818 -0.502 -7.687 -1 198 | -8.293 -0.930 -0.633 5.220 -8.254 -3.769 0.946 6.666 4.577 -1.415 +1 199 | -1.523 -0.312 -5.636 -6.122 7.870 -0.527 -4.586 4.968 1.003 -5.981 -1 200 | 4.249 5.291 -6.698 -10.248 1.897 -0.164 1.712 2.982 0.539 -8.260 +1 201 | -2.006 6.121 8.103 2.193 5.494 2.630 5.953 -3.511 9.145 -3.904 +1 202 | 4.413 4.445 -0.586 0.890 -7.339 -6.237 -1.696 -7.711 6.161 7.545 -1 203 | -3.560 -5.024 -4.258 8.596 0.966 5.184 -2.894 2.502 2.604 -10.258 +1 204 | -2.474 2.824 -5.808 -6.550 6.573 0.008 -3.746 5.479 0.330 -7.359 +1 205 | -7.859 -1.596 1.010 5.621 -7.566 -4.637 2.103 6.376 4.889 1.402 +1 206 | -4.651 2.260 1.462 8.053 5.430 -1.623 -0.716 -8.179 1.228 -8.143 +1 207 | -0.783 6.127 4.604 2.568 6.617 1.704 6.001 -2.683 10.872 -3.133 -1 208 | 2.678 2.955 0.344 0.503 -6.516 -7.583 -0.207 -6.987 6.295 7.872 -1 209 | -2.509 1.906 -6.302 -5.880 9.403 -1.257 -3.508 3.211 -0.220 -7.552 -1 210 | -1.896 6.234 8.849 2.563 6.990 1.841 5.651 -3.253 11.076 -0.986 +1 211 | -9.713 -0.420 -10.105 6.329 3.220 -1.063 -6.538 7.911 -3.845 -1.739 +1 212 | -0.415 7.472 7.034 2.210 6.565 1.928 6.684 -1.903 11.166 -3.272 -1 213 | -7.142 -5.750 -5.846 7.481 1.999 4.381 -3.730 3.105 2.398 -10.237 +1 214 | 1.391 -5.256 6.731 2.081 -1.669 -0.324 -4.756 -1.295 -8.806 -5.846 -1 215 | 1.753 5.326 1.933 1.902 -7.083 -4.438 -1.877 -7.107 4.623 6.101 
-1 216 | -2.151 -6.875 -4.433 8.770 1.118 4.888 -3.571 3.241 2.007 -7.799 +1 217 | -2.014 5.710 9.246 3.850 7.204 0.287 6.447 -2.878 8.890 -0.307 +1 218 | -7.782 0.347 -8.807 4.387 -0.647 -1.965 -8.834 8.420 -4.990 -0.948 -1 219 | 7.588 4.540 -6.787 -9.729 2.953 -0.209 1.813 3.606 1.747 -6.130 -1 220 | 7.142 4.726 -7.765 -10.490 1.607 0.368 -0.526 3.837 2.788 -8.127 +1 221 | 2.056 5.547 0.015 2.604 -6.097 -5.719 -0.314 -6.381 6.328 5.141 -1 222 | 8.677 6.093 4.703 -5.326 -4.800 7.339 -4.149 -1.857 2.699 -10.233 +1 223 | -3.324 1.317 1.473 7.126 5.022 -2.593 0.303 -7.187 0.808 -7.879 -1 224 | -3.158 -7.521 -3.963 6.835 1.673 3.166 -3.805 4.292 1.042 -10.653 +1 225 | 7.925 6.245 5.960 -6.178 -5.902 11.287 -4.209 -2.372 3.589 -10.645 -1 226 | -3.443 2.630 0.515 7.553 4.519 -2.748 2.332 -7.029 0.098 -8.239 -1 227 | -4.718 -6.824 -4.942 7.214 1.462 5.154 -4.228 3.367 -0.011 -9.100 -1 228 | 7.080 5.742 3.429 -5.993 -3.535 8.239 -2.792 0.359 2.361 -8.536 +1 229 | -0.852 -7.071 -4.359 7.536 1.957 2.280 -3.760 2.801 2.809 -9.580 +1 230 | -1.532 -0.192 -6.591 -6.911 7.075 0.778 -4.093 3.748 0.616 -7.632 +1 231 | -6.712 -7.735 -4.815 6.398 0.294 1.512 -3.349 4.214 2.039 -10.836 +1 232 | 8.033 5.119 4.766 -6.732 -4.829 9.764 -1.482 -0.846 3.586 -10.025 +1 233 | 1.706 -5.624 8.557 3.066 -1.179 -0.761 -3.236 0.002 -9.547 -7.327 -1 234 | -3.463 0.585 1.924 8.009 4.792 -0.508 1.448 -8.160 0.340 -6.830 -1 235 | -2.598 -6.845 -3.476 7.869 0.287 4.306 -4.058 3.922 1.101 -10.293 -1 236 | -3.475 2.028 2.009 7.495 4.762 -2.689 1.586 -6.900 -1.646 -7.905 +1 237 | -8.255 0.091 -9.486 3.446 -0.143 -0.807 -5.925 7.493 -5.908 -1.790 +1 238 | -6.767 0.758 -8.370 4.203 0.615 -1.988 -7.116 7.437 -4.305 -0.520 -1 239 | -3.681 0.250 -7.871 -4.880 7.221 -0.020 -5.079 4.578 -2.222 -8.052 -1 240 | -0.749 6.243 6.971 2.826 6.998 2.803 7.310 -3.696 9.884 -3.274 +1 241 | -0.829 6.678 8.453 -1.518 6.792 1.431 5.221 -2.182 9.329 -1.503 -1 242 | 1.177 -5.613 7.038 3.492 0.268 1.377 -2.086 -0.543 -9.640 -5.498 +1 243 | -7.674 -1.412 -1.973 5.062 -5.179 -4.284 1.183 6.459 4.909 -1.777 +1 244 | -0.315 6.642 8.127 -1.441 5.698 0.477 5.759 -4.455 9.716 -2.348 +1 245 | 3.525 4.083 1.552 2.874 -8.319 -4.160 -0.411 -7.347 6.096 5.876 -1 246 | -5.714 -6.678 -3.286 7.395 2.147 4.717 -3.539 3.234 1.220 -9.735 -1 247 | -1.762 5.602 7.446 -0.114 6.699 0.313 4.415 -2.354 10.878 -3.609 +1 248 | -4.299 0.706 3.162 8.094 4.145 -0.751 0.870 -6.757 -1.607 -9.485 -1 249 | 3.211 3.371 0.458 2.179 -8.394 -5.114 -1.232 -5.990 6.589 5.609 -1 250 | -8.707 -1.624 -2.606 4.438 -7.559 -3.912 1.046 6.056 4.617 -0.180 +1 251 | 5.726 4.774 -8.386 -10.067 0.897 0.524 2.056 5.007 1.272 -6.774 -1 252 | -0.832 6.785 10.273 1.464 6.606 0.518 4.346 -3.885 10.469 -1.879 -1 253 | -3.869 -6.875 -6.205 6.679 1.487 4.298 -4.438 2.890 1.197 -9.380 +1 254 | -2.399 0.387 -5.276 -4.611 7.636 0.035 -4.991 3.487 0.088 -6.565 +1 255 | -2.819 1.300 -1.810 8.040 5.078 -1.623 0.179 -8.269 -1.627 -7.077 -1 256 | -6.007 1.477 -0.287 8.175 4.708 -0.726 2.102 -7.566 0.579 -7.598 -1 257 | -9.166 -0.679 -9.957 7.168 1.583 -2.132 -7.852 8.350 -5.006 -2.450 +1 258 | 1.257 6.908 2.454 4.031 -6.557 -5.672 -2.441 -6.631 6.570 7.011 -1 259 | -6.422 2.107 3.821 7.421 5.143 -1.129 2.439 -7.326 0.459 -8.605 -1 260 | -6.344 -6.089 -4.315 6.933 1.881 3.235 -3.803 3.599 1.854 -9.430 +1 261 | 7.792 6.501 -9.372 -10.504 3.557 0.352 0.278 2.841 3.901 -6.192 -1 262 | -3.973 -9.911 -4.569 6.718 0.066 4.383 -3.652 3.386 2.113 -9.294 +1 263 | 2.416 4.027 -1.025 3.061 -7.181 -4.562 0.498 -6.607 5.378 5.695 
+1 264 | 7.394 6.001 5.042 -5.549 -4.211 6.551 -3.708 -1.360 4.204 -9.432 +1 265 | 6.629 5.908 -6.680 -9.916 3.525 0.138 1.608 4.875 1.332 -7.695 +1 266 | -5.905 2.701 1.233 8.132 4.873 -2.032 0.524 -6.642 -1.207 -8.545 -1 267 | 0.609 -3.847 6.607 1.102 -0.363 -1.234 -3.521 -0.791 -9.061 -6.059 -1 268 | -4.691 -7.628 -3.542 7.226 1.089 4.458 -4.125 3.101 1.894 -8.601 -1 269 | -8.023 -0.901 0.054 3.700 -6.796 -6.696 0.442 5.998 4.674 -0.615 +1 270 | 5.572 5.276 -6.537 -9.661 2.055 1.137 1.919 3.106 2.731 -5.978 +1 271 | -6.367 0.159 -10.008 5.421 2.090 -1.276 -7.927 8.343 -5.579 -1.032 +1 272 | -6.378 1.759 0.715 7.491 4.259 -3.111 2.806 -7.839 -1.088 -9.439 +1 273 | 3.967 -1.019 2.133 5.792 -6.929 -5.928 -0.268 -6.795 5.724 4.650 -1 274 | 8.263 5.104 5.745 -5.320 -4.966 10.428 -3.132 -3.360 2.047 -9.205 +1 275 | -5.820 0.467 -10.767 2.496 -0.751 -2.411 -5.743 8.046 -5.629 1.240 +1 276 | 6.431 4.807 -7.295 -9.106 0.996 0.192 1.160 3.628 1.927 -5.878 +1 277 | -4.129 -6.545 -4.528 7.976 1.510 3.924 -4.761 3.784 2.046 -10.982 +1 278 | 3.543 4.565 1.778 2.643 -6.222 -4.868 -0.768 -6.073 6.415 6.195 -1 279 | -6.517 -4.983 -4.072 7.783 -0.166 3.811 -3.516 2.891 4.508 -8.140 -1 280 | 1.439 3.638 -0.287 2.853 -7.761 -6.776 -1.240 -6.376 5.773 6.349 +1 281 | 5.462 4.157 -8.129 -10.474 2.366 -0.722 0.657 3.097 3.652 -3.534 +1 282 | -4.106 -7.816 -5.455 6.949 0.561 2.738 -4.009 3.975 3.155 -8.936 +1 283 | -7.175 0.157 -10.808 3.818 0.389 -1.515 -7.039 8.807 -4.465 -0.701 -1 284 | -6.079 -7.477 -0.727 7.754 2.665 4.761 -3.968 3.146 4.109 -9.934 +1 285 | 6.656 5.903 4.012 -4.883 -5.636 7.255 -2.273 -1.523 2.834 -10.313 -1 286 | 2.724 -4.975 4.256 3.423 0.236 -3.708 -4.931 -1.191 -8.818 -5.169 +1 287 | 4.737 5.180 -7.778 -9.136 5.585 0.284 0.028 2.284 2.538 -6.198 +1 288 | -1.099 6.206 7.983 2.022 8.003 0.448 5.443 -3.655 8.099 -3.126 -1 289 | -1.743 0.841 -5.984 -5.825 9.020 -0.503 -3.812 2.870 -1.001 -7.226 +1 290 | 2.231 -6.136 6.364 3.448 -1.673 -0.081 -3.620 -0.143 -8.281 -6.869 +1 291 | 5.263 6.044 5.478 -6.798 -4.625 7.145 -3.986 -2.527 2.820 -7.770 -1 292 | 5.777 6.135 4.806 -4.871 -4.304 12.551 -3.372 -2.023 1.558 -10.054 +1 293 | 2.193 4.527 -1.901 5.450 -7.859 -7.979 -2.231 -5.990 5.115 6.232 -1 294 | 5.651 5.011 -8.566 -10.276 1.562 1.574 0.399 4.406 0.369 -5.681 +1 295 | 7.891 6.801 4.003 -5.973 -5.365 10.051 -3.764 -2.870 2.374 -10.327 -1 296 | 3.850 5.373 0.914 2.613 -7.433 -5.996 -0.058 -7.043 5.058 6.944 -1 297 | -2.434 0.353 -5.131 -5.942 8.648 0.984 -4.277 4.474 -1.200 -7.019 +1 298 | -1.008 6.285 7.081 1.437 7.663 1.543 5.205 -4.399 8.916 -0.203 +1 299 | 5.003 5.724 -7.364 -8.000 3.611 -0.231 -0.748 5.405 2.394 -7.743 -1 300 | -2.624 0.858 -4.845 -6.918 8.687 -0.707 -3.480 3.973 -0.891 -8.051 +1 301 | 1.464 -4.276 3.334 1.884 -0.886 -1.546 -3.126 -1.064 -9.749 -6.356 -1 302 | 6.225 7.233 -8.020 -9.274 -0.119 0.891 0.100 4.839 0.012 -8.921 -1 303 | 1.722 -4.012 5.621 3.485 0.088 -1.591 -2.811 -1.963 -7.967 -7.273 -1 304 | 3.004 -6.267 2.829 3.613 -0.396 0.921 -3.649 -0.926 -9.158 -5.312 -1 305 | 5.692 4.756 -8.852 -9.458 2.976 1.888 0.860 4.868 -0.559 -4.433 -1 306 | 2.030 -4.922 4.176 2.178 -0.307 0.196 -3.440 -1.232 -8.587 -6.708 -1 307 | -7.538 0.494 -8.725 2.961 2.802 -2.111 -7.976 8.487 -4.914 0.032 -1 308 | -8.885 0.680 -10.858 3.238 0.621 -2.390 -7.387 8.098 -2.926 2.386 +1 309 | -4.773 1.929 -0.898 7.948 4.550 -0.181 1.630 -7.243 -2.025 -7.574 +1 310 | -8.049 -4.044 -3.275 4.869 -8.505 -4.167 -0.164 6.025 4.398 0.209 +1 311 | 6.201 6.013 -8.111 -8.903 3.144 0.392 -1.349 5.022 
2.839 -6.505 +1 312 | -2.798 5.791 9.271 -0.173 6.994 0.903 5.370 -3.538 7.848 -2.691 +1 313 | -7.233 -2.326 -1.252 6.438 -5.161 -1.437 1.121 6.480 4.978 -2.030 +1 314 | -1.095 5.147 9.854 1.986 7.160 3.414 5.109 -4.253 9.949 -1.311 +1 315 | -2.873 -6.619 -1.646 7.302 0.758 5.524 -3.723 4.073 2.488 -10.506 +1 316 | 4.565 8.202 -7.724 -8.328 2.532 -1.008 -1.008 4.784 2.382 -6.042 +1 317 | 4.717 5.916 -8.213 -8.970 6.478 0.875 3.326 5.233 -0.091 -8.471 +1 318 | -1.981 6.596 7.826 1.003 5.852 0.853 7.305 -2.718 10.325 -3.170 +1 319 | 7.171 4.728 -6.881 -9.610 1.548 1.813 0.491 4.645 2.687 -8.191 -1 320 | -3.926 -7.950 -4.234 6.869 2.573 3.486 -4.068 3.807 2.889 -9.677 +1 321 | -9.151 -1.903 -3.377 3.593 -7.582 -6.373 1.870 8.020 4.393 -2.483 +1 322 | -4.673 -7.102 -6.241 7.444 0.791 3.114 -3.723 4.474 1.969 -9.692 +1 323 | -1.231 5.115 7.698 0.514 5.929 0.493 5.744 -2.413 11.250 -2.470 -1 324 | -9.925 -3.662 -4.188 4.727 -6.516 -5.854 1.596 6.389 4.816 -0.917 -1 325 | 2.068 -6.360 6.958 0.813 0.766 -0.611 -4.261 -1.181 -8.017 -5.270 +1 326 | 5.827 5.652 4.328 -7.030 -3.499 7.953 -1.801 -2.024 3.255 -11.173 +1 327 | -2.298 -0.283 -7.065 -4.636 10.891 -1.194 -4.520 4.700 -1.253 -8.059 -1 328 | -8.464 -0.988 -9.429 5.043 2.038 -1.767 -5.178 8.256 -4.479 0.154 -1 329 | 3.820 7.769 -0.962 3.911 -6.438 -3.596 -3.348 -7.499 3.854 6.656 -1 330 | 3.752 4.612 0.478 -0.848 -7.932 -4.231 -3.040 -6.612 6.104 6.346 -1 331 | -0.238 6.392 10.185 2.541 6.064 0.649 4.526 -3.207 9.792 -2.512 +1 332 | 3.119 7.668 2.159 -1.783 -7.626 -6.007 -2.462 -5.530 6.918 6.069 +1 333 | -9.004 0.800 -10.256 5.447 4.221 -2.111 -7.422 8.896 -5.211 2.579 -1 334 | 6.017 4.119 -7.042 -9.737 1.957 0.235 1.421 3.272 -0.687 -6.584 +1 335 | -8.119 -1.111 -10.318 6.060 1.569 -2.663 -6.453 7.929 -1.783 -0.917 -1 336 | -8.488 -2.332 -2.476 5.459 -5.607 -3.834 2.266 6.382 4.527 -0.770 -1 337 | 0.568 -5.869 4.952 1.140 1.018 -1.235 -3.413 -0.855 -9.328 -7.212 +1 338 | -4.839 0.933 0.202 7.790 5.395 -3.289 1.227 -8.203 -1.413 -9.869 -1 339 | -8.760 -0.326 -10.529 1.910 -0.263 -0.929 -6.715 7.810 -2.706 1.433 -1 340 | -0.284 6.958 6.839 2.459 6.129 0.940 5.532 -3.432 8.715 -2.260 +1 341 | -1.624 6.873 9.028 -1.054 6.434 0.993 6.624 -3.529 9.804 -0.915 +1 342 | -4.248 1.071 -3.464 7.893 5.376 -0.436 0.950 -7.357 -0.028 -8.329 -1 343 | -0.641 6.610 8.746 0.039 6.665 0.197 6.678 -2.448 12.394 -1.590 -1 344 | 6.612 5.600 4.956 -5.216 -3.496 8.224 -4.414 -3.459 4.452 -8.653 +1 345 | -4.490 -8.382 -4.436 7.333 2.326 5.041 -3.457 2.344 3.055 -9.357 +1 346 | 3.573 -3.900 4.278 2.291 0.727 -0.775 -3.318 0.029 -10.289 -6.616 -1 347 | -7.434 -0.448 -9.703 4.605 0.944 -1.396 -5.677 7.732 -5.213 -0.343 -1 348 | -7.881 -0.390 -10.634 4.780 0.251 -1.846 -5.829 8.455 -2.621 -1.207 +1 349 | -2.095 -4.869 4.851 2.763 -1.397 0.990 -3.605 -0.876 -7.683 -7.807 -1 350 | -1.208 1.818 -5.719 -5.556 8.429 -1.426 -4.684 5.311 -0.041 -6.598 -1 351 | 4.152 5.008 -1.478 0.971 -8.751 -5.787 -2.270 -7.157 6.239 5.692 -1 352 | -6.884 0.455 -9.346 5.867 -0.062 -1.730 -7.300 8.937 -6.271 2.756 +1 353 | 7.938 5.203 4.191 -6.029 -5.208 7.718 -3.368 -1.079 1.903 -8.886 +1 354 | 0.202 6.971 7.583 1.801 6.539 1.742 5.523 -3.578 8.701 -2.527 +1 355 | -0.026 -6.426 6.069 3.574 -1.596 -0.553 -3.600 -1.192 -8.805 -6.646 +1 356 | 8.208 4.483 2.763 -7.481 -6.468 9.898 -3.304 -1.374 3.029 -8.775 +1 357 | 6.281 5.583 -7.503 -7.668 5.419 -0.110 0.965 4.756 3.741 -5.703 +1 358 | 2.958 4.436 1.801 2.774 -8.075 -7.866 -1.911 -6.808 5.985 5.736 -1 359 | 0.494 6.400 6.572 -1.288 6.775 0.781 
5.259 -4.405 10.165 -1.104 -1 360 | -9.328 -0.660 -11.965 4.023 0.927 -1.703 -7.162 8.172 -5.045 -1.000 +1 361 | 1.316 -6.137 4.746 0.190 2.097 1.615 -5.116 -0.813 -9.433 -5.934 +1 362 | -7.972 -0.258 -9.187 4.383 1.665 -1.541 -5.607 7.869 -4.654 -0.795 +1 363 | -3.726 2.144 2.433 7.247 5.178 -0.869 1.515 -6.156 1.103 -8.318 -1 364 | -3.926 1.981 0.751 7.547 4.147 -3.003 0.429 -7.713 -0.384 -9.326 -1 365 | -9.833 1.469 -10.520 3.746 2.302 -2.616 -7.043 8.607 -5.090 -1.343 -1 366 | -8.008 0.987 -10.157 6.133 1.762 -2.381 -7.602 8.185 -3.957 0.542 +1 367 | -8.628 -1.474 -10.593 5.740 1.659 -1.961 -5.137 8.163 -5.490 -1.407 -1 368 | 6.922 5.927 -8.339 -9.354 2.921 -0.514 0.351 5.014 2.749 -7.578 +1 369 | -6.471 -8.202 -5.932 7.185 1.365 6.482 -3.754 2.655 1.143 -10.536 +1 370 | -8.547 0.538 -8.516 3.892 4.064 -1.982 -9.893 8.662 -4.898 0.557 +1 371 | -0.755 6.001 8.265 3.163 7.237 1.467 6.029 -2.435 11.385 -1.985 +1 372 | -8.460 0.426 -9.784 3.933 -0.685 -0.511 -5.004 8.049 -3.115 -0.186 +1 373 | 0.632 -5.854 5.718 2.577 -0.830 -1.964 -2.568 -0.845 -8.970 -6.272 +1 374 | -3.979 -1.884 -5.200 -5.006 7.716 0.570 -3.196 5.360 0.275 -8.040 -1 375 | 1.769 -4.502 3.941 2.974 0.850 -2.342 -4.477 -1.007 -9.208 -5.495 -1 376 | 6.772 6.387 5.414 -4.961 -3.682 8.962 -2.472 -3.286 3.529 -8.793 -1 377 | 0.667 -4.597 6.416 2.585 -0.327 0.590 -3.816 -0.565 -7.926 -6.556 +1 378 | -7.527 -1.550 -3.908 3.734 -7.522 -5.395 0.067 6.450 4.247 0.182 -1 379 | 5.737 8.264 -8.082 -9.077 1.485 0.076 2.216 4.065 2.803 -7.277 +1 380 | -0.610 5.862 6.308 -0.418 6.477 1.528 7.516 -4.328 12.051 -2.959 +1 381 | 5.819 5.359 -8.457 -9.039 3.058 -0.149 -0.066 4.331 2.433 -5.632 +1 382 | 2.329 4.223 2.504 4.245 -6.482 -6.946 -3.359 -7.289 8.628 7.143 -1 383 | -7.764 -0.188 -9.481 5.430 2.069 -1.953 -9.165 7.921 -5.494 1.391 +1 384 | -4.437 2.035 1.359 6.898 5.199 -2.787 0.872 -6.967 -1.535 -8.368 -1 385 | -4.284 -6.381 -3.255 8.574 0.156 4.492 -3.875 2.238 2.520 -11.350 -1 386 | -1.778 0.907 -4.992 -5.202 8.714 -0.654 -5.289 3.590 -1.355 -7.578 +1 387 | -8.197 -1.338 -11.017 5.257 3.611 -2.895 -7.278 8.837 -5.702 -0.386 -1 388 | 5.100 6.107 -8.469 -9.301 2.879 0.466 -0.628 4.598 1.798 -7.366 +1 389 | 2.383 5.946 1.216 2.582 -6.973 -5.847 -2.141 -7.296 4.821 6.592 +1 390 | 7.976 6.017 5.573 -5.967 -3.845 9.890 -3.643 -1.808 1.963 -10.200 +1 391 | -1.979 0.744 -7.093 -6.453 6.408 0.582 -4.910 2.731 -0.183 -6.814 -1 392 | -8.257 -1.609 -1.211 4.823 -6.166 -5.782 3.521 6.115 4.950 -0.718 +1 393 | 6.332 4.735 3.987 -7.360 -4.863 9.147 -3.768 -2.522 2.117 -7.705 +1 394 | -4.020 -5.722 -6.070 8.781 1.662 6.528 -3.530 3.511 3.025 -9.252 +1 395 | 3.697 6.262 0.258 1.851 -7.757 -7.813 -2.241 -7.290 3.711 6.558 -1 396 | -0.916 5.350 7.704 3.578 7.424 1.908 4.965 -3.517 8.692 -3.968 -1 397 | -4.951 -5.591 -6.133 6.554 1.579 5.645 -3.659 4.618 3.405 -7.887 +1 398 | -9.371 -1.341 -10.310 5.209 1.225 -2.096 -6.725 8.471 -2.507 1.249 +1 399 | 7.389 4.864 6.078 -5.094 -5.306 10.265 -4.797 -1.599 3.644 -10.225 +1 400 | -1.399 7.345 7.467 1.183 6.794 2.489 5.417 -3.007 11.262 -2.548 +1 401 | -2.602 -6.676 -3.787 6.973 0.450 6.402 -4.174 3.608 1.688 -10.028 +1 402 | 6.315 5.439 -6.615 -8.930 2.978 0.236 0.837 3.567 0.473 -5.990 +1 403 | 5.748 4.537 4.808 -7.896 -4.014 8.878 -5.360 -4.223 2.137 -10.975 +1 404 | 2.815 3.914 0.924 5.112 -6.903 -3.916 -2.724 -6.768 4.858 6.134 -1 405 | -3.186 1.439 -0.112 8.496 5.587 0.599 1.068 -7.244 -0.673 -7.200 -1 406 | -2.985 0.269 -7.147 -7.062 8.975 0.642 -4.198 4.277 -2.460 -6.809 +1 407 | 6.723 5.015 -7.313 
-9.461 4.491 1.127 0.601 3.618 2.225 -6.731 +1 408 | -2.246 1.846 -6.451 -4.152 6.912 1.940 -5.935 3.524 -1.011 -7.674 -1 409 | -8.189 -1.936 -4.421 4.364 -5.891 -4.757 1.969 8.846 4.819 0.950 +1 410 | -7.687 -1.925 -1.511 4.249 -7.881 -4.754 2.566 5.584 4.285 -2.591 -1 411 | -1.923 6.050 9.150 2.309 6.374 2.151 6.195 -3.032 8.458 -3.974 -1 412 | -9.278 0.730 -9.168 4.078 1.689 -2.007 -6.508 8.681 -4.528 -0.626 +1 413 | -3.877 -6.702 -1.450 6.562 -0.008 6.456 -3.929 1.701 2.333 -10.360 -1 414 | -2.772 -7.337 -5.405 7.204 1.395 2.610 -4.148 3.663 2.222 -10.547 +1 415 | 8.426 7.263 4.370 -4.918 -4.880 6.835 -4.631 -1.674 4.287 -9.623 +1 416 | 2.848 4.179 -0.776 0.559 -6.803 -5.993 -1.998 -6.045 5.069 6.948 -1 417 | -2.346 1.325 -6.775 -5.354 8.871 -0.223 -4.730 4.925 -1.592 -9.209 -1 418 | -1.107 1.362 -6.235 -4.760 9.868 -0.127 -4.339 3.887 0.213 -5.986 -1 419 | -3.255 -1.193 -4.581 -4.750 9.188 -0.423 -5.169 4.176 -1.287 -8.773 +1 420 | 5.406 4.151 -7.419 -9.629 2.839 -0.320 -0.233 3.257 2.704 -7.679 +1 421 | 6.157 4.818 3.014 -4.903 -5.024 7.776 -3.339 -2.608 4.076 -9.535 -1 422 | 7.333 5.287 3.969 -6.134 -4.687 10.958 -5.056 -2.990 3.148 -10.202 +1 423 | -8.978 -1.820 -0.298 3.822 -7.972 -6.398 -0.887 6.526 3.443 -2.939 +1 424 | 7.011 6.197 -8.591 -10.281 4.432 -0.017 0.642 3.896 2.700 -6.540 -1 425 | -4.508 -5.564 -5.692 7.597 1.672 5.899 -4.852 3.051 3.326 -11.336 -1 426 | 3.501 3.104 2.915 3.175 -7.127 -6.297 -0.532 -7.251 4.377 5.666 -1 427 | -2.325 5.828 6.973 1.041 6.759 -0.008 7.280 -2.373 10.188 -1.857 +1 428 | -5.585 2.139 1.743 9.007 5.491 -0.526 1.359 -6.622 -1.792 -8.227 -1 429 | -7.238 -0.777 -10.542 2.703 3.809 -2.191 -5.534 8.675 -5.124 0.639 +1 430 | 6.985 7.546 4.855 -5.368 -5.449 8.630 -5.245 -4.093 3.327 -10.559 +1 431 | -3.400 1.947 -5.206 -7.941 7.299 1.601 -4.356 4.536 -0.094 -7.254 -1 432 | -8.155 -0.913 -3.295 3.759 -7.889 -7.449 0.437 7.043 4.393 -0.387 -1 433 | 8.029 6.076 4.258 -5.840 -3.561 11.237 -1.282 -0.491 2.298 -10.000 -1 434 | 2.325 5.310 2.094 -0.131 -6.055 -6.391 -2.123 -6.335 4.471 5.868 -1 435 | -3.616 3.189 -1.364 7.966 4.498 -2.038 1.288 -7.363 -1.927 -7.496 -1 436 | 4.172 5.403 3.183 -3.629 -5.957 6.617 -2.215 -1.497 3.177 -9.423 +1 437 | 2.516 -4.749 5.890 -0.964 0.003 -0.618 -5.018 -1.794 -8.870 -6.777 +1 438 | 2.766 -5.686 5.495 2.996 -0.256 -4.395 -3.589 -1.377 -7.795 -6.549 -1 439 | -2.631 5.882 7.515 0.171 6.128 2.501 4.804 -3.091 9.687 -2.307 -1 440 | -3.072 1.116 0.890 7.300 5.129 -2.118 1.017 -8.358 -0.821 -8.987 +1 441 | 4.481 6.090 -8.761 -9.427 3.241 -0.785 0.025 3.014 1.769 -5.664 +1 442 | -5.767 -6.401 -4.456 9.152 2.037 5.517 -3.659 3.525 2.468 -8.503 +1 443 | -5.254 1.210 1.970 8.391 4.020 0.414 0.141 -7.362 0.599 -6.180 +1 444 | -8.983 -1.921 -5.615 4.616 -6.521 -5.778 2.550 8.245 4.620 -1.686 +1 445 | 1.679 3.560 1.378 4.901 -7.076 -5.048 0.477 -6.998 4.761 5.753 -1 446 | 2.089 5.739 0.042 2.626 -7.375 -4.447 -1.055 -5.947 5.800 6.124 +1 447 | -7.341 -0.473 -9.853 3.150 2.075 -0.963 -5.240 8.133 -3.166 -0.790 +1 448 | -4.700 1.732 -0.097 7.839 4.195 -0.433 1.494 -7.624 0.768 -8.633 -1 449 | -7.245 -2.650 -5.203 5.113 -7.133 -6.440 1.239 6.388 5.199 -3.472 +1 450 | -4.696 -7.269 -2.809 8.128 -0.704 4.495 -3.487 3.535 1.141 -8.692 +1 451 | -0.868 7.298 8.406 1.586 6.952 1.469 4.918 -3.297 9.685 -3.719 -1 452 | 2.613 7.404 0.743 7.343 -9.070 -5.751 -0.347 -6.257 4.654 6.002 +1 453 | -2.196 0.028 -6.102 -5.211 9.192 1.353 -5.916 5.258 -0.831 -7.554 +1 454 | -8.562 -0.564 -9.900 2.051 2.561 -2.540 -7.320 8.916 -5.033 -0.524 -1 455 | 
-9.210 0.386 -10.007 2.605 1.789 -1.914 -6.501 8.372 -3.171 -0.411 +1 456 | 1.951 2.295 1.248 4.659 -6.449 -4.260 -1.396 -7.164 4.046 6.434 -1 457 | -1.237 5.728 9.585 3.185 7.608 1.700 4.164 -2.510 10.019 -3.060 -1 458 | 1.597 -5.717 6.306 1.471 -1.603 -2.057 -4.464 -1.597 -9.434 -6.684 +1 459 | -2.153 6.185 3.448 3.323 7.187 2.643 5.308 -2.746 8.950 -2.385 -1 460 | -1.791 7.605 7.723 0.477 7.137 2.647 6.859 -2.804 7.514 -2.502 +1 461 | 5.720 4.738 4.487 -6.074 -5.531 7.986 -4.861 -1.036 4.094 -10.520 -1 462 | -2.936 0.568 -7.030 -4.138 9.549 0.657 -7.552 2.945 -1.030 -7.144 -1 463 | -2.563 1.219 -7.322 -5.342 8.055 0.346 -4.104 5.030 0.676 -7.178 +1 464 | -1.262 6.264 6.586 2.458 7.020 2.959 7.156 -3.504 11.479 -4.577 +1 465 | -1.450 4.701 8.758 0.442 7.081 1.549 5.830 -2.573 9.644 -1.075 +1 466 | -4.310 -3.951 -6.428 6.515 0.257 6.089 -4.209 3.556 3.275 -9.162 +1 467 | -3.802 0.672 0.460 7.826 4.997 -2.422 0.926 -7.143 1.544 -8.899 -1 468 | -8.085 -0.869 -2.374 3.753 -9.175 -4.758 3.058 5.989 5.243 -1.452 +1 469 | 1.179 -6.293 7.136 0.532 -0.564 -0.843 -3.095 -0.569 -8.908 -5.705 -1 470 | 0.966 -5.909 5.677 2.280 -0.392 -1.216 -2.872 -1.513 -9.137 -5.727 -1 471 | -3.207 -6.391 -5.834 7.625 1.163 5.058 -4.957 4.912 2.896 -10.725 -1 472 | -8.618 -2.132 -2.646 4.822 -4.471 -5.147 0.086 6.935 4.021 -2.423 -1 473 | -2.061 -7.528 -3.036 8.384 0.205 4.713 -3.894 3.233 1.870 -9.282 +1 474 | 1.507 -4.302 4.671 4.159 -0.156 -0.131 -2.503 0.098 -9.331 -6.900 -1 475 | 5.590 5.708 -7.211 -9.058 3.256 -0.523 1.965 3.600 0.270 -7.850 -1 476 | -0.821 4.742 8.941 1.871 6.962 1.244 5.879 -2.716 8.907 -0.988 -1 477 | -9.116 0.000 -9.557 5.756 -2.005 -2.094 -7.088 8.408 -5.555 -1.148 +1 478 | -9.413 -2.707 -1.989 5.296 -8.427 -2.332 1.210 5.444 4.498 -1.140 +1 479 | -5.347 1.926 4.281 8.679 5.759 0.256 1.008 -7.487 -1.273 -8.359 +1 480 | 5.101 5.380 -6.950 -9.824 3.834 0.709 2.500 4.561 3.119 -6.207 +1 481 | -9.261 -0.965 -11.776 3.295 1.867 -0.843 -7.291 8.857 -3.044 -2.438 +1 482 | -9.510 0.413 -10.184 5.107 0.182 0.065 -8.695 9.636 -7.060 -0.664 -1 483 | 0.852 -5.187 3.388 6.810 0.153 -1.193 -3.296 0.122 -8.288 -6.800 -1 484 | -1.289 5.309 7.769 1.985 6.546 1.607 5.015 -2.370 10.150 -4.343 -1 485 | 3.821 7.446 0.699 1.459 -6.876 -6.498 -3.382 -7.236 6.155 6.578 -1 486 | -2.053 6.469 7.628 0.997 7.178 0.076 7.634 -3.355 8.093 -2.637 +1 487 | 2.731 -3.958 4.793 3.704 -1.543 -2.425 -4.486 -0.669 -8.239 -5.808 -1 488 | 0.901 -5.275 3.259 2.488 -1.280 -2.095 -2.361 -0.094 -9.157 -5.251 -1 489 | -2.324 -5.869 -4.172 6.665 1.345 6.075 -3.961 2.915 2.340 -9.644 +1 490 | -3.868 -6.295 -5.345 7.480 2.391 4.763 -4.843 2.769 2.530 -10.609 +1 491 | -1.337 6.944 8.291 2.470 7.046 2.190 6.909 -1.755 8.896 -0.916 +1 492 | 6.859 4.944 5.298 -5.680 -7.393 9.021 -2.919 -2.478 2.822 -9.414 +1 493 | -4.426 -8.947 -3.580 7.756 1.233 5.047 -3.381 2.917 3.072 -10.570 -1 494 | 7.414 6.362 4.286 -5.987 -5.187 13.042 -3.300 -1.014 2.268 -11.366 +1 495 | 4.024 4.688 1.802 1.577 -7.680 -3.861 -4.391 -6.246 6.351 6.383 -1 496 | -4.915 -6.687 -2.742 7.010 1.958 4.891 -4.081 2.878 -0.622 -10.494 +1 497 | 5.312 3.225 -7.290 -10.544 1.038 1.133 0.454 3.726 2.000 -5.998 -1 498 | -5.886 2.038 -1.136 7.281 5.380 -2.020 1.388 -7.267 -2.814 -8.637 -1 499 | -4.278 -6.023 -4.698 7.758 0.745 3.107 -3.645 3.498 1.437 -9.395 +1 500 | 5.554 7.105 3.943 -5.502 -4.628 8.411 -4.894 -1.375 3.913 -9.855 +1 501 | -------------------------------------------------------------------------------- /quiz2/quiz2.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/li-xin-yi/machine_learning_techniques/a677ccb023faa16dd04841f91bbcafbf13b399b9/quiz2/quiz2.pdf -------------------------------------------------------------------------------- /quiz2/quiz2.tex: -------------------------------------------------------------------------------- 1 | \documentclass[a4paper,10pt]{exam} 2 | \usepackage{amsfonts} 3 | \usepackage{amsmath} 4 | \usepackage{latexsym} 5 | \usepackage[colorlinks,linkcolor=blue,anchorcolor=blue,citecolor=green]{hyperref} 6 | \lhead{} 7 | \chead{Quiz 2} 8 | \rhead{} 9 | \title{Homework of Machine Learning Techniques: Quiz 2} 10 | \date{} 11 | \author{} 12 | \pagestyle{headandfoot} 13 | \headrule 14 | % \printanswers 15 | 16 | \begin{document} 17 | \maketitle 18 | \begin{questions} 19 | \question Recall that the probabilistic SVM is based on solving the following optimization problem: 20 | \[\mathop {\min }\limits_{A,B} F(A,B) = \frac{1}{N}\sum\limits_{n = 1}^N {\ln } \left( {1 + \exp \left( { - {y_n}\left( {A \cdot ({\bf{w}}_{svm}^T\phi ({{\bf{x}}_n}) + {b_{svm}}) + B} \right)} \right)} \right)\] 21 | When using the gradient descent for minimizing $F(A,B)$, we need to compute the gradient first. $z_n = \mathbf{w}_{svm}^T \mathbf{\phi}(\mathbf{x}_n)+b_{svm}$, and $p_n = \theta(-y_n( A z_n + B))$, where $\theta(s) = \frac{\exp(s)}{1+\exp(s)}$ is the usual logistic function. What is the gradient $\nabla F(A,B)$? 22 | \begin{checkboxes} 23 | \CorrectChoice $\frac{1}{N}\sum_{n=1}^N [-y_np_nz_n, -y_np_n ]^T$ 24 | \choice $\frac{1}{N}\sum_{n=1}^N [ -y_np_nz_n, +y_np_n ]^T$ 25 | \choice $\frac{1}{N}\sum_{n=1}^N [ +y_np_nz_n, -y_np_n ]^T$ 26 | \choice $\frac{1}{N}\sum_{n=1}^N [ +y_np_nz_n, +y_np_n ]^T$ 27 | \choice none of the other choices\\ 28 | \end{checkboxes} 29 | 30 | \question When using the Newton method for minimizing $F(A,B)$ (see Homework 3 of Machine Learning Foundations), we need to compute $-(H(F))^{-1}\nabla F $ in each iteration, where $H(F)$ is the Hessian matrix of $F$ at $(A,B)$. Following the notations of Question 1, what is $H(F)$? 31 | \begin{checkboxes} 32 | \choice $\frac{1}{N}\sum\limits_{n = 1}^N {\left[ {\begin{array}{*{20}{c}} 33 | {z_n^2{y_n}(1 - {p_n})} & {{z_n}{y_n}(1 - {p_n})} \\ 34 | {{z_n}{y_n}(1 - {p_n})} & {{y_n}(1 - {p_n})} 35 | \end{array}} \right]} $ 36 | 37 | \choice $\frac{1}{N}\sum\limits_{n = 1}^N {\left[ {\begin{array}{*{20}{c}} 38 | {z_n^2{p_n}(1 - {y_n})} & {{z_n}{p_n}(1 - {y_n})} \\ 39 | {{z_n}{p_n}(1 - {y_n})} & {{p_n}(1 - {y_n})} 40 | \end{array}} \right]} $ 41 | 42 | \choice none of the other choices 43 | \CorrectChoice $\frac{1}{N}\sum\limits_{n = 1}^N {\left[ {\begin{array}{*{20}{c}} 44 | {z_n^2{p_n}(1 - {p_n})} & {{z_n}{p_n}(1 - {p_n})} \\ 45 | {{z_n}{p_n}(1 - {p_n})} & {{p_n}(1 - {p_n})} 46 | \end{array}} \right]} $ 47 | \choice $\frac{1}{N}\sum\limits_{n = 1}^N {\left[ {\begin{array}{*{20}{c}} 48 | {z_n^2{y_n}(1 - {y_n})} & {{z_n}{y_n}(1 - {y_n})} \\ 49 | {{z_n}{y_n}(1 - {y_n})} & {{y_n}(1 - {y_n})} 50 | \end{array}} \right]} $\\ 51 | \end{checkboxes} 52 | 53 | \question Recall that $N$ is the size of the data set and $d$ is the dimensionality of the input space. What is the size of matrix that gets inverted in kernel ridge regression? 
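For intuition here: kernel ridge regression (Lecture 206) ends up solving $\beta = (\lambda I + K)^{-1}\mathbf{y}$, where $K$ is the $N \times N$ kernel matrix built from the training inputs, so the matrix that gets inverted scales with $N$ rather than with $d$. A minimal Octave/MATLAB sketch of that computation (illustrative only, not one of this repository's scripts; the function name and the Gaussian-RBF kernel are assumptions made for the example):

function beta = kernel_ridge_fit(X, y, lambda, gamma)
  % X: N-by-d inputs, y: N-by-1 targets, lambda: ridge parameter,
  % gamma: assumed Gaussian-RBF kernel parameter.
  N = size(X, 1);
  sq = sum(X.^2, 2);
  D = bsxfun(@plus, sq, sq') - 2 * (X * X');  % squared pairwise distances
  K = exp(-gamma * D);                        % N-by-N kernel matrix
  beta = (lambda * eye(N) + K) \ y;           % the system solved here is N x N
end

A new point $\mathbf{x}$ is then scored as $\sum_n \beta_n K(\mathbf{x}_n, \mathbf{x})$; taking the sign of that score is exactly how the LSSVM classifier asked for in Questions 19-20 would use the same fit.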
54 | \begin{checkboxes} 55 | \choice $d \times d$ 56 | \CorrectChoice $N \times N$ 57 | \choice $Nd \times Nd$ 58 | \choice $N^2 \times N^2$ 59 | \choice none of the other choices\\ 60 | \end{checkboxes} 61 | 62 | \question The usual support vector regression model solves the following optimization problem. 63 | \[({P_1})\mathop {\min }\limits_{_{b,{\bf{w}},{{\bf{\xi }}^ \vee },{{\bf{\xi }}^ \wedge }}} \frac{1}{2}{{\bf{w}}^T}{\bf{w}} + C\sum\limits_{n = 1}^N {\left( {\xi _n^ \vee + \xi _n^ \wedge } \right)}\]\[ {\rm{ s}}.{\rm{t}}.\; - \epsilon - \xi _n^ \vee \le {y_n} - {{\bf{w}}^T}\phi ({{\bf{x}}_n}) - b \le \epsilon + \xi _n^ \wedge ,\;\xi _n^ \vee \ge 0,\xi _n^ \wedge \ge 0.\] 64 | Usual support vector regression penalizes the violations $\xi^\vee_n$ and $\xi^\wedge_n$ linearly. Another popular formulation, called $l_2$ loss support vector regression in $(P_2)$, penalizes the violations quadratically, just like the $l_2$ loss SVM introduced in Homework 1 of Machine Learning Techniques. 65 | \[({P_2})\mathop {\min }\limits_{b,{\bf{w}},{{\bf{\xi }}^ \vee },{{\bf{\xi }}^ \wedge }} \frac{1}{2}{{\bf{w}}^T}{\bf{w}} + C\sum\limits_{n = 1}^N {\left( {{{(\xi _n^ \vee )}^2} + {{(\xi _n^ \wedge )}^2}} \right)} \] 66 | \[{\rm{s}}.{\rm{t}}.\; - \epsilon - \xi _n^ \vee \le {y_n} - {{\bf{w}}^T}\phi ({{\bf{x}}_n}) - b \le \epsilon + \xi _n^ \wedge .\] 67 | Which of the following is an equivalent `unconstrained' form of $(P_2)$? 68 | 69 | \begin{checkboxes} 70 | \choice none of the other choices 71 | \choice $\min_{b, \mathbf{w}} \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N (|y_n - \mathbf{w}^T \mathbf{\phi}(\mathbf{x}_n) - b| - \epsilon)^2$ 72 | \CorrectChoice $\min_{b, \mathbf{w}} \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N (\max(0, |y_n - \mathbf{w}^T \mathbf{\phi}(\mathbf{x}_n) - b| - \epsilon))^2$\ 73 | \choice $\min_{b, \mathbf{w}} \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N (\max(\epsilon, |y_n - \mathbf{w}^T \mathbf{\phi}(\mathbf{x}_n) - b| ))^2$ 74 | \choice $\min_{b, \mathbf{w}} \frac{1}{2} \mathbf{w}^T \mathbf{w} + C \sum_{n=1}^N (y_n - \mathbf{w}^T \mathbf{\phi}(\mathbf{x}_n) - b)^2$\\ 75 | \end{checkboxes} 76 | 77 | \question By a slight modification of the representer theorem presented in the class, the optimal $\mathbf{w}_*$ for $(P_2)$ must satisfy $\mathbf{w}_* = \sum_{n=1}^N \beta_n \mathbf{z}_n$. We can substitute the form of the optimal $\mathbf{w}_*$ into the answer in Question 4 to derive an optimization problem that contains $\mathbf{\beta}$ (and $b$) only, which would look like 78 | \[\mathop {\min }\limits_{_{b,{\bf{\beta }}}} F(b,{\bf{\beta }}) = \frac{1}{2}\sum\limits_{m = 1}^N {\sum\limits_{n = 1}^N {{\beta _n}} } {\beta _m}K({{\bf{x}}_n},{{\bf{x}}_m}) + {\rm{ something }},\] 79 | where $K(\mathbf{x}_n, \mathbf{x}_m) = (\mathbf{\phi}(\mathbf{x}_n))^T (\mathbf{\phi}(\mathbf{x}_m))$ is the kernel function. One thing that you should see is that $F(b,\beta)$ is differentiable with respect to $\beta_n$ (and $b$), and hence you can use gradient descent to solve for the optimal $\beta$. For any $\beta$, let $s_n = \sum_{m=1}^N \beta_m K(\mathbf{x}_n, \mathbf{x}_m) + b$. What is $\frac{\partial F(b, \mathbf{\beta} )}{\partial \beta_m}$?
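Whichever candidate expression for $\frac{\partial F}{\partial \beta_m}$ is picked below, a finite-difference comparison is a quick way to validate it. The Octave/MATLAB sketch below (illustrative only, not part of this repository's scripts; the Gaussian-RBF kernel and the values of $C$, $\epsilon$, $\gamma$ are arbitrary assumptions) evaluates the expression carrying the indicator $[\,|y_n - s_n| \ge \epsilon\,]$ and checks it numerically:

N = 20; X = rand(N, 2); y = rand(N, 1);   % toy data
gamma = 1; C = 1; ep = 0.1;               % illustrative parameters
sq = sum(X.^2, 2);
K = exp(-gamma * (bsxfun(@plus, sq, sq') - 2 * (X * X')));   % assumed RBF kernel
beta = randn(N, 1); b = 0.1;
s = K * beta + b;                          % s_n = sum_m beta_m K(x_n, x_m) + b
viol = abs(y - s) - ep;                    % tube violations |y_n - s_n| - epsilon
active = viol > 0;                         % indicator [ |y_n - s_n| >= epsilon ]
m = 3;                                     % check dF/dbeta_m for one index m
g_analytic = K(:, m)' * beta - 2 * C * sum(active .* viol .* sign(y - s) .* K(:, m));
F = @(bt) 0.5 * bt' * K * bt + C * sum(max(0, abs(y - (K * bt + b)) - ep).^2);
h = 1e-6; e = zeros(N, 1); e(m) = 1;
g_numeric = (F(beta + h * e) - F(beta - h * e)) / (2 * h);
fprintf('analytic %.6f vs. finite-difference %.6f\n', g_analytic, g_numeric);

If the chosen expression is right, the two printed values agree to several decimal places.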
80 | 81 | \begin{checkboxes} 82 | 83 | \CorrectChoice $\sum_{n=1}^N \beta_n K(\mathbf{x}_n, \mathbf{x}_m) - 2 C \sum_{n=1}^N [|y_n - s_n| \ge \epsilon] (|y_n - s_n| - \epsilon) \mbox{sign}(y_n - s_n) K(\mathbf{x}_n, \mathbf{x}_m)$ 84 | 85 | \choice $\sum_{n=1}^N \beta_n K(\mathbf{x}_n, \mathbf{x}_m) + 2 C \sum_{n=1}^N [|y_n - s_n| \ge \epsilon] (|y_n - s_n| - \epsilon) \mbox{sign}(y_n - s_n) K(\mathbf{x}_n, \mathbf{x}_m)$ 86 | 87 | \choice $\sum_{n=1}^N \beta_n K(\mathbf{x}_n, \mathbf{x}_m) - 2 C \sum_{n=1}^N [|y_n - s_n| \le \epsilon] (|y_n - s_n| - \epsilon) \mbox{sign}(y_n - s_n) K(\mathbf{x}_n, \mathbf{x}_m)$ 88 | 89 | \choice $\sum_{n=1}^N \beta_n K(\mathbf{x}_n, \mathbf{x}_m) + 2 C \sum_{n=1}^N [|y_n - s_n| \le \epsilon] (|y_n - s_n| - \epsilon) \mbox{sign}(y_n - s_n) K(\mathbf{x}_n, \mathbf{x}_m)$ 90 | 91 | \choice none of the other choices\\ 92 | 93 | \end{checkboxes} 94 | 95 | \question Consider $T+1$ hypotheses $g_0, g_1, \cdots, g_T$. Let $g_0(\mathbf{x}) = 0$ for all $x$. Assume that your boss holds a test set $\{(\tilde{\mathbf{x}}_m, \tilde{y}_m)\}_{m=1}^M$, where you know $\tilde{\mathbf{x}}_m$ but $\tilde{\mathbf{y}}_m$ is hidden. Nevertheless, you are allowed to know the squared test error ${E_{{\rm{test}}}}({g_t}) = \frac{1}{M}\sum\limits_{m = 1}^M {({g_t}(} {\widetilde {\bf{x}}_m}) - {{\tilde y}_m}{)^2} = {e_t}$ for $t = 0, 1,2, \cdots, T$. Also, assume that $\frac{1}{M} \sum_{m=1}^M (g_t(\tilde{\mathbf{x}}_m))^2 = s_t$. Which of the following allows you to calculate $\sum_{m=1}^M g_t(\tilde{\mathbf{x}}_m) \tilde{y}_m$? Note that the calculation is the key to the test set blending technique that the NTU team has used in KDDCup2011. 96 | 97 | \begin{checkboxes} 98 | \choice $\frac{M}{2} (-e_0 - s_t + e_t)$ 99 | \choice $\frac{M}{2} (+e_0 - s_t + e_t)$ 100 | \CorrectChoice $\frac{M}{2} (+e_0 + s_t - e_t)$ 101 | \choice none of the other choices 102 | \choice $\frac{M}{2} (-e_0 + s_t - e_t)$\\ 103 | \end{checkboxes} 104 | 105 | \question Consider the case where the target function $f : [0 ,1] \to \mathbb{R}$ is given by $f(x) = x^2$ and the input probability distribution is uniform on $[0,1]$. Assume that the training set has only two examples generated independently from the input probability distribution and noiselessly by $f$, and the learning model is usual linear regression that minimizes the mean squared error within all hypotheses of the form $h(x) = w_1 x + w_0$. What is $\bar{g}(x)$, the expected value of the hypothesis that the learning algorithm produces (see Page 10 of Lecture 207)? 106 | 107 | \begin{checkboxes} 108 | 109 | \choice $\bar{g}(x) = 2 x - \frac{1}{2}$ 110 | \choice $\bar{g}(x) = 2 x + \frac{1}{2}$ 111 | \CorrectChoice $\bar{g}(x) = x - \frac{1}{4}$ 112 | \choice $\bar{g}(x) = x + \frac{1}{4}$ 113 | \choice none of the other choices\\ 114 | 115 | \end{checkboxes} 116 | 117 | \question Assume that linear regression (for classification) is used within AdaBoost. 
That is, we need to solve the weighted-$E_{in}$ optimization problem 118 | \[\mathop {\min }\limits_{\bf{w}} E_{in}^{\bf{u}}({\bf{w}}) = \frac{1}{N}\sum\limits_{n = 1}^N {{u_n}} {({y_n} - {{\bf{w}}^T}{{\bf{x}}_n})^2}.\] 119 | One way to do so is to feed suitably transformed `pseudo examples' $(\tilde{\mathbf{x}}_n, \tilde{y}_n)$ to an ordinary (unweighted) linear regression. Which of the following transformations makes the unweighted solution equivalent to that of the weighted problem above? 120 | \begin{checkboxes} 121 | \choice none of the other choices 122 | \CorrectChoice $(\sqrt{u_n} \mathbf{x}_n, \sqrt{u_n} y_n)$ 123 | \choice $(u_n^{-2} \mathbf{x}_n, u_n^{-2} y_n)$ 124 | \choice $(u_n^2 \mathbf{x}_n, u_n^2 y_n)$ 125 | \choice $(u_n \mathbf{x}_n, u_n y_n)$\\ 126 | \end{checkboxes} 127 | 128 | \question Consider applying the AdaBoost algorithm on a binary classification data set where 99\% of the examples are positive. Because there are so many positive examples, the base algorithm within AdaBoost returns a constant classifier $g_1(\mathbf{x}) = +1$ in the first iteration. Let $u_+^{(2)}$ be the individual example weight of each positive example in the second iteration, and $u_-^{(2)}$ be the individual example weight of each negative example in the second iteration. What is $u_+^{(2)} / u_-^{(2)}$? 129 | 130 | \begin{checkboxes} 131 | \choice none of the other choices 132 | \choice 1/100 133 | \CorrectChoice 1/99 134 | \choice 100 135 | \choice 99\\ 136 | \end{checkboxes} 137 | 138 | \question When talking about non-uniform voting in aggregation, we mentioned that $\alpha$ can be viewed as a weight vector learned from any linear algorithm coupled with the following transform: 139 | \[\mathbf{\phi}(\mathbf{x}) = \Bigl(g_1(\mathbf{x}), g_2(\mathbf{x}), \cdots, g_T(\mathbf{x})\Bigr).\] 140 | When studying kernel models, we mentioned that the kernel is simply a computational short-cut for the inner product $(\mathbf{\phi}(\mathbf{x}))^T (\mathbf{\phi}(\mathbf{x}'))$. In this problem, we mix the two topics together using the decision stumps as our $g_t(\mathbf{x})$. 141 | 142 | Assume that the input vectors contain only integers between (including) $L$ and $R$. 143 | \[{g_{s,i,\theta }}({\bf{x}}) = s \cdot {\rm{sign}}({x_i} - \theta ),{\rm{ }}\] 144 | \[{\mbox{where }}i \in \{ 1,2, \cdots ,d\} ,d{\mbox{ is the finite dimensionality of the input space}},\] 145 | \[s \in \{ - 1, + 1\} ,\theta \in \mathbb{R} ,{\mbox{ and sign}}(0) = + 1\] 146 | Two decision stumps $g$ and $\hat{g}$ are defined as the same if $g(\mathbf{x}) = \hat{g}(\mathbf{x})$ for every $\mathbf{x} \in \mathcal{X}$. Two decision stumps are different if they are not the same. Which of the following are true? 147 | 148 | \begin{checkboxes} 149 | \choice The number of different decision stumps equals the size of $\mathcal{X}$ 150 | \choice $\mathcal{X}$ is of infinite size 151 | \CorrectChoice There are 22 different decision stumps for the case of $d=2$, $L=1$, and $R=6$ 152 | \CorrectChoice $g_{+1, 1, L-1}$ is the same as $g_{-1, 3, R+1}$ 153 | \CorrectChoice $g_{s, i, \theta}$ is the same as $g_{s, i, \rm{ceiling}(\theta)}$, where ceiling($\theta$) is the smallest integer that is greater than or equal to $\theta$ 154 | \end{checkboxes} 155 | 156 | \question Continuing from the previous question, let $\mathcal{G} = \{$ all different decision stumps for $\mathcal{X} \}$ and enumerate each hypothesis $g \in \mathcal{G}$ by some index $t$. Define 157 | \[\mathbf{\phi}_{ds}(\mathbf{x}) = \Biggl(g_1(\mathbf{x}), g_2(\mathbf{x}), \cdots, g_t(\mathbf{x}), \cdots, g_{|\mathcal{G}|}(\mathbf{x})\Biggr).\] 158 | Derive a simple equation that evaluates $K_{ds}(\mathbf{x}, \mathbf{x}') = (\mathbf{\phi}_{ds}(\mathbf{x}))^T (\mathbf{\phi}_{ds}(\mathbf{x}'))$ efficiently.
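Before matching against the options that follow, the kernel can be sanity-checked by brute force for small $d$, $L$, $R$: enumerate every different decision stump exactly once (the $2d(R-L)$ non-constant stumps plus the two constant ones) and sum $g(\mathbf{x})g(\mathbf{x}')$. A minimal Octave/MATLAB sketch (illustrative only, not part of this repository's scripts; the two test points are arbitrary integer vectors):

d = 2; L = 1; R = 6;                  % the small case from Question 10
x  = [2 5];                           % arbitrary integer vectors in {L,...,R}^d
xp = [4 1];
K_brute = 2;                          % constant stumps contribute (+1)(+1) + (-1)(-1)
for i = 1:d
  for c = L:(R - 1)                   % one representative threshold between c and c+1
    for s = [-1, +1]
      gx  = s * sign(x(i)  - (c + 0.5));
      gxp = s * sign(xp(i) - (c + 0.5));
      K_brute = K_brute + gx * gxp;
    end
  end
end
K_closed = 2 * d * (R - L) - 4 * sum(abs(x - xp)) + 2;
fprintf('brute force: %d, closed form: %d\n', K_brute, K_closed);

For the pair above both values come out to -2, and the agreement holds for any pair of integer inputs.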
Which of the following equations is correct? Here $\|\mathbf{v}\|_1$ denotes the one-norm of $\mathbf{v}$.
159 | 
160 | \begin{checkboxes}
161 | \choice $K_{ds}(\mathbf{x}, \mathbf{x}') = 2d(R-L) - 4\|\mathbf{x}-\mathbf{x}'\|_1 - 2$
162 | \choice none of the other choices
163 | \CorrectChoice $K_{ds}(\mathbf{x}, \mathbf{x}') = 2d(R-L) - 4\|\mathbf{x}-\mathbf{x}'\|_1 + 2$
164 | \choice $K_{ds}(\mathbf{x}, \mathbf{x}') = d(R-L) - 2\|\mathbf{x}-\mathbf{x}'\|_1 - 2$
165 | \choice $K_{ds}(\mathbf{x}, \mathbf{x}') = d(R-L) - 2\|\mathbf{x}-\mathbf{x}'\|_1 + 2$\\
166 | \end{checkboxes}
167 | 
168 | \question For Questions \ref{question@12}-\ref{question@18}, implement the AdaBoost-Stump algorithm as introduced in Lecture 208. Run the algorithm on the following set for training: $\href{https://d396qusza40orc.cloudfront.net/ntumltwo/hw2_data/hw2_adaboost_train.dat}{hw2\_adaboost\_train.dat}$ and the following set for testing: $\href{https://d396qusza40orc.cloudfront.net/ntumltwo/hw2_data/hw2_adaboost_test.dat}{hw2\_adaboost\_test.dat}$.
169 | 
170 | Use a total of $T=300$ iterations (please do not stop earlier than 300), and calculate $E_\text{in}$ and $E_\text{out}$ with the 0/1 error.
171 | 
172 | For the decision stump algorithm, please implement the following steps. Any ties can be broken arbitrarily.
173 | \begin{enumerate}
174 | \item For each feature $i$, sort all the $x_{n, i}$ values to $x_{[n], i}$ such that $x_{[n], i} \le x_{[n+1], i}$.
175 | \item Consider the candidate thresholds $-\infty$ and all the midpoints $\frac{x_{[n], i} + x_{[n+1], i}}{2}$. Test those thresholds with $s \in \{-1, +1\}$ to determine the best $(s,\theta)$ combination that minimizes $E_{in}^u$ using feature $i$.
176 | \item Pick the best $(s,i,\theta)$ combination by enumerating over all possible $i$.
177 | 
178 | \end{enumerate}
179 | For those interested, Step 2 can be carried out in only $O(N)$ time!
180 | 
181 | Which of the following is true about $E_{in}(g_1)$?
182 | 
183 | \begin{checkboxes}
184 | \CorrectChoice $0.2 \le E_{in}(g_1) < 0.3$
185 | \choice $E_{in}(g_1) > 0.3$
186 | \choice $E_{in}(g_1) = 0$
187 | \choice $0 < E_{in}(g_1) < 0.1$
188 | \choice $0.1 \le E_{in}(g_1) < 0.2$\\
189 | \end{checkboxes}
190 | 
191 | \question Which of the following is true about $E_{in}(G)$?
192 | 
193 | \begin{checkboxes}
194 | \choice $0.1 \le E_{in}(G) < 0.2$
195 | \choice $0.2 \le E_{in}(G) < 0.3$
196 | \choice $E_{in}(G) > 0.3$
197 | \choice $0 < E_{in}(G) < 0.1$
198 | \CorrectChoice $E_{in}(G) = 0$\\
199 | \end{checkboxes}
200 | 
201 | \question Let $U_t = \sum_{n=1}^N u_n^{(t)}$. Which of the following is true about $U_2$? (Note that $U_1 = 1$.)
202 | 
203 | \begin{checkboxes}
204 | \choice $U_2 = 0$
205 | \choice $0 < U_2 < 0.1$
206 | \choice $0.1 \le U_2 < 0.2$
207 | \choice $0.2 \le U_2 < 0.3$
208 | \CorrectChoice $U_2 > 0.3$\\
209 | \end{checkboxes}
210 | 
211 | \question Which of the following is true about $U_T$?
212 | 
213 | \begin{checkboxes}
214 | \choice $U_T = 0$
215 | \CorrectChoice $0 < U_T < 0.1$
216 | \choice $0.1 \le U_T < 0.2$
217 | \choice $0.2 \le U_T < 0.3$
218 | \choice $U_T > 0.3$\\
219 | \end{checkboxes}
220 | 
221 | \question Which of the following is true about the minimum value of $\epsilon_t$ over $t = 1, 2, \cdots, 300$?
222 | 
223 | \begin{checkboxes}
224 | \choice $0 < \mbox{value} < 0.1$
225 | \choice $\mbox{value} > 0.3$
226 | \choice $\mbox{value} = 0$
227 | \CorrectChoice $0.1 \le \mbox{value} < 0.2$
228 | \choice $0.2 \le \mbox{value} < 0.3$ \\
229 | \end{checkboxes}
230 | 
231 | \question Calculate $E_{out}$ with the test set.
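That is, both $E_{out}(g_1)$ and $E_{out}(G)$ are 0/1 errors on the $M$ test examples, with $G$ the $\alpha$-weighted vote of the stumps from Lecture 208:
\[E_{out}(g_1) = \frac{1}{M}\sum_{m=1}^{M} [g_1(\mathbf{x}_m) \ne y_m], \qquad E_{out}(G) = \frac{1}{M}\sum_{m=1}^{M} \Bigl[\mbox{sign}\Bigl(\sum_{t=1}^{T} \alpha_t g_t(\mathbf{x}_m)\Bigr) \ne y_m\Bigr],\]
where $(\mathbf{x}_m, y_m)$ denotes the $m$-th test example.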
Which of the following is true about $E_{out}(g_1)$?
232 | 
233 | \begin{checkboxes}
234 | \CorrectChoice $0.2 \le E_{out}(g_1) < 0.3$
235 | \choice $E_{out}(g_1) > 0.3$
236 | \choice $0 < E_{out}(g_1) < 0.1$
237 | \choice $0.1 \le E_{out}(g_1) < 0.2$
238 | \choice $E_{out}(g_1) = 0$\\
239 | \end{checkboxes}
240 | 
241 | \question Which of the following is true about $E_{out}(G)$?\label{q18}
242 | 
243 | \begin{checkboxes}
244 | \CorrectChoice $0.1 \le E_{out}(G) < 0.2$
245 | \choice $0 < E_{out}(G) < 0.1$
246 | \choice $E_{out}(G) = 0$
247 | \choice $E_{out}(G) > 0.3$
248 | \choice $0.2 \le E_{out}(G) < 0.3$
249 | \end{checkboxes}
250 | 
251 | \question Write a program to implement the kernel ridge regression algorithm from Lecture 206, and use it for classification (i.e., implement LSSVM). Consider the following data set: $\href{https://d396qusza40orc.cloudfront.net/ntumltwo/hw2_data/hw2_lssvm_all.dat}{hw2\_lssvm\_all.dat}$. Use the first 400 examples for training and the remaining ones for testing. Calculate $E_{in}$ and $E_{out}$ with the 0/1 error.
252 | Consider the Gaussian-RBF kernel $\exp\left(-\gamma \|\mathbf{x} - \mathbf{x}'\|^2\right)$. Try all combinations of the parameters $\gamma \in \{ 32, 2, 0.125\}$ and $\lambda \in \{ 0.001, 1, 1000\}$.\\
253 | Among all parameter combinations, which of the following is the range that the minimum $E_{in}(g)$ resides in?
254 | 
255 | \begin{checkboxes}
256 | \choice [0.8,1.0)
257 | \CorrectChoice [0,0.2)
258 | \choice [0.4,0.6)
259 | \choice [0.2,0.4)
260 | \choice [0.6,0.8)\\
261 | \end{checkboxes}
262 | 
263 | \question Following Question 19, among all parameter combinations, which of the following is the range that the minimum $E_{out}(g)$ resides in?
264 | 
265 | \begin{checkboxes}
266 | \CorrectChoice [0.2,0.4)
267 | \choice [0.8,1.0)
268 | \choice [0.4,0.6)
269 | \choice [0.6,0.8)
270 | \choice [0,0.2)
271 | \end{checkboxes}
272 | 
273 | 
274 | 
275 | \end{questions}
276 | 
277 | 
278 | \end{document}
--------------------------------------------------------------------------------
/quiz3/Dtree.m:
--------------------------------------------------------------------------------
1 | function r=Dtree(train,order) % r(1)->s, r(2)->theta, r(3)->i, r(4)->order
2 | r(1,4)=order;
3 | if impurity(train(:,3))==0   % pure node: all labels identical, return a leaf
4 | r(1,1)=sign(sum(train(:,3)));  % the leaf predicts the common label
5 | else
6 | [N,~]=size(train);
7 | train1=sortrows(train,1);    % examples sorted by feature 1
8 | train2=sortrows(train,2);    % examples sorted by feature 2
9 | theta1=0;
10 | theta2=0;
11 | for n=1:N-1                  % candidate thresholds: midpoints of consecutive sorted values
12 | theta1(n,1)=(train1(n+1,1)+train1(n,1))/2;
13 | theta2(n,1)=(train2(n+1,2)+train2(n,2))/2;
14 | end
15 | for n=1:N-1                  % weighted impurity of the two branches when cutting feature 1 after position n
16 | imp1(n,1)=impurity(train1(1:n,3));imp1(n,2)=impurity(train1(n+1:N,3));
17 | b1(n,1)=n*imp1(n,1)+(N-n)*imp1(n,2);
18 | end
19 | for n=1:N-1                  % weighted impurity of the two branches when cutting feature 2 after position n
20 | imp2(n,1)=impurity(train2(1:n,3));imp2(n,2)=impurity(train2(n+1:N,3));
21 | b2(n,1)=n*imp2(n,1)+(N-n)*imp2(n,2);
22 | end
23 | b=[b1;b2];
24 | [~,pos]=min(b);              % best cut position over both features
25 | if pos