├── LICENSE
├── README.md
├── homework
│   ├── Bonus1
│   │   ├── Bonus1.html
│   │   ├── Bonus1.ipynb
│   │   └── diabetes
│   ├── Bonus2
│   │   ├── Bonus2.html
│   │   ├── Bonus2.ipynb
│   │   └── supervised_ae.png
│   ├── Exam-Sample
│   │   └── Sample.pdf
│   ├── HM2
│   │   ├── HM2.html
│   │   ├── HM2.ipynb
│   │   └── diabetes
│   ├── HM4
│   │   ├── HM4.html
│   │   └── HM4.ipynb
│   └── HM5
│       ├── HM5.html
│       └── HM5.ipynb
└── webpage
    ├── Makefile
    ├── index.md
    ├── style_chmduquesne.css
    └── style_chmduquesne.tex

/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2020 Shusen Wang

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# CS583A: Deep Learning 2021 Spring

The course webpage is at [[click here](http://wangshusen.github.io/teaching.html)].

Slides are available at [[click here](https://github.com/wangshusen/DeepLearning.git)].
--------------------------------------------------------------------------------
/homework/Bonus1/Bonus1.ipynb:
--------------------------------------------------------------------------------
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Bonus1: Parallel Algorithms\n",
    "\n",
    "### Name: [Your-Name?]\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 0. You will do the following:\n",
    "\n",
    "1. Read the lecture note: [click here](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/Parallel/Parallel.pdf)\n",
    "\n",
    "2. Implement federated averaging or decentralized optimization. (See the sketch in Section 2 below.)\n",
    "\n",
    "3. Plot the convergence curve. (The x-axis can be ```number of epochs``` or ```number of communications```. You must make sure the axis label is correct.)\n",
    "\n",
    "4. Convert the .IPYNB file to a .HTML file.\n",
    "\n",
    "    * The HTML file must contain **the code** and **the output after execution**.\n",
    "\n",
    "5. Upload this .HTML file to your Google Drive, Dropbox, or GitHub repo. (If it is submitted to Google Drive or Dropbox, you must make the file open-access.)\n",
    "\n",
    "6. Submit the link to this .HTML file to Canvas.\n",
    "\n",
    "    * Example: https://github.com/wangshusen/CS583-2020S/blob/master/homework/Bonus1/Bonus1.html\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 1. Data processing\n",
    "\n",
    "- Download the Diabetes dataset from https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/diabetes\n",
    "- Load the data using sklearn.\n",
    "- Preprocess the data."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 1.1. Load the data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn import datasets\n",
    "import numpy\n",
    "\n",
    "# load the LIBSVM-format file; x is returned sparse, so densify it\n",
    "x_sparse, y = datasets.load_svmlight_file('diabetes')\n",
    "x = x_sparse.todense()\n",
    "\n",
    "print('Shape of x: ' + str(x.shape))\n",
    "print('Shape of y: ' + str(y.shape))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 1.2. Partition into training and test sets"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# partition the data into training and test sets\n",
    "n = x.shape[0]\n",
    "n_train = 640\n",
    "n_test = n - n_train\n",
    "\n",
    "rand_indices = numpy.random.permutation(n)\n",
    "train_indices = rand_indices[0:n_train]\n",
    "test_indices = rand_indices[n_train:n]\n",
    "\n",
    "x_train = x[train_indices, :]\n",
    "x_test = x[test_indices, :]\n",
    "y_train = y[train_indices].reshape(n_train, 1)\n",
    "y_test = y[test_indices].reshape(n_test, 1)\n",
    "\n",
    "print('Shape of x_train: ' + str(x_train.shape))\n",
    "print('Shape of x_test: ' + str(x_test.shape))\n",
    "print('Shape of y_train: ' + str(y_train.shape))\n",
    "print('Shape of y_test: ' + str(y_test.shape))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 1.3. Feature scaling"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Use standardization to transform both the training and test features. The mean and standard deviation are computed on the training set only and then applied to both sets."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Standardization\n",
    "import numpy\n",
    "\n",
    "# calculate mu and sig using the training set\n",
    "d = x_train.shape[1]\n",
    "mu = numpy.mean(x_train, axis=0).reshape(1, d)\n",
    "sig = numpy.std(x_train, axis=0).reshape(1, d)\n",
    "\n",
    "# transform the training features\n",
    "x_train = (x_train - mu) / (sig + 1E-6)\n",
    "\n",
    "# transform the test features\n",
    "x_test = (x_test - mu) / (sig + 1E-6)\n",
    "\n",
    "print('test mean = ')\n",
    "print(numpy.mean(x_test, axis=0))\n",
    "\n",
    "print('test std = ')\n",
    "print(numpy.std(x_test, axis=0))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 1.4. Add a dimension of all ones\n",
    "\n",
    "Append a constant-one feature to every sample so that the last entry of the weight vector acts as a bias (intercept) term."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n_train, d = x_train.shape\n",
    "x_train = numpy.concatenate((x_train, numpy.ones((n_train, 1))), axis=1)\n",
    "\n",
    "n_test, d = x_test.shape\n",
    "x_test = numpy.concatenate((x_test, numpy.ones((n_test, 1))), axis=1)\n",
    "\n",
    "print('Shape of x_train: ' + str(x_train.shape))\n",
    "print('Shape of x_test: ' + str(x_test.shape))"
   ]
  },
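  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 2. Federated averaging (a sketch)\n",
    "\n",
    "The cell below is a minimal sketch of step 2, not a prescribed solution. One reasonable objective is L2-regularized logistic regression with labels $y_i \\in \\{+1, -1\\}$:\n",
    "\n",
    "$$f(w) = \\frac{1}{n} \\sum_{i=1}^n \\log\\big(1 + \\exp(-y_i x_i^T w)\\big) + \\lambda \\|w\\|_2^2 .$$\n",
    "\n",
    "Federated averaging is simulated on one machine by splitting ```x_train``` across ```m``` workers: in each communication round, every worker runs a few local gradient steps starting from the current global model, and the server averages the local models. The worker count, step size, regularization parameter, and step counts below are illustrative assumptions, not part of the assignment; the final plot covers step 3.\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal federated-averaging sketch; hyperparameters are illustrative assumptions.\n",
    "import numpy\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "def objective(w, x, y, lam):\n",
    "    # mean logistic loss + L2 regularization; labels are +1/-1\n",
    "    z = numpy.multiply(y, x.dot(w))\n",
    "    return float(numpy.mean(numpy.logaddexp(0, -z)) + lam * numpy.sum(numpy.square(w)))\n",
    "\n",
    "def gradient(w, x, y, lam):\n",
    "    # gradient of the objective on one worker's local data\n",
    "    z = numpy.multiply(y, x.dot(w))\n",
    "    p = 1.0 / (1.0 + numpy.exp(z))  # sigmoid(-z); may warn on overflow, saturates harmlessly\n",
    "    return -x.T.dot(numpy.multiply(y, p)) / x.shape[0] + 2 * lam * w\n",
    "\n",
    "m = 4  # number of simulated workers (an assumption)\n",
    "n_train, d = x_train.shape\n",
    "parts = numpy.array_split(numpy.arange(n_train), m)\n",
    "w = numpy.zeros((d, 1))  # global model\n",
    "\n",
    "lam = 1E-4       # regularization parameter\n",
    "lr = 1.0         # learning rate for the local steps\n",
    "n_rounds = 50    # communication rounds\n",
    "local_steps = 5  # gradient steps per worker per round\n",
    "obj_curve = []\n",
    "\n",
    "for r in range(n_rounds):\n",
    "    w_locals = []\n",
    "    for i in range(m):\n",
    "        xi, yi = x_train[parts[i], :], y_train[parts[i], :]\n",
    "        wi = w.copy()\n",
    "        for s in range(local_steps):\n",
    "            wi = wi - lr * gradient(wi, xi, yi, lam)\n",
    "        w_locals.append(wi)\n",
    "    w = sum(w_locals) / m  # the server averages the local models\n",
    "    obj_curve.append(objective(w, x_train, y_train, lam))\n",
    "\n",
    "# convergence curve (step 3); the x-axis counts communications\n",
    "plt.plot(range(1, n_rounds + 1), obj_curve)\n",
    "plt.xlabel('number of communications')\n",
    "plt.ylabel('objective value')\n",
    "plt.show()"
   ]
  },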
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}

--------------------------------------------------------------------------------
/homework/Bonus1/diabetes:
--------------------------------------------------------------------------------
1 | -1 1:6.000000 2:148.000000 3:72.000000 4:35.000000 5:0.000000 6:33.599998 7:0.627000 8:50.000000 2 | +1 1:1.000000 2:85.000000 3:66.000000 4:29.000000 5:0.000000 6:26.600000 7:0.351000 8:31.000000 3 | -1 1:8.000000 2:183.000000 3:64.000000 4:0.000000 5:0.000000 6:23.299999 7:0.672000 8:32.000000 4 | +1 1:1.000000 2:89.000000 3:66.000000 4:23.000000 5:94.000000 6:28.100000 7:0.167000 8:21.000000 5 | -1 1:0.000000 2:137.000000 3:40.000000 4:35.000000 5:168.000000 6:43.099998 7:2.288000 8:33.000000 6 | +1 1:5.000000 2:116.000000 3:74.000000 4:0.000000 5:0.000000 6:25.600000 7:0.201000 8:30.000000 7 | -1 1:3.000000 2:78.000000 3:50.000000 4:32.000000 5:88.000000 6:31.000000 7:0.248000 8:26.000000 8 | +1 1:10.000000 2:115.000000 3:0.000000 4:0.000000 5:0.000000 6:35.299999 7:0.134000 8:29.000000 9 | -1 1:2.000000 2:197.000000 3:70.000000 4:45.000000 5:543.000000 6:30.500000 7:0.158000 8:53.000000 10 | -1 1:8.000000 2:125.000000 3:96.000000 4:0.000000 5:0.000000 6:0.000000 7:0.232000 8:54.000000 11 | +1 1:4.000000 2:110.000000 3:92.000000 4:0.000000 5:0.000000 6:37.599998 7:0.191000 8:30.000000 12 | -1 1:10.000000 2:168.000000 3:74.000000 4:0.000000 5:0.000000 6:38.000000 7:0.537000 8:34.000000 13 | +1 1:10.000000 2:139.000000 3:80.000000 4:0.000000 5:0.000000 6:27.100000 7:1.441000 8:57.000000 14 | -1 1:1.000000 2:189.000000 3:60.000000 4:23.000000 5:846.000000 6:30.100000 7:0.398000 8:59.000000 15 | -1 1:5.000000 2:166.000000 3:72.000000 4:19.000000 5:175.000000 6:25.799999 7:0.587000 8:51.000000 16 | -1 1:7.000000 2:100.000000 3:0.000000 4:0.000000 5:0.000000 6:30.000000 7:0.484000 8:32.000000 17 | -1 1:0.000000 2:118.000000 3:84.000000 4:47.000000 5:230.000000 6:45.799999 7:0.551000 8:31.000000 18 | -1 1:7.000000 2:107.000000 3:74.000000 4:0.000000 5:0.000000 6:29.600000 7:0.254000 8:31.000000 19 | +1 1:1.000000 2:103.000000 3:30.000000 4:38.000000 5:83.000000 6:43.299999 7:0.183000 8:33.000000 20 | -1 1:1.000000 2:115.000000 3:70.000000 4:30.000000 5:96.000000 6:34.599998 7:0.529000 8:32.000000 21 | +1 1:3.000000 2:126.000000 3:88.000000 4:41.000000 5:235.000000 6:39.299999 7:0.704000 8:27.000000 22 | 
+1 1:8.000000 2:99.000000 3:84.000000 4:0.000000 5:0.000000 6:35.400002 7:0.388000 8:50.000000 23 | -1 1:7.000000 2:196.000000 3:90.000000 4:0.000000 5:0.000000 6:39.799999 7:0.451000 8:41.000000 24 | -1 1:9.000000 2:119.000000 3:80.000000 4:35.000000 5:0.000000 6:29.000000 7:0.263000 8:29.000000 25 | -1 1:11.000000 2:143.000000 3:94.000000 4:33.000000 5:146.000000 6:36.599998 7:0.254000 8:51.000000 26 | -1 1:10.000000 2:125.000000 3:70.000000 4:26.000000 5:115.000000 6:31.100000 7:0.205000 8:41.000000 27 | -1 1:7.000000 2:147.000000 3:76.000000 4:0.000000 5:0.000000 6:39.400002 7:0.257000 8:43.000000 28 | +1 1:1.000000 2:97.000000 3:66.000000 4:15.000000 5:140.000000 6:23.200001 7:0.487000 8:22.000000 29 | +1 1:13.000000 2:145.000000 3:82.000000 4:19.000000 5:110.000000 6:22.200001 7:0.245000 8:57.000000 30 | +1 1:5.000000 2:117.000000 3:92.000000 4:0.000000 5:0.000000 6:34.099998 7:0.337000 8:38.000000 31 | +1 1:5.000000 2:109.000000 3:75.000000 4:26.000000 5:0.000000 6:36.000000 7:0.546000 8:60.000000 32 | -1 1:3.000000 2:158.000000 3:76.000000 4:36.000000 5:245.000000 6:31.600000 7:0.851000 8:28.000000 33 | +1 1:3.000000 2:88.000000 3:58.000000 4:11.000000 5:54.000000 6:24.799999 7:0.267000 8:22.000000 34 | +1 1:6.000000 2:92.000000 3:92.000000 4:0.000000 5:0.000000 6:19.900000 7:0.188000 8:28.000000 35 | +1 1:10.000000 2:122.000000 3:78.000000 4:31.000000 5:0.000000 6:27.600000 7:0.512000 8:45.000000 36 | +1 1:4.000000 2:103.000000 3:60.000000 4:33.000000 5:192.000000 6:24.000000 7:0.966000 8:33.000000 37 | +1 1:11.000000 2:138.000000 3:76.000000 4:0.000000 5:0.000000 6:33.200001 7:0.420000 8:35.000000 38 | -1 1:9.000000 2:102.000000 3:76.000000 4:37.000000 5:0.000000 6:32.900002 7:0.665000 8:46.000000 39 | -1 1:2.000000 2:90.000000 3:68.000000 4:42.000000 5:0.000000 6:38.200001 7:0.503000 8:27.000000 40 | -1 1:4.000000 2:111.000000 3:72.000000 4:47.000000 5:207.000000 6:37.099998 7:1.390000 8:56.000000 41 | +1 1:3.000000 2:180.000000 3:64.000000 4:25.000000 5:70.000000 6:34.000000 7:0.271000 8:26.000000 42 | +1 1:7.000000 2:133.000000 3:84.000000 4:0.000000 5:0.000000 6:40.200001 7:0.696000 8:37.000000 43 | +1 1:7.000000 2:106.000000 3:92.000000 4:18.000000 5:0.000000 6:22.700001 7:0.235000 8:48.000000 44 | -1 1:9.000000 2:171.000000 3:110.000000 4:24.000000 5:240.000000 6:45.400002 7:0.721000 8:54.000000 45 | +1 1:7.000000 2:159.000000 3:64.000000 4:0.000000 5:0.000000 6:27.400000 7:0.294000 8:40.000000 46 | -1 1:0.000000 2:180.000000 3:66.000000 4:39.000000 5:0.000000 6:42.000000 7:1.893000 8:25.000000 47 | +1 1:1.000000 2:146.000000 3:56.000000 4:0.000000 5:0.000000 6:29.700001 7:0.564000 8:29.000000 48 | +1 1:2.000000 2:71.000000 3:70.000000 4:27.000000 5:0.000000 6:28.000000 7:0.586000 8:22.000000 49 | -1 1:7.000000 2:103.000000 3:66.000000 4:32.000000 5:0.000000 6:39.099998 7:0.344000 8:31.000000 50 | +1 1:7.000000 2:105.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.305000 8:24.000000 51 | +1 1:1.000000 2:103.000000 3:80.000000 4:11.000000 5:82.000000 6:19.400000 7:0.491000 8:22.000000 52 | +1 1:1.000000 2:101.000000 3:50.000000 4:15.000000 5:36.000000 6:24.200001 7:0.526000 8:26.000000 53 | +1 1:5.000000 2:88.000000 3:66.000000 4:21.000000 5:23.000000 6:24.400000 7:0.342000 8:30.000000 54 | -1 1:8.000000 2:176.000000 3:90.000000 4:34.000000 5:300.000000 6:33.700001 7:0.467000 8:58.000000 55 | +1 1:7.000000 2:150.000000 3:66.000000 4:42.000000 5:342.000000 6:34.700001 7:0.718000 8:42.000000 56 | +1 1:1.000000 2:73.000000 3:50.000000 4:10.000000 5:0.000000 6:23.000000 
7:0.248000 8:21.000000 57 | -1 1:7.000000 2:187.000000 3:68.000000 4:39.000000 5:304.000000 6:37.700001 7:0.254000 8:41.000000 58 | +1 1:0.000000 2:100.000000 3:88.000000 4:60.000000 5:110.000000 6:46.799999 7:0.962000 8:31.000000 59 | +1 1:0.000000 2:146.000000 3:82.000000 4:0.000000 5:0.000000 6:40.500000 7:1.781000 8:44.000000 60 | +1 1:0.000000 2:105.000000 3:64.000000 4:41.000000 5:142.000000 6:41.500000 7:0.173000 8:22.000000 61 | +1 1:2.000000 2:84.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.304000 8:21.000000 62 | -1 1:8.000000 2:133.000000 3:72.000000 4:0.000000 5:0.000000 6:32.900002 7:0.270000 8:39.000000 63 | +1 1:5.000000 2:44.000000 3:62.000000 4:0.000000 5:0.000000 6:25.000000 7:0.587000 8:36.000000 64 | +1 1:2.000000 2:141.000000 3:58.000000 4:34.000000 5:128.000000 6:25.400000 7:0.699000 8:24.000000 65 | -1 1:7.000000 2:114.000000 3:66.000000 4:0.000000 5:0.000000 6:32.799999 7:0.258000 8:42.000000 66 | +1 1:5.000000 2:99.000000 3:74.000000 4:27.000000 5:0.000000 6:29.000000 7:0.203000 8:32.000000 67 | -1 1:0.000000 2:109.000000 3:88.000000 4:30.000000 5:0.000000 6:32.500000 7:0.855000 8:38.000000 68 | +1 1:2.000000 2:109.000000 3:92.000000 4:0.000000 5:0.000000 6:42.700001 7:0.845000 8:54.000000 69 | +1 1:1.000000 2:95.000000 3:66.000000 4:13.000000 5:38.000000 6:19.600000 7:0.334000 8:25.000000 70 | +1 1:4.000000 2:146.000000 3:85.000000 4:27.000000 5:100.000000 6:28.900000 7:0.189000 8:27.000000 71 | -1 1:2.000000 2:100.000000 3:66.000000 4:20.000000 5:90.000000 6:32.900002 7:0.867000 8:28.000000 72 | +1 1:5.000000 2:139.000000 3:64.000000 4:35.000000 5:140.000000 6:28.600000 7:0.411000 8:26.000000 73 | -1 1:13.000000 2:126.000000 3:90.000000 4:0.000000 5:0.000000 6:43.400002 7:0.583000 8:42.000000 74 | +1 1:4.000000 2:129.000000 3:86.000000 4:20.000000 5:270.000000 6:35.099998 7:0.231000 8:23.000000 75 | +1 1:1.000000 2:79.000000 3:75.000000 4:30.000000 5:0.000000 6:32.000000 7:0.396000 8:22.000000 76 | +1 1:1.000000 2:0.000000 3:48.000000 4:20.000000 5:0.000000 6:24.700001 7:0.140000 8:22.000000 77 | +1 1:7.000000 2:62.000000 3:78.000000 4:0.000000 5:0.000000 6:32.599998 7:0.391000 8:41.000000 78 | +1 1:5.000000 2:95.000000 3:72.000000 4:33.000000 5:0.000000 6:37.700001 7:0.370000 8:27.000000 79 | -1 1:0.000000 2:131.000000 3:0.000000 4:0.000000 5:0.000000 6:43.200001 7:0.270000 8:26.000000 80 | +1 1:2.000000 2:112.000000 3:66.000000 4:22.000000 5:0.000000 6:25.000000 7:0.307000 8:24.000000 81 | +1 1:3.000000 2:113.000000 3:44.000000 4:13.000000 5:0.000000 6:22.400000 7:0.140000 8:22.000000 82 | +1 1:2.000000 2:74.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.102000 8:22.000000 83 | +1 1:7.000000 2:83.000000 3:78.000000 4:26.000000 5:71.000000 6:29.299999 7:0.767000 8:36.000000 84 | +1 1:0.000000 2:101.000000 3:65.000000 4:28.000000 5:0.000000 6:24.600000 7:0.237000 8:22.000000 85 | -1 1:5.000000 2:137.000000 3:108.000000 4:0.000000 5:0.000000 6:48.799999 7:0.227000 8:37.000000 86 | +1 1:2.000000 2:110.000000 3:74.000000 4:29.000000 5:125.000000 6:32.400002 7:0.698000 8:27.000000 87 | +1 1:13.000000 2:106.000000 3:72.000000 4:54.000000 5:0.000000 6:36.599998 7:0.178000 8:45.000000 88 | +1 1:2.000000 2:100.000000 3:68.000000 4:25.000000 5:71.000000 6:38.500000 7:0.324000 8:26.000000 89 | -1 1:15.000000 2:136.000000 3:70.000000 4:32.000000 5:110.000000 6:37.099998 7:0.153000 8:43.000000 90 | +1 1:1.000000 2:107.000000 3:68.000000 4:19.000000 5:0.000000 6:26.500000 7:0.165000 8:24.000000 91 | +1 1:1.000000 2:80.000000 3:55.000000 4:0.000000 5:0.000000 
6:19.100000 7:0.258000 8:21.000000 92 | +1 1:4.000000 2:123.000000 3:80.000000 4:15.000000 5:176.000000 6:32.000000 7:0.443000 8:34.000000 93 | +1 1:7.000000 2:81.000000 3:78.000000 4:40.000000 5:48.000000 6:46.700001 7:0.261000 8:42.000000 94 | -1 1:4.000000 2:134.000000 3:72.000000 4:0.000000 5:0.000000 6:23.799999 7:0.277000 8:60.000000 95 | +1 1:2.000000 2:142.000000 3:82.000000 4:18.000000 5:64.000000 6:24.700001 7:0.761000 8:21.000000 96 | +1 1:6.000000 2:144.000000 3:72.000000 4:27.000000 5:228.000000 6:33.900002 7:0.255000 8:40.000000 97 | +1 1:2.000000 2:92.000000 3:62.000000 4:28.000000 5:0.000000 6:31.600000 7:0.130000 8:24.000000 98 | +1 1:1.000000 2:71.000000 3:48.000000 4:18.000000 5:76.000000 6:20.400000 7:0.323000 8:22.000000 99 | +1 1:6.000000 2:93.000000 3:50.000000 4:30.000000 5:64.000000 6:28.700001 7:0.356000 8:23.000000 100 | -1 1:1.000000 2:122.000000 3:90.000000 4:51.000000 5:220.000000 6:49.700001 7:0.325000 8:31.000000 101 | -1 1:1.000000 2:163.000000 3:72.000000 4:0.000000 5:0.000000 6:39.000000 7:1.222000 8:33.000000 102 | +1 1:1.000000 2:151.000000 3:60.000000 4:0.000000 5:0.000000 6:26.100000 7:0.179000 8:22.000000 103 | +1 1:0.000000 2:125.000000 3:96.000000 4:0.000000 5:0.000000 6:22.500000 7:0.262000 8:21.000000 104 | +1 1:1.000000 2:81.000000 3:72.000000 4:18.000000 5:40.000000 6:26.600000 7:0.283000 8:24.000000 105 | +1 1:2.000000 2:85.000000 3:65.000000 4:0.000000 5:0.000000 6:39.599998 7:0.930000 8:27.000000 106 | +1 1:1.000000 2:126.000000 3:56.000000 4:29.000000 5:152.000000 6:28.700001 7:0.801000 8:21.000000 107 | +1 1:1.000000 2:96.000000 3:122.000000 4:0.000000 5:0.000000 6:22.400000 7:0.207000 8:27.000000 108 | +1 1:4.000000 2:144.000000 3:58.000000 4:28.000000 5:140.000000 6:29.500000 7:0.287000 8:37.000000 109 | +1 1:3.000000 2:83.000000 3:58.000000 4:31.000000 5:18.000000 6:34.299999 7:0.336000 8:25.000000 110 | -1 1:0.000000 2:95.000000 3:85.000000 4:25.000000 5:36.000000 6:37.400002 7:0.247000 8:24.000000 111 | -1 1:3.000000 2:171.000000 3:72.000000 4:33.000000 5:135.000000 6:33.299999 7:0.199000 8:24.000000 112 | -1 1:8.000000 2:155.000000 3:62.000000 4:26.000000 5:495.000000 6:34.000000 7:0.543000 8:46.000000 113 | +1 1:1.000000 2:89.000000 3:76.000000 4:34.000000 5:37.000000 6:31.200001 7:0.192000 8:23.000000 114 | +1 1:4.000000 2:76.000000 3:62.000000 4:0.000000 5:0.000000 6:34.000000 7:0.391000 8:25.000000 115 | -1 1:7.000000 2:160.000000 3:54.000000 4:32.000000 5:175.000000 6:30.500000 7:0.588000 8:39.000000 116 | -1 1:4.000000 2:146.000000 3:92.000000 4:0.000000 5:0.000000 6:31.200001 7:0.539000 8:61.000000 117 | -1 1:5.000000 2:124.000000 3:74.000000 4:0.000000 5:0.000000 6:34.000000 7:0.220000 8:38.000000 118 | +1 1:5.000000 2:78.000000 3:48.000000 4:0.000000 5:0.000000 6:33.700001 7:0.654000 8:25.000000 119 | +1 1:4.000000 2:97.000000 3:60.000000 4:23.000000 5:0.000000 6:28.200001 7:0.443000 8:22.000000 120 | +1 1:4.000000 2:99.000000 3:76.000000 4:15.000000 5:51.000000 6:23.200001 7:0.223000 8:21.000000 121 | -1 1:0.000000 2:162.000000 3:76.000000 4:56.000000 5:100.000000 6:53.200001 7:0.759000 8:25.000000 122 | +1 1:6.000000 2:111.000000 3:64.000000 4:39.000000 5:0.000000 6:34.200001 7:0.260000 8:24.000000 123 | +1 1:2.000000 2:107.000000 3:74.000000 4:30.000000 5:100.000000 6:33.599998 7:0.404000 8:23.000000 124 | +1 1:5.000000 2:132.000000 3:80.000000 4:0.000000 5:0.000000 6:26.799999 7:0.186000 8:69.000000 125 | -1 1:0.000000 2:113.000000 3:76.000000 4:0.000000 5:0.000000 6:33.299999 7:0.278000 8:23.000000 126 | -1 1:1.000000 
2:88.000000 3:30.000000 4:42.000000 5:99.000000 6:55.000000 7:0.496000 8:26.000000 127 | +1 1:3.000000 2:120.000000 3:70.000000 4:30.000000 5:135.000000 6:42.900002 7:0.452000 8:30.000000 128 | +1 1:1.000000 2:118.000000 3:58.000000 4:36.000000 5:94.000000 6:33.299999 7:0.261000 8:23.000000 129 | -1 1:1.000000 2:117.000000 3:88.000000 4:24.000000 5:145.000000 6:34.500000 7:0.403000 8:40.000000 130 | -1 1:0.000000 2:105.000000 3:84.000000 4:0.000000 5:0.000000 6:27.900000 7:0.741000 8:62.000000 131 | -1 1:4.000000 2:173.000000 3:70.000000 4:14.000000 5:168.000000 6:29.700001 7:0.361000 8:33.000000 132 | -1 1:9.000000 2:122.000000 3:56.000000 4:0.000000 5:0.000000 6:33.299999 7:1.114000 8:33.000000 133 | -1 1:3.000000 2:170.000000 3:64.000000 4:37.000000 5:225.000000 6:34.500000 7:0.356000 8:30.000000 134 | +1 1:8.000000 2:84.000000 3:74.000000 4:31.000000 5:0.000000 6:38.299999 7:0.457000 8:39.000000 135 | +1 1:2.000000 2:96.000000 3:68.000000 4:13.000000 5:49.000000 6:21.100000 7:0.647000 8:26.000000 136 | +1 1:2.000000 2:125.000000 3:60.000000 4:20.000000 5:140.000000 6:33.799999 7:0.088000 8:31.000000 137 | +1 1:0.000000 2:100.000000 3:70.000000 4:26.000000 5:50.000000 6:30.799999 7:0.597000 8:21.000000 138 | +1 1:0.000000 2:93.000000 3:60.000000 4:25.000000 5:92.000000 6:28.700001 7:0.532000 8:22.000000 139 | +1 1:0.000000 2:129.000000 3:80.000000 4:0.000000 5:0.000000 6:31.200001 7:0.703000 8:29.000000 140 | +1 1:5.000000 2:105.000000 3:72.000000 4:29.000000 5:325.000000 6:36.900002 7:0.159000 8:28.000000 141 | +1 1:3.000000 2:128.000000 3:78.000000 4:0.000000 5:0.000000 6:21.100000 7:0.268000 8:55.000000 142 | +1 1:5.000000 2:106.000000 3:82.000000 4:30.000000 5:0.000000 6:39.500000 7:0.286000 8:38.000000 143 | +1 1:2.000000 2:108.000000 3:52.000000 4:26.000000 5:63.000000 6:32.500000 7:0.318000 8:22.000000 144 | -1 1:10.000000 2:108.000000 3:66.000000 4:0.000000 5:0.000000 6:32.400002 7:0.272000 8:42.000000 145 | +1 1:4.000000 2:154.000000 3:62.000000 4:31.000000 5:284.000000 6:32.799999 7:0.237000 8:23.000000 146 | +1 1:0.000000 2:102.000000 3:75.000000 4:23.000000 5:0.000000 6:0.000000 7:0.572000 8:21.000000 147 | +1 1:9.000000 2:57.000000 3:80.000000 4:37.000000 5:0.000000 6:32.799999 7:0.096000 8:41.000000 148 | +1 1:2.000000 2:106.000000 3:64.000000 4:35.000000 5:119.000000 6:30.500000 7:1.400000 8:34.000000 149 | +1 1:5.000000 2:147.000000 3:78.000000 4:0.000000 5:0.000000 6:33.700001 7:0.218000 8:65.000000 150 | +1 1:2.000000 2:90.000000 3:70.000000 4:17.000000 5:0.000000 6:27.299999 7:0.085000 8:22.000000 151 | +1 1:1.000000 2:136.000000 3:74.000000 4:50.000000 5:204.000000 6:37.400002 7:0.399000 8:24.000000 152 | +1 1:4.000000 2:114.000000 3:65.000000 4:0.000000 5:0.000000 6:21.900000 7:0.432000 8:37.000000 153 | -1 1:9.000000 2:156.000000 3:86.000000 4:28.000000 5:155.000000 6:34.299999 7:1.189000 8:42.000000 154 | +1 1:1.000000 2:153.000000 3:82.000000 4:42.000000 5:485.000000 6:40.599998 7:0.687000 8:23.000000 155 | -1 1:8.000000 2:188.000000 3:78.000000 4:0.000000 5:0.000000 6:47.900002 7:0.137000 8:43.000000 156 | -1 1:7.000000 2:152.000000 3:88.000000 4:44.000000 5:0.000000 6:50.000000 7:0.337000 8:36.000000 157 | +1 1:2.000000 2:99.000000 3:52.000000 4:15.000000 5:94.000000 6:24.600000 7:0.637000 8:21.000000 158 | +1 1:1.000000 2:109.000000 3:56.000000 4:21.000000 5:135.000000 6:25.200001 7:0.833000 8:23.000000 159 | +1 1:2.000000 2:88.000000 3:74.000000 4:19.000000 5:53.000000 6:29.000000 7:0.229000 8:22.000000 160 | -1 1:17.000000 2:163.000000 3:72.000000 
4:41.000000 5:114.000000 6:40.900002 7:0.817000 8:47.000000 161 | +1 1:4.000000 2:151.000000 3:90.000000 4:38.000000 5:0.000000 6:29.700001 7:0.294000 8:36.000000 162 | +1 1:7.000000 2:102.000000 3:74.000000 4:40.000000 5:105.000000 6:37.200001 7:0.204000 8:45.000000 163 | +1 1:0.000000 2:114.000000 3:80.000000 4:34.000000 5:285.000000 6:44.200001 7:0.167000 8:27.000000 164 | +1 1:2.000000 2:100.000000 3:64.000000 4:23.000000 5:0.000000 6:29.700001 7:0.368000 8:21.000000 165 | -1 1:0.000000 2:131.000000 3:88.000000 4:0.000000 5:0.000000 6:31.600000 7:0.743000 8:32.000000 166 | -1 1:6.000000 2:104.000000 3:74.000000 4:18.000000 5:156.000000 6:29.900000 7:0.722000 8:41.000000 167 | +1 1:3.000000 2:148.000000 3:66.000000 4:25.000000 5:0.000000 6:32.500000 7:0.256000 8:22.000000 168 | +1 1:4.000000 2:120.000000 3:68.000000 4:0.000000 5:0.000000 6:29.600000 7:0.709000 8:34.000000 169 | +1 1:4.000000 2:110.000000 3:66.000000 4:0.000000 5:0.000000 6:31.900000 7:0.471000 8:29.000000 170 | +1 1:3.000000 2:111.000000 3:90.000000 4:12.000000 5:78.000000 6:28.400000 7:0.495000 8:29.000000 171 | -1 1:6.000000 2:102.000000 3:82.000000 4:0.000000 5:0.000000 6:30.799999 7:0.180000 8:36.000000 172 | -1 1:6.000000 2:134.000000 3:70.000000 4:23.000000 5:130.000000 6:35.400002 7:0.542000 8:29.000000 173 | +1 1:2.000000 2:87.000000 3:0.000000 4:23.000000 5:0.000000 6:28.900000 7:0.773000 8:25.000000 174 | +1 1:1.000000 2:79.000000 3:60.000000 4:42.000000 5:48.000000 6:43.500000 7:0.678000 8:23.000000 175 | +1 1:2.000000 2:75.000000 3:64.000000 4:24.000000 5:55.000000 6:29.700001 7:0.370000 8:33.000000 176 | -1 1:8.000000 2:179.000000 3:72.000000 4:42.000000 5:130.000000 6:32.700001 7:0.719000 8:36.000000 177 | +1 1:6.000000 2:85.000000 3:78.000000 4:0.000000 5:0.000000 6:31.200001 7:0.382000 8:42.000000 178 | -1 1:0.000000 2:129.000000 3:110.000000 4:46.000000 5:130.000000 6:67.099998 7:0.319000 8:26.000000 179 | +1 1:5.000000 2:143.000000 3:78.000000 4:0.000000 5:0.000000 6:45.000000 7:0.190000 8:47.000000 180 | -1 1:5.000000 2:130.000000 3:82.000000 4:0.000000 5:0.000000 6:39.099998 7:0.956000 8:37.000000 181 | +1 1:6.000000 2:87.000000 3:80.000000 4:0.000000 5:0.000000 6:23.200001 7:0.084000 8:32.000000 182 | +1 1:0.000000 2:119.000000 3:64.000000 4:18.000000 5:92.000000 6:34.900002 7:0.725000 8:23.000000 183 | +1 1:1.000000 2:0.000000 3:74.000000 4:20.000000 5:23.000000 6:27.700001 7:0.299000 8:21.000000 184 | +1 1:5.000000 2:73.000000 3:60.000000 4:0.000000 5:0.000000 6:26.799999 7:0.268000 8:27.000000 185 | +1 1:4.000000 2:141.000000 3:74.000000 4:0.000000 5:0.000000 6:27.600000 7:0.244000 8:40.000000 186 | -1 1:7.000000 2:194.000000 3:68.000000 4:28.000000 5:0.000000 6:35.900002 7:0.745000 8:41.000000 187 | -1 1:8.000000 2:181.000000 3:68.000000 4:36.000000 5:495.000000 6:30.100000 7:0.615000 8:60.000000 188 | -1 1:1.000000 2:128.000000 3:98.000000 4:41.000000 5:58.000000 6:32.000000 7:1.321000 8:33.000000 189 | -1 1:8.000000 2:109.000000 3:76.000000 4:39.000000 5:114.000000 6:27.900000 7:0.640000 8:31.000000 190 | -1 1:5.000000 2:139.000000 3:80.000000 4:35.000000 5:160.000000 6:31.600000 7:0.361000 8:25.000000 191 | +1 1:3.000000 2:111.000000 3:62.000000 4:0.000000 5:0.000000 6:22.600000 7:0.142000 8:21.000000 192 | +1 1:9.000000 2:123.000000 3:70.000000 4:44.000000 5:94.000000 6:33.099998 7:0.374000 8:40.000000 193 | -1 1:7.000000 2:159.000000 3:66.000000 4:0.000000 5:0.000000 6:30.400000 7:0.383000 8:36.000000 194 | -1 1:11.000000 2:135.000000 3:0.000000 4:0.000000 5:0.000000 6:52.299999 7:0.578000 
8:40.000000 195 | +1 1:8.000000 2:85.000000 3:55.000000 4:20.000000 5:0.000000 6:24.400000 7:0.136000 8:42.000000 196 | -1 1:5.000000 2:158.000000 3:84.000000 4:41.000000 5:210.000000 6:39.400002 7:0.395000 8:29.000000 197 | +1 1:1.000000 2:105.000000 3:58.000000 4:0.000000 5:0.000000 6:24.299999 7:0.187000 8:21.000000 198 | -1 1:3.000000 2:107.000000 3:62.000000 4:13.000000 5:48.000000 6:22.900000 7:0.678000 8:23.000000 199 | -1 1:4.000000 2:109.000000 3:64.000000 4:44.000000 5:99.000000 6:34.799999 7:0.905000 8:26.000000 200 | -1 1:4.000000 2:148.000000 3:60.000000 4:27.000000 5:318.000000 6:30.900000 7:0.150000 8:29.000000 201 | +1 1:0.000000 2:113.000000 3:80.000000 4:16.000000 5:0.000000 6:31.000000 7:0.874000 8:21.000000 202 | +1 1:1.000000 2:138.000000 3:82.000000 4:0.000000 5:0.000000 6:40.099998 7:0.236000 8:28.000000 203 | +1 1:0.000000 2:108.000000 3:68.000000 4:20.000000 5:0.000000 6:27.299999 7:0.787000 8:32.000000 204 | +1 1:2.000000 2:99.000000 3:70.000000 4:16.000000 5:44.000000 6:20.400000 7:0.235000 8:27.000000 205 | +1 1:6.000000 2:103.000000 3:72.000000 4:32.000000 5:190.000000 6:37.700001 7:0.324000 8:55.000000 206 | +1 1:5.000000 2:111.000000 3:72.000000 4:28.000000 5:0.000000 6:23.900000 7:0.407000 8:27.000000 207 | -1 1:8.000000 2:196.000000 3:76.000000 4:29.000000 5:280.000000 6:37.500000 7:0.605000 8:57.000000 208 | -1 1:5.000000 2:162.000000 3:104.000000 4:0.000000 5:0.000000 6:37.700001 7:0.151000 8:52.000000 209 | +1 1:1.000000 2:96.000000 3:64.000000 4:27.000000 5:87.000000 6:33.200001 7:0.289000 8:21.000000 210 | -1 1:7.000000 2:184.000000 3:84.000000 4:33.000000 5:0.000000 6:35.500000 7:0.355000 8:41.000000 211 | +1 1:2.000000 2:81.000000 3:60.000000 4:22.000000 5:0.000000 6:27.700001 7:0.290000 8:25.000000 212 | +1 1:0.000000 2:147.000000 3:85.000000 4:54.000000 5:0.000000 6:42.799999 7:0.375000 8:24.000000 213 | +1 1:7.000000 2:179.000000 3:95.000000 4:31.000000 5:0.000000 6:34.200001 7:0.164000 8:60.000000 214 | -1 1:0.000000 2:140.000000 3:65.000000 4:26.000000 5:130.000000 6:42.599998 7:0.431000 8:24.000000 215 | -1 1:9.000000 2:112.000000 3:82.000000 4:32.000000 5:175.000000 6:34.200001 7:0.260000 8:36.000000 216 | -1 1:12.000000 2:151.000000 3:70.000000 4:40.000000 5:271.000000 6:41.799999 7:0.742000 8:38.000000 217 | -1 1:5.000000 2:109.000000 3:62.000000 4:41.000000 5:129.000000 6:35.799999 7:0.514000 8:25.000000 218 | +1 1:6.000000 2:125.000000 3:68.000000 4:30.000000 5:120.000000 6:30.000000 7:0.464000 8:32.000000 219 | -1 1:5.000000 2:85.000000 3:74.000000 4:22.000000 5:0.000000 6:29.000000 7:1.224000 8:32.000000 220 | -1 1:5.000000 2:112.000000 3:66.000000 4:0.000000 5:0.000000 6:37.799999 7:0.261000 8:41.000000 221 | -1 1:0.000000 2:177.000000 3:60.000000 4:29.000000 5:478.000000 6:34.599998 7:1.072000 8:21.000000 222 | -1 1:2.000000 2:158.000000 3:90.000000 4:0.000000 5:0.000000 6:31.600000 7:0.805000 8:66.000000 223 | +1 1:7.000000 2:119.000000 3:0.000000 4:0.000000 5:0.000000 6:25.200001 7:0.209000 8:37.000000 224 | +1 1:7.000000 2:142.000000 3:60.000000 4:33.000000 5:190.000000 6:28.799999 7:0.687000 8:61.000000 225 | +1 1:1.000000 2:100.000000 3:66.000000 4:15.000000 5:56.000000 6:23.600000 7:0.666000 8:26.000000 226 | +1 1:1.000000 2:87.000000 3:78.000000 4:27.000000 5:32.000000 6:34.599998 7:0.101000 8:22.000000 227 | +1 1:0.000000 2:101.000000 3:76.000000 4:0.000000 5:0.000000 6:35.700001 7:0.198000 8:26.000000 228 | -1 1:3.000000 2:162.000000 3:52.000000 4:38.000000 5:0.000000 6:37.200001 7:0.652000 8:24.000000 229 | +1 1:4.000000 
2:197.000000 3:70.000000 4:39.000000 5:744.000000 6:36.700001 7:2.329000 8:31.000000 230 | +1 1:0.000000 2:117.000000 3:80.000000 4:31.000000 5:53.000000 6:45.200001 7:0.089000 8:24.000000 231 | -1 1:4.000000 2:142.000000 3:86.000000 4:0.000000 5:0.000000 6:44.000000 7:0.645000 8:22.000000 232 | -1 1:6.000000 2:134.000000 3:80.000000 4:37.000000 5:370.000000 6:46.200001 7:0.238000 8:46.000000 233 | +1 1:1.000000 2:79.000000 3:80.000000 4:25.000000 5:37.000000 6:25.400000 7:0.583000 8:22.000000 234 | +1 1:4.000000 2:122.000000 3:68.000000 4:0.000000 5:0.000000 6:35.000000 7:0.394000 8:29.000000 235 | +1 1:3.000000 2:74.000000 3:68.000000 4:28.000000 5:45.000000 6:29.700001 7:0.293000 8:23.000000 236 | -1 1:4.000000 2:171.000000 3:72.000000 4:0.000000 5:0.000000 6:43.599998 7:0.479000 8:26.000000 237 | -1 1:7.000000 2:181.000000 3:84.000000 4:21.000000 5:192.000000 6:35.900002 7:0.586000 8:51.000000 238 | -1 1:0.000000 2:179.000000 3:90.000000 4:27.000000 5:0.000000 6:44.099998 7:0.686000 8:23.000000 239 | -1 1:9.000000 2:164.000000 3:84.000000 4:21.000000 5:0.000000 6:30.799999 7:0.831000 8:32.000000 240 | +1 1:0.000000 2:104.000000 3:76.000000 4:0.000000 5:0.000000 6:18.400000 7:0.582000 8:27.000000 241 | +1 1:1.000000 2:91.000000 3:64.000000 4:24.000000 5:0.000000 6:29.200001 7:0.192000 8:21.000000 242 | +1 1:4.000000 2:91.000000 3:70.000000 4:32.000000 5:88.000000 6:33.099998 7:0.446000 8:22.000000 243 | -1 1:3.000000 2:139.000000 3:54.000000 4:0.000000 5:0.000000 6:25.600000 7:0.402000 8:22.000000 244 | -1 1:6.000000 2:119.000000 3:50.000000 4:22.000000 5:176.000000 6:27.100000 7:1.318000 8:33.000000 245 | +1 1:2.000000 2:146.000000 3:76.000000 4:35.000000 5:194.000000 6:38.200001 7:0.329000 8:29.000000 246 | -1 1:9.000000 2:184.000000 3:85.000000 4:15.000000 5:0.000000 6:30.000000 7:1.213000 8:49.000000 247 | +1 1:10.000000 2:122.000000 3:68.000000 4:0.000000 5:0.000000 6:31.200001 7:0.258000 8:41.000000 248 | +1 1:0.000000 2:165.000000 3:90.000000 4:33.000000 5:680.000000 6:52.299999 7:0.427000 8:23.000000 249 | +1 1:9.000000 2:124.000000 3:70.000000 4:33.000000 5:402.000000 6:35.400002 7:0.282000 8:34.000000 250 | +1 1:1.000000 2:111.000000 3:86.000000 4:19.000000 5:0.000000 6:30.100000 7:0.143000 8:23.000000 251 | +1 1:9.000000 2:106.000000 3:52.000000 4:0.000000 5:0.000000 6:31.200001 7:0.380000 8:42.000000 252 | +1 1:2.000000 2:129.000000 3:84.000000 4:0.000000 5:0.000000 6:28.000000 7:0.284000 8:27.000000 253 | +1 1:2.000000 2:90.000000 3:80.000000 4:14.000000 5:55.000000 6:24.400000 7:0.249000 8:24.000000 254 | +1 1:0.000000 2:86.000000 3:68.000000 4:32.000000 5:0.000000 6:35.799999 7:0.238000 8:25.000000 255 | -1 1:12.000000 2:92.000000 3:62.000000 4:7.000000 5:258.000000 6:27.600000 7:0.926000 8:44.000000 256 | -1 1:1.000000 2:113.000000 3:64.000000 4:35.000000 5:0.000000 6:33.599998 7:0.543000 8:21.000000 257 | +1 1:3.000000 2:111.000000 3:56.000000 4:39.000000 5:0.000000 6:30.100000 7:0.557000 8:30.000000 258 | +1 1:2.000000 2:114.000000 3:68.000000 4:22.000000 5:0.000000 6:28.700001 7:0.092000 8:25.000000 259 | +1 1:1.000000 2:193.000000 3:50.000000 4:16.000000 5:375.000000 6:25.900000 7:0.655000 8:24.000000 260 | -1 1:11.000000 2:155.000000 3:76.000000 4:28.000000 5:150.000000 6:33.299999 7:1.353000 8:51.000000 261 | +1 1:3.000000 2:191.000000 3:68.000000 4:15.000000 5:130.000000 6:30.900000 7:0.299000 8:34.000000 262 | -1 1:3.000000 2:141.000000 3:0.000000 4:0.000000 5:0.000000 6:30.000000 7:0.761000 8:27.000000 263 | +1 1:4.000000 2:95.000000 3:70.000000 4:32.000000 
5:0.000000 6:32.099998 7:0.612000 8:24.000000 264 | +1 1:3.000000 2:142.000000 3:80.000000 4:15.000000 5:0.000000 6:32.400002 7:0.200000 8:63.000000 265 | -1 1:4.000000 2:123.000000 3:62.000000 4:0.000000 5:0.000000 6:32.000000 7:0.226000 8:35.000000 266 | +1 1:5.000000 2:96.000000 3:74.000000 4:18.000000 5:67.000000 6:33.599998 7:0.997000 8:43.000000 267 | -1 1:0.000000 2:138.000000 3:0.000000 4:0.000000 5:0.000000 6:36.299999 7:0.933000 8:25.000000 268 | +1 1:2.000000 2:128.000000 3:64.000000 4:42.000000 5:0.000000 6:40.000000 7:1.101000 8:24.000000 269 | +1 1:0.000000 2:102.000000 3:52.000000 4:0.000000 5:0.000000 6:25.100000 7:0.078000 8:21.000000 270 | -1 1:2.000000 2:146.000000 3:0.000000 4:0.000000 5:0.000000 6:27.500000 7:0.240000 8:28.000000 271 | -1 1:10.000000 2:101.000000 3:86.000000 4:37.000000 5:0.000000 6:45.599998 7:1.136000 8:38.000000 272 | +1 1:2.000000 2:108.000000 3:62.000000 4:32.000000 5:56.000000 6:25.200001 7:0.128000 8:21.000000 273 | +1 1:3.000000 2:122.000000 3:78.000000 4:0.000000 5:0.000000 6:23.000000 7:0.254000 8:40.000000 274 | +1 1:1.000000 2:71.000000 3:78.000000 4:50.000000 5:45.000000 6:33.200001 7:0.422000 8:21.000000 275 | +1 1:13.000000 2:106.000000 3:70.000000 4:0.000000 5:0.000000 6:34.200001 7:0.251000 8:52.000000 276 | +1 1:2.000000 2:100.000000 3:70.000000 4:52.000000 5:57.000000 6:40.500000 7:0.677000 8:25.000000 277 | -1 1:7.000000 2:106.000000 3:60.000000 4:24.000000 5:0.000000 6:26.500000 7:0.296000 8:29.000000 278 | +1 1:0.000000 2:104.000000 3:64.000000 4:23.000000 5:116.000000 6:27.799999 7:0.454000 8:23.000000 279 | +1 1:5.000000 2:114.000000 3:74.000000 4:0.000000 5:0.000000 6:24.900000 7:0.744000 8:57.000000 280 | +1 1:2.000000 2:108.000000 3:62.000000 4:10.000000 5:278.000000 6:25.299999 7:0.881000 8:22.000000 281 | -1 1:0.000000 2:146.000000 3:70.000000 4:0.000000 5:0.000000 6:37.900002 7:0.334000 8:28.000000 282 | +1 1:10.000000 2:129.000000 3:76.000000 4:28.000000 5:122.000000 6:35.900002 7:0.280000 8:39.000000 283 | +1 1:7.000000 2:133.000000 3:88.000000 4:15.000000 5:155.000000 6:32.400002 7:0.262000 8:37.000000 284 | -1 1:7.000000 2:161.000000 3:86.000000 4:0.000000 5:0.000000 6:30.400000 7:0.165000 8:47.000000 285 | -1 1:2.000000 2:108.000000 3:80.000000 4:0.000000 5:0.000000 6:27.000000 7:0.259000 8:52.000000 286 | +1 1:7.000000 2:136.000000 3:74.000000 4:26.000000 5:135.000000 6:26.000000 7:0.647000 8:51.000000 287 | +1 1:5.000000 2:155.000000 3:84.000000 4:44.000000 5:545.000000 6:38.700001 7:0.619000 8:34.000000 288 | -1 1:1.000000 2:119.000000 3:86.000000 4:39.000000 5:220.000000 6:45.599998 7:0.808000 8:29.000000 289 | +1 1:4.000000 2:96.000000 3:56.000000 4:17.000000 5:49.000000 6:20.799999 7:0.340000 8:26.000000 290 | +1 1:5.000000 2:108.000000 3:72.000000 4:43.000000 5:75.000000 6:36.099998 7:0.263000 8:33.000000 291 | +1 1:0.000000 2:78.000000 3:88.000000 4:29.000000 5:40.000000 6:36.900002 7:0.434000 8:21.000000 292 | -1 1:0.000000 2:107.000000 3:62.000000 4:30.000000 5:74.000000 6:36.599998 7:0.757000 8:25.000000 293 | -1 1:2.000000 2:128.000000 3:78.000000 4:37.000000 5:182.000000 6:43.299999 7:1.224000 8:31.000000 294 | -1 1:1.000000 2:128.000000 3:48.000000 4:45.000000 5:194.000000 6:40.500000 7:0.613000 8:24.000000 295 | +1 1:0.000000 2:161.000000 3:50.000000 4:0.000000 5:0.000000 6:21.900000 7:0.254000 8:65.000000 296 | +1 1:6.000000 2:151.000000 3:62.000000 4:31.000000 5:120.000000 6:35.500000 7:0.692000 8:28.000000 297 | -1 1:2.000000 2:146.000000 3:70.000000 4:38.000000 5:360.000000 6:28.000000 7:0.337000 
8:29.000000 298 | +1 1:0.000000 2:126.000000 3:84.000000 4:29.000000 5:215.000000 6:30.700001 7:0.520000 8:24.000000 299 | -1 1:14.000000 2:100.000000 3:78.000000 4:25.000000 5:184.000000 6:36.599998 7:0.412000 8:46.000000 300 | +1 1:8.000000 2:112.000000 3:72.000000 4:0.000000 5:0.000000 6:23.600000 7:0.840000 8:58.000000 301 | -1 1:0.000000 2:167.000000 3:0.000000 4:0.000000 5:0.000000 6:32.299999 7:0.839000 8:30.000000 302 | -1 1:2.000000 2:144.000000 3:58.000000 4:33.000000 5:135.000000 6:31.600000 7:0.422000 8:25.000000 303 | +1 1:5.000000 2:77.000000 3:82.000000 4:41.000000 5:42.000000 6:35.799999 7:0.156000 8:35.000000 304 | -1 1:5.000000 2:115.000000 3:98.000000 4:0.000000 5:0.000000 6:52.900002 7:0.209000 8:28.000000 305 | +1 1:3.000000 2:150.000000 3:76.000000 4:0.000000 5:0.000000 6:21.000000 7:0.207000 8:37.000000 306 | +1 1:2.000000 2:120.000000 3:76.000000 4:37.000000 5:105.000000 6:39.700001 7:0.215000 8:29.000000 307 | -1 1:10.000000 2:161.000000 3:68.000000 4:23.000000 5:132.000000 6:25.500000 7:0.326000 8:47.000000 308 | +1 1:0.000000 2:137.000000 3:68.000000 4:14.000000 5:148.000000 6:24.799999 7:0.143000 8:21.000000 309 | -1 1:0.000000 2:128.000000 3:68.000000 4:19.000000 5:180.000000 6:30.500000 7:1.391000 8:25.000000 310 | -1 1:2.000000 2:124.000000 3:68.000000 4:28.000000 5:205.000000 6:32.900002 7:0.875000 8:30.000000 311 | +1 1:6.000000 2:80.000000 3:66.000000 4:30.000000 5:0.000000 6:26.200001 7:0.313000 8:41.000000 312 | +1 1:0.000000 2:106.000000 3:70.000000 4:37.000000 5:148.000000 6:39.400002 7:0.605000 8:22.000000 313 | -1 1:2.000000 2:155.000000 3:74.000000 4:17.000000 5:96.000000 6:26.600000 7:0.433000 8:27.000000 314 | +1 1:3.000000 2:113.000000 3:50.000000 4:10.000000 5:85.000000 6:29.500000 7:0.626000 8:25.000000 315 | -1 1:7.000000 2:109.000000 3:80.000000 4:31.000000 5:0.000000 6:35.900002 7:1.127000 8:43.000000 316 | +1 1:2.000000 2:112.000000 3:68.000000 4:22.000000 5:94.000000 6:34.099998 7:0.315000 8:26.000000 317 | +1 1:3.000000 2:99.000000 3:80.000000 4:11.000000 5:64.000000 6:19.299999 7:0.284000 8:30.000000 318 | -1 1:3.000000 2:182.000000 3:74.000000 4:0.000000 5:0.000000 6:30.500000 7:0.345000 8:29.000000 319 | +1 1:3.000000 2:115.000000 3:66.000000 4:39.000000 5:140.000000 6:38.099998 7:0.150000 8:28.000000 320 | -1 1:6.000000 2:194.000000 3:78.000000 4:0.000000 5:0.000000 6:23.500000 7:0.129000 8:59.000000 321 | +1 1:4.000000 2:129.000000 3:60.000000 4:12.000000 5:231.000000 6:27.500000 7:0.527000 8:31.000000 322 | -1 1:3.000000 2:112.000000 3:74.000000 4:30.000000 5:0.000000 6:31.600000 7:0.197000 8:25.000000 323 | -1 1:0.000000 2:124.000000 3:70.000000 4:20.000000 5:0.000000 6:27.400000 7:0.254000 8:36.000000 324 | -1 1:13.000000 2:152.000000 3:90.000000 4:33.000000 5:29.000000 6:26.799999 7:0.731000 8:43.000000 325 | +1 1:2.000000 2:112.000000 3:75.000000 4:32.000000 5:0.000000 6:35.700001 7:0.148000 8:21.000000 326 | +1 1:1.000000 2:157.000000 3:72.000000 4:21.000000 5:168.000000 6:25.600000 7:0.123000 8:24.000000 327 | -1 1:1.000000 2:122.000000 3:64.000000 4:32.000000 5:156.000000 6:35.099998 7:0.692000 8:30.000000 328 | +1 1:10.000000 2:179.000000 3:70.000000 4:0.000000 5:0.000000 6:35.099998 7:0.200000 8:37.000000 329 | -1 1:2.000000 2:102.000000 3:86.000000 4:36.000000 5:120.000000 6:45.500000 7:0.127000 8:23.000000 330 | +1 1:6.000000 2:105.000000 3:70.000000 4:32.000000 5:68.000000 6:30.799999 7:0.122000 8:37.000000 331 | +1 1:8.000000 2:118.000000 3:72.000000 4:19.000000 5:0.000000 6:23.100000 7:1.476000 8:46.000000 332 | +1 
1:2.000000 2:87.000000 3:58.000000 4:16.000000 5:52.000000 6:32.700001 7:0.166000 8:25.000000 333 | -1 1:1.000000 2:180.000000 3:0.000000 4:0.000000 5:0.000000 6:43.299999 7:0.282000 8:41.000000 334 | +1 1:12.000000 2:106.000000 3:80.000000 4:0.000000 5:0.000000 6:23.600000 7:0.137000 8:44.000000 335 | +1 1:1.000000 2:95.000000 3:60.000000 4:18.000000 5:58.000000 6:23.900000 7:0.260000 8:22.000000 336 | +1 1:0.000000 2:165.000000 3:76.000000 4:43.000000 5:255.000000 6:47.900002 7:0.259000 8:26.000000 337 | +1 1:0.000000 2:117.000000 3:0.000000 4:0.000000 5:0.000000 6:33.799999 7:0.932000 8:44.000000 338 | -1 1:5.000000 2:115.000000 3:76.000000 4:0.000000 5:0.000000 6:31.200001 7:0.343000 8:44.000000 339 | -1 1:9.000000 2:152.000000 3:78.000000 4:34.000000 5:171.000000 6:34.200001 7:0.893000 8:33.000000 340 | -1 1:7.000000 2:178.000000 3:84.000000 4:0.000000 5:0.000000 6:39.900002 7:0.331000 8:41.000000 341 | +1 1:1.000000 2:130.000000 3:70.000000 4:13.000000 5:105.000000 6:25.900000 7:0.472000 8:22.000000 342 | +1 1:1.000000 2:95.000000 3:74.000000 4:21.000000 5:73.000000 6:25.900000 7:0.673000 8:36.000000 343 | +1 1:1.000000 2:0.000000 3:68.000000 4:35.000000 5:0.000000 6:32.000000 7:0.389000 8:22.000000 344 | +1 1:5.000000 2:122.000000 3:86.000000 4:0.000000 5:0.000000 6:34.700001 7:0.290000 8:33.000000 345 | +1 1:8.000000 2:95.000000 3:72.000000 4:0.000000 5:0.000000 6:36.799999 7:0.485000 8:57.000000 346 | +1 1:8.000000 2:126.000000 3:88.000000 4:36.000000 5:108.000000 6:38.500000 7:0.349000 8:49.000000 347 | +1 1:1.000000 2:139.000000 3:46.000000 4:19.000000 5:83.000000 6:28.700001 7:0.654000 8:22.000000 348 | +1 1:3.000000 2:116.000000 3:0.000000 4:0.000000 5:0.000000 6:23.500000 7:0.187000 8:23.000000 349 | +1 1:3.000000 2:99.000000 3:62.000000 4:19.000000 5:74.000000 6:21.799999 7:0.279000 8:26.000000 350 | -1 1:5.000000 2:0.000000 3:80.000000 4:32.000000 5:0.000000 6:41.000000 7:0.346000 8:37.000000 351 | +1 1:4.000000 2:92.000000 3:80.000000 4:0.000000 5:0.000000 6:42.200001 7:0.237000 8:29.000000 352 | +1 1:4.000000 2:137.000000 3:84.000000 4:0.000000 5:0.000000 6:31.200001 7:0.252000 8:30.000000 353 | +1 1:3.000000 2:61.000000 3:82.000000 4:28.000000 5:0.000000 6:34.400002 7:0.243000 8:46.000000 354 | +1 1:1.000000 2:90.000000 3:62.000000 4:12.000000 5:43.000000 6:27.200001 7:0.580000 8:24.000000 355 | +1 1:3.000000 2:90.000000 3:78.000000 4:0.000000 5:0.000000 6:42.700001 7:0.559000 8:21.000000 356 | -1 1:9.000000 2:165.000000 3:88.000000 4:0.000000 5:0.000000 6:30.400000 7:0.302000 8:49.000000 357 | -1 1:1.000000 2:125.000000 3:50.000000 4:40.000000 5:167.000000 6:33.299999 7:0.962000 8:28.000000 358 | -1 1:13.000000 2:129.000000 3:0.000000 4:30.000000 5:0.000000 6:39.900002 7:0.569000 8:44.000000 359 | +1 1:12.000000 2:88.000000 3:74.000000 4:40.000000 5:54.000000 6:35.299999 7:0.378000 8:48.000000 360 | -1 1:1.000000 2:196.000000 3:76.000000 4:36.000000 5:249.000000 6:36.500000 7:0.875000 8:29.000000 361 | -1 1:5.000000 2:189.000000 3:64.000000 4:33.000000 5:325.000000 6:31.200001 7:0.583000 8:29.000000 362 | +1 1:5.000000 2:158.000000 3:70.000000 4:0.000000 5:0.000000 6:29.799999 7:0.207000 8:63.000000 363 | +1 1:5.000000 2:103.000000 3:108.000000 4:37.000000 5:0.000000 6:39.200001 7:0.305000 8:65.000000 364 | -1 1:4.000000 2:146.000000 3:78.000000 4:0.000000 5:0.000000 6:38.500000 7:0.520000 8:67.000000 365 | +1 1:4.000000 2:147.000000 3:74.000000 4:25.000000 5:293.000000 6:34.900002 7:0.385000 8:30.000000 366 | +1 1:5.000000 2:99.000000 3:54.000000 4:28.000000 
5:83.000000 6:34.000000 7:0.499000 8:30.000000 367 | -1 1:6.000000 2:124.000000 3:72.000000 4:0.000000 5:0.000000 6:27.600000 7:0.368000 8:29.000000 368 | +1 1:0.000000 2:101.000000 3:64.000000 4:17.000000 5:0.000000 6:21.000000 7:0.252000 8:21.000000 369 | +1 1:3.000000 2:81.000000 3:86.000000 4:16.000000 5:66.000000 6:27.500000 7:0.306000 8:22.000000 370 | -1 1:1.000000 2:133.000000 3:102.000000 4:28.000000 5:140.000000 6:32.799999 7:0.234000 8:45.000000 371 | -1 1:3.000000 2:173.000000 3:82.000000 4:48.000000 5:465.000000 6:38.400002 7:2.137000 8:25.000000 372 | +1 1:0.000000 2:118.000000 3:64.000000 4:23.000000 5:89.000000 6:0.000000 7:1.731000 8:21.000000 373 | +1 1:0.000000 2:84.000000 3:64.000000 4:22.000000 5:66.000000 6:35.799999 7:0.545000 8:21.000000 374 | +1 1:2.000000 2:105.000000 3:58.000000 4:40.000000 5:94.000000 6:34.900002 7:0.225000 8:25.000000 375 | +1 1:2.000000 2:122.000000 3:52.000000 4:43.000000 5:158.000000 6:36.200001 7:0.816000 8:28.000000 376 | -1 1:12.000000 2:140.000000 3:82.000000 4:43.000000 5:325.000000 6:39.200001 7:0.528000 8:58.000000 377 | +1 1:0.000000 2:98.000000 3:82.000000 4:15.000000 5:84.000000 6:25.200001 7:0.299000 8:22.000000 378 | +1 1:1.000000 2:87.000000 3:60.000000 4:37.000000 5:75.000000 6:37.200001 7:0.509000 8:22.000000 379 | -1 1:4.000000 2:156.000000 3:75.000000 4:0.000000 5:0.000000 6:48.299999 7:0.238000 8:32.000000 380 | +1 1:0.000000 2:93.000000 3:100.000000 4:39.000000 5:72.000000 6:43.400002 7:1.021000 8:35.000000 381 | +1 1:1.000000 2:107.000000 3:72.000000 4:30.000000 5:82.000000 6:30.799999 7:0.821000 8:24.000000 382 | +1 1:0.000000 2:105.000000 3:68.000000 4:22.000000 5:0.000000 6:20.000000 7:0.236000 8:22.000000 383 | +1 1:1.000000 2:109.000000 3:60.000000 4:8.000000 5:182.000000 6:25.400000 7:0.947000 8:21.000000 384 | +1 1:1.000000 2:90.000000 3:62.000000 4:18.000000 5:59.000000 6:25.100000 7:1.268000 8:25.000000 385 | +1 1:1.000000 2:125.000000 3:70.000000 4:24.000000 5:110.000000 6:24.299999 7:0.221000 8:25.000000 386 | +1 1:1.000000 2:119.000000 3:54.000000 4:13.000000 5:50.000000 6:22.299999 7:0.205000 8:24.000000 387 | -1 1:5.000000 2:116.000000 3:74.000000 4:29.000000 5:0.000000 6:32.299999 7:0.660000 8:35.000000 388 | -1 1:8.000000 2:105.000000 3:100.000000 4:36.000000 5:0.000000 6:43.299999 7:0.239000 8:45.000000 389 | -1 1:5.000000 2:144.000000 3:82.000000 4:26.000000 5:285.000000 6:32.000000 7:0.452000 8:58.000000 390 | +1 1:3.000000 2:100.000000 3:68.000000 4:23.000000 5:81.000000 6:31.600000 7:0.949000 8:28.000000 391 | +1 1:1.000000 2:100.000000 3:66.000000 4:29.000000 5:196.000000 6:32.000000 7:0.444000 8:42.000000 392 | -1 1:5.000000 2:166.000000 3:76.000000 4:0.000000 5:0.000000 6:45.700001 7:0.340000 8:27.000000 393 | +1 1:1.000000 2:131.000000 3:64.000000 4:14.000000 5:415.000000 6:23.700001 7:0.389000 8:21.000000 394 | +1 1:4.000000 2:116.000000 3:72.000000 4:12.000000 5:87.000000 6:22.100000 7:0.463000 8:37.000000 395 | -1 1:4.000000 2:158.000000 3:78.000000 4:0.000000 5:0.000000 6:32.900002 7:0.803000 8:31.000000 396 | +1 1:2.000000 2:127.000000 3:58.000000 4:24.000000 5:275.000000 6:27.700001 7:1.600000 8:25.000000 397 | +1 1:3.000000 2:96.000000 3:56.000000 4:34.000000 5:115.000000 6:24.700001 7:0.944000 8:39.000000 398 | -1 1:0.000000 2:131.000000 3:66.000000 4:40.000000 5:0.000000 6:34.299999 7:0.196000 8:22.000000 399 | +1 1:3.000000 2:82.000000 3:70.000000 4:0.000000 5:0.000000 6:21.100000 7:0.389000 8:25.000000 400 | -1 1:3.000000 2:193.000000 3:70.000000 4:31.000000 5:0.000000 6:34.900002 
7:0.241000 8:25.000000 401 | -1 1:4.000000 2:95.000000 3:64.000000 4:0.000000 5:0.000000 6:32.000000 7:0.161000 8:31.000000 402 | +1 1:6.000000 2:137.000000 3:61.000000 4:0.000000 5:0.000000 6:24.200001 7:0.151000 8:55.000000 403 | -1 1:5.000000 2:136.000000 3:84.000000 4:41.000000 5:88.000000 6:35.000000 7:0.286000 8:35.000000 404 | +1 1:9.000000 2:72.000000 3:78.000000 4:25.000000 5:0.000000 6:31.600000 7:0.280000 8:38.000000 405 | -1 1:5.000000 2:168.000000 3:64.000000 4:0.000000 5:0.000000 6:32.900002 7:0.135000 8:41.000000 406 | +1 1:2.000000 2:123.000000 3:48.000000 4:32.000000 5:165.000000 6:42.099998 7:0.520000 8:26.000000 407 | -1 1:4.000000 2:115.000000 3:72.000000 4:0.000000 5:0.000000 6:28.900000 7:0.376000 8:46.000000 408 | +1 1:0.000000 2:101.000000 3:62.000000 4:0.000000 5:0.000000 6:21.900000 7:0.336000 8:25.000000 409 | -1 1:8.000000 2:197.000000 3:74.000000 4:0.000000 5:0.000000 6:25.900000 7:1.191000 8:39.000000 410 | -1 1:1.000000 2:172.000000 3:68.000000 4:49.000000 5:579.000000 6:42.400002 7:0.702000 8:28.000000 411 | +1 1:6.000000 2:102.000000 3:90.000000 4:39.000000 5:0.000000 6:35.700001 7:0.674000 8:28.000000 412 | +1 1:1.000000 2:112.000000 3:72.000000 4:30.000000 5:176.000000 6:34.400002 7:0.528000 8:25.000000 413 | +1 1:1.000000 2:143.000000 3:84.000000 4:23.000000 5:310.000000 6:42.400002 7:1.076000 8:22.000000 414 | +1 1:1.000000 2:143.000000 3:74.000000 4:22.000000 5:61.000000 6:26.200001 7:0.256000 8:21.000000 415 | -1 1:0.000000 2:138.000000 3:60.000000 4:35.000000 5:167.000000 6:34.599998 7:0.534000 8:21.000000 416 | -1 1:3.000000 2:173.000000 3:84.000000 4:33.000000 5:474.000000 6:35.700001 7:0.258000 8:22.000000 417 | +1 1:1.000000 2:97.000000 3:68.000000 4:21.000000 5:0.000000 6:27.200001 7:1.095000 8:22.000000 418 | -1 1:4.000000 2:144.000000 3:82.000000 4:32.000000 5:0.000000 6:38.500000 7:0.554000 8:37.000000 419 | +1 1:1.000000 2:83.000000 3:68.000000 4:0.000000 5:0.000000 6:18.200001 7:0.624000 8:27.000000 420 | -1 1:3.000000 2:129.000000 3:64.000000 4:29.000000 5:115.000000 6:26.400000 7:0.219000 8:28.000000 421 | +1 1:1.000000 2:119.000000 3:88.000000 4:41.000000 5:170.000000 6:45.299999 7:0.507000 8:26.000000 422 | +1 1:2.000000 2:94.000000 3:68.000000 4:18.000000 5:76.000000 6:26.000000 7:0.561000 8:21.000000 423 | +1 1:0.000000 2:102.000000 3:64.000000 4:46.000000 5:78.000000 6:40.599998 7:0.496000 8:21.000000 424 | +1 1:2.000000 2:115.000000 3:64.000000 4:22.000000 5:0.000000 6:30.799999 7:0.421000 8:21.000000 425 | -1 1:8.000000 2:151.000000 3:78.000000 4:32.000000 5:210.000000 6:42.900002 7:0.516000 8:36.000000 426 | -1 1:4.000000 2:184.000000 3:78.000000 4:39.000000 5:277.000000 6:37.000000 7:0.264000 8:31.000000 427 | +1 1:0.000000 2:94.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.256000 8:25.000000 428 | -1 1:1.000000 2:181.000000 3:64.000000 4:30.000000 5:180.000000 6:34.099998 7:0.328000 8:38.000000 429 | +1 1:0.000000 2:135.000000 3:94.000000 4:46.000000 5:145.000000 6:40.599998 7:0.284000 8:26.000000 430 | -1 1:1.000000 2:95.000000 3:82.000000 4:25.000000 5:180.000000 6:35.000000 7:0.233000 8:43.000000 431 | +1 1:2.000000 2:99.000000 3:0.000000 4:0.000000 5:0.000000 6:22.200001 7:0.108000 8:23.000000 432 | +1 1:3.000000 2:89.000000 3:74.000000 4:16.000000 5:85.000000 6:30.400000 7:0.551000 8:38.000000 433 | +1 1:1.000000 2:80.000000 3:74.000000 4:11.000000 5:60.000000 6:30.000000 7:0.527000 8:22.000000 434 | +1 1:2.000000 2:139.000000 3:75.000000 4:0.000000 5:0.000000 6:25.600000 7:0.167000 8:29.000000 435 | +1 1:1.000000 
2:90.000000 3:68.000000 4:8.000000 5:0.000000 6:24.500000 7:1.138000 8:36.000000 436 | -1 1:0.000000 2:141.000000 3:0.000000 4:0.000000 5:0.000000 6:42.400002 7:0.205000 8:29.000000 437 | +1 1:12.000000 2:140.000000 3:85.000000 4:33.000000 5:0.000000 6:37.400002 7:0.244000 8:41.000000 438 | +1 1:5.000000 2:147.000000 3:75.000000 4:0.000000 5:0.000000 6:29.900000 7:0.434000 8:28.000000 439 | +1 1:1.000000 2:97.000000 3:70.000000 4:15.000000 5:0.000000 6:18.200001 7:0.147000 8:21.000000 440 | +1 1:6.000000 2:107.000000 3:88.000000 4:0.000000 5:0.000000 6:36.799999 7:0.727000 8:31.000000 441 | -1 1:0.000000 2:189.000000 3:104.000000 4:25.000000 5:0.000000 6:34.299999 7:0.435000 8:41.000000 442 | +1 1:2.000000 2:83.000000 3:66.000000 4:23.000000 5:50.000000 6:32.200001 7:0.497000 8:22.000000 443 | +1 1:4.000000 2:117.000000 3:64.000000 4:27.000000 5:120.000000 6:33.200001 7:0.230000 8:24.000000 444 | -1 1:8.000000 2:108.000000 3:70.000000 4:0.000000 5:0.000000 6:30.500000 7:0.955000 8:33.000000 445 | -1 1:4.000000 2:117.000000 3:62.000000 4:12.000000 5:0.000000 6:29.700001 7:0.380000 8:30.000000 446 | -1 1:0.000000 2:180.000000 3:78.000000 4:63.000000 5:14.000000 6:59.400002 7:2.420000 8:25.000000 447 | +1 1:1.000000 2:100.000000 3:72.000000 4:12.000000 5:70.000000 6:25.299999 7:0.658000 8:28.000000 448 | +1 1:0.000000 2:95.000000 3:80.000000 4:45.000000 5:92.000000 6:36.500000 7:0.330000 8:26.000000 449 | -1 1:0.000000 2:104.000000 3:64.000000 4:37.000000 5:64.000000 6:33.599998 7:0.510000 8:22.000000 450 | +1 1:0.000000 2:120.000000 3:74.000000 4:18.000000 5:63.000000 6:30.500000 7:0.285000 8:26.000000 451 | +1 1:1.000000 2:82.000000 3:64.000000 4:13.000000 5:95.000000 6:21.200001 7:0.415000 8:23.000000 452 | -1 1:2.000000 2:134.000000 3:70.000000 4:0.000000 5:0.000000 6:28.900000 7:0.542000 8:23.000000 453 | +1 1:0.000000 2:91.000000 3:68.000000 4:32.000000 5:210.000000 6:39.900002 7:0.381000 8:25.000000 454 | +1 1:2.000000 2:119.000000 3:0.000000 4:0.000000 5:0.000000 6:19.600000 7:0.832000 8:72.000000 455 | +1 1:2.000000 2:100.000000 3:54.000000 4:28.000000 5:105.000000 6:37.799999 7:0.498000 8:24.000000 456 | -1 1:14.000000 2:175.000000 3:62.000000 4:30.000000 5:0.000000 6:33.599998 7:0.212000 8:38.000000 457 | +1 1:1.000000 2:135.000000 3:54.000000 4:0.000000 5:0.000000 6:26.700001 7:0.687000 8:62.000000 458 | +1 1:5.000000 2:86.000000 3:68.000000 4:28.000000 5:71.000000 6:30.200001 7:0.364000 8:24.000000 459 | -1 1:10.000000 2:148.000000 3:84.000000 4:48.000000 5:237.000000 6:37.599998 7:1.001000 8:51.000000 460 | +1 1:9.000000 2:134.000000 3:74.000000 4:33.000000 5:60.000000 6:25.900000 7:0.460000 8:81.000000 461 | +1 1:9.000000 2:120.000000 3:72.000000 4:22.000000 5:56.000000 6:20.799999 7:0.733000 8:48.000000 462 | +1 1:1.000000 2:71.000000 3:62.000000 4:0.000000 5:0.000000 6:21.799999 7:0.416000 8:26.000000 463 | +1 1:8.000000 2:74.000000 3:70.000000 4:40.000000 5:49.000000 6:35.299999 7:0.705000 8:39.000000 464 | +1 1:5.000000 2:88.000000 3:78.000000 4:30.000000 5:0.000000 6:27.600000 7:0.258000 8:37.000000 465 | +1 1:10.000000 2:115.000000 3:98.000000 4:0.000000 5:0.000000 6:24.000000 7:1.022000 8:34.000000 466 | +1 1:0.000000 2:124.000000 3:56.000000 4:13.000000 5:105.000000 6:21.799999 7:0.452000 8:21.000000 467 | +1 1:0.000000 2:74.000000 3:52.000000 4:10.000000 5:36.000000 6:27.799999 7:0.269000 8:22.000000 468 | +1 1:0.000000 2:97.000000 3:64.000000 4:36.000000 5:100.000000 6:36.799999 7:0.600000 8:25.000000 469 | -1 1:8.000000 2:120.000000 3:0.000000 4:0.000000 5:0.000000 
6:30.000000 7:0.183000 8:38.000000 470 | +1 1:6.000000 2:154.000000 3:78.000000 4:41.000000 5:140.000000 6:46.099998 7:0.571000 8:27.000000 471 | +1 1:1.000000 2:144.000000 3:82.000000 4:40.000000 5:0.000000 6:41.299999 7:0.607000 8:28.000000 472 | +1 1:0.000000 2:137.000000 3:70.000000 4:38.000000 5:0.000000 6:33.200001 7:0.170000 8:22.000000 473 | +1 1:0.000000 2:119.000000 3:66.000000 4:27.000000 5:0.000000 6:38.799999 7:0.259000 8:22.000000 474 | +1 1:7.000000 2:136.000000 3:90.000000 4:0.000000 5:0.000000 6:29.900000 7:0.210000 8:50.000000 475 | +1 1:4.000000 2:114.000000 3:64.000000 4:0.000000 5:0.000000 6:28.900000 7:0.126000 8:24.000000 476 | +1 1:0.000000 2:137.000000 3:84.000000 4:27.000000 5:0.000000 6:27.299999 7:0.231000 8:59.000000 477 | -1 1:2.000000 2:105.000000 3:80.000000 4:45.000000 5:191.000000 6:33.700001 7:0.711000 8:29.000000 478 | +1 1:7.000000 2:114.000000 3:76.000000 4:17.000000 5:110.000000 6:23.799999 7:0.466000 8:31.000000 479 | +1 1:8.000000 2:126.000000 3:74.000000 4:38.000000 5:75.000000 6:25.900000 7:0.162000 8:39.000000 480 | +1 1:4.000000 2:132.000000 3:86.000000 4:31.000000 5:0.000000 6:28.000000 7:0.419000 8:63.000000 481 | -1 1:3.000000 2:158.000000 3:70.000000 4:30.000000 5:328.000000 6:35.500000 7:0.344000 8:35.000000 482 | +1 1:0.000000 2:123.000000 3:88.000000 4:37.000000 5:0.000000 6:35.200001 7:0.197000 8:29.000000 483 | +1 1:4.000000 2:85.000000 3:58.000000 4:22.000000 5:49.000000 6:27.799999 7:0.306000 8:28.000000 484 | +1 1:0.000000 2:84.000000 3:82.000000 4:31.000000 5:125.000000 6:38.200001 7:0.233000 8:23.000000 485 | -1 1:0.000000 2:145.000000 3:0.000000 4:0.000000 5:0.000000 6:44.200001 7:0.630000 8:31.000000 486 | -1 1:0.000000 2:135.000000 3:68.000000 4:42.000000 5:250.000000 6:42.299999 7:0.365000 8:24.000000 487 | +1 1:1.000000 2:139.000000 3:62.000000 4:41.000000 5:480.000000 6:40.700001 7:0.536000 8:21.000000 488 | +1 1:0.000000 2:173.000000 3:78.000000 4:32.000000 5:265.000000 6:46.500000 7:1.159000 8:58.000000 489 | +1 1:4.000000 2:99.000000 3:72.000000 4:17.000000 5:0.000000 6:25.600000 7:0.294000 8:28.000000 490 | +1 1:8.000000 2:194.000000 3:80.000000 4:0.000000 5:0.000000 6:26.100000 7:0.551000 8:67.000000 491 | +1 1:2.000000 2:83.000000 3:65.000000 4:28.000000 5:66.000000 6:36.799999 7:0.629000 8:24.000000 492 | +1 1:2.000000 2:89.000000 3:90.000000 4:30.000000 5:0.000000 6:33.500000 7:0.292000 8:42.000000 493 | +1 1:4.000000 2:99.000000 3:68.000000 4:38.000000 5:0.000000 6:32.799999 7:0.145000 8:33.000000 494 | -1 1:4.000000 2:125.000000 3:70.000000 4:18.000000 5:122.000000 6:28.900000 7:1.144000 8:45.000000 495 | +1 1:3.000000 2:80.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.174000 8:22.000000 496 | +1 1:6.000000 2:166.000000 3:74.000000 4:0.000000 5:0.000000 6:26.600000 7:0.304000 8:66.000000 497 | +1 1:5.000000 2:110.000000 3:68.000000 4:0.000000 5:0.000000 6:26.000000 7:0.292000 8:30.000000 498 | +1 1:2.000000 2:81.000000 3:72.000000 4:15.000000 5:76.000000 6:30.100000 7:0.547000 8:25.000000 499 | -1 1:7.000000 2:195.000000 3:70.000000 4:33.000000 5:145.000000 6:25.100000 7:0.163000 8:55.000000 500 | +1 1:6.000000 2:154.000000 3:74.000000 4:32.000000 5:193.000000 6:29.299999 7:0.839000 8:39.000000 501 | +1 1:2.000000 2:117.000000 3:90.000000 4:19.000000 5:71.000000 6:25.200001 7:0.313000 8:21.000000 502 | +1 1:3.000000 2:84.000000 3:72.000000 4:32.000000 5:0.000000 6:37.200001 7:0.267000 8:28.000000 503 | -1 1:6.000000 2:0.000000 3:68.000000 4:41.000000 5:0.000000 6:39.000000 7:0.727000 8:41.000000 504 | +1 
1:7.000000 2:94.000000 3:64.000000 4:25.000000 5:79.000000 6:33.299999 7:0.738000 8:41.000000 505 | +1 1:3.000000 2:96.000000 3:78.000000 4:39.000000 5:0.000000 6:37.299999 7:0.238000 8:40.000000 506 | +1 1:10.000000 2:75.000000 3:82.000000 4:0.000000 5:0.000000 6:33.299999 7:0.263000 8:38.000000 507 | -1 1:0.000000 2:180.000000 3:90.000000 4:26.000000 5:90.000000 6:36.500000 7:0.314000 8:35.000000 508 | +1 1:1.000000 2:130.000000 3:60.000000 4:23.000000 5:170.000000 6:28.600000 7:0.692000 8:21.000000 509 | +1 1:2.000000 2:84.000000 3:50.000000 4:23.000000 5:76.000000 6:30.400000 7:0.968000 8:21.000000 510 | +1 1:8.000000 2:120.000000 3:78.000000 4:0.000000 5:0.000000 6:25.000000 7:0.409000 8:64.000000 511 | -1 1:12.000000 2:84.000000 3:72.000000 4:31.000000 5:0.000000 6:29.700001 7:0.297000 8:46.000000 512 | +1 1:0.000000 2:139.000000 3:62.000000 4:17.000000 5:210.000000 6:22.100000 7:0.207000 8:21.000000 513 | +1 1:9.000000 2:91.000000 3:68.000000 4:0.000000 5:0.000000 6:24.200001 7:0.200000 8:58.000000 514 | +1 1:2.000000 2:91.000000 3:62.000000 4:0.000000 5:0.000000 6:27.299999 7:0.525000 8:22.000000 515 | +1 1:3.000000 2:99.000000 3:54.000000 4:19.000000 5:86.000000 6:25.600000 7:0.154000 8:24.000000 516 | -1 1:3.000000 2:163.000000 3:70.000000 4:18.000000 5:105.000000 6:31.600000 7:0.268000 8:28.000000 517 | -1 1:9.000000 2:145.000000 3:88.000000 4:34.000000 5:165.000000 6:30.299999 7:0.771000 8:53.000000 518 | +1 1:7.000000 2:125.000000 3:86.000000 4:0.000000 5:0.000000 6:37.599998 7:0.304000 8:51.000000 519 | +1 1:13.000000 2:76.000000 3:60.000000 4:0.000000 5:0.000000 6:32.799999 7:0.180000 8:41.000000 520 | +1 1:6.000000 2:129.000000 3:90.000000 4:7.000000 5:326.000000 6:19.600000 7:0.582000 8:60.000000 521 | +1 1:2.000000 2:68.000000 3:70.000000 4:32.000000 5:66.000000 6:25.000000 7:0.187000 8:25.000000 522 | +1 1:3.000000 2:124.000000 3:80.000000 4:33.000000 5:130.000000 6:33.200001 7:0.305000 8:26.000000 523 | +1 1:6.000000 2:114.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.189000 8:26.000000 524 | -1 1:9.000000 2:130.000000 3:70.000000 4:0.000000 5:0.000000 6:34.200001 7:0.652000 8:45.000000 525 | +1 1:3.000000 2:125.000000 3:58.000000 4:0.000000 5:0.000000 6:31.600000 7:0.151000 8:24.000000 526 | +1 1:3.000000 2:87.000000 3:60.000000 4:18.000000 5:0.000000 6:21.799999 7:0.444000 8:21.000000 527 | +1 1:1.000000 2:97.000000 3:64.000000 4:19.000000 5:82.000000 6:18.200001 7:0.299000 8:21.000000 528 | +1 1:3.000000 2:116.000000 3:74.000000 4:15.000000 5:105.000000 6:26.299999 7:0.107000 8:24.000000 529 | +1 1:0.000000 2:117.000000 3:66.000000 4:31.000000 5:188.000000 6:30.799999 7:0.493000 8:22.000000 530 | +1 1:0.000000 2:111.000000 3:65.000000 4:0.000000 5:0.000000 6:24.600000 7:0.660000 8:31.000000 531 | +1 1:2.000000 2:122.000000 3:60.000000 4:18.000000 5:106.000000 6:29.799999 7:0.717000 8:22.000000 532 | +1 1:0.000000 2:107.000000 3:76.000000 4:0.000000 5:0.000000 6:45.299999 7:0.686000 8:24.000000 533 | +1 1:1.000000 2:86.000000 3:66.000000 4:52.000000 5:65.000000 6:41.299999 7:0.917000 8:29.000000 534 | +1 1:6.000000 2:91.000000 3:0.000000 4:0.000000 5:0.000000 6:29.799999 7:0.501000 8:31.000000 535 | +1 1:1.000000 2:77.000000 3:56.000000 4:30.000000 5:56.000000 6:33.299999 7:1.251000 8:24.000000 536 | -1 1:4.000000 2:132.000000 3:0.000000 4:0.000000 5:0.000000 6:32.900002 7:0.302000 8:23.000000 537 | +1 1:0.000000 2:105.000000 3:90.000000 4:0.000000 5:0.000000 6:29.600000 7:0.197000 8:46.000000 538 | +1 1:0.000000 2:57.000000 3:60.000000 4:0.000000 5:0.000000 
6:21.700001 7:0.735000 8:67.000000 539 | +1 1:0.000000 2:127.000000 3:80.000000 4:37.000000 5:210.000000 6:36.299999 7:0.804000 8:23.000000 540 | -1 1:3.000000 2:129.000000 3:92.000000 4:49.000000 5:155.000000 6:36.400002 7:0.968000 8:32.000000 541 | -1 1:8.000000 2:100.000000 3:74.000000 4:40.000000 5:215.000000 6:39.400002 7:0.661000 8:43.000000 542 | -1 1:3.000000 2:128.000000 3:72.000000 4:25.000000 5:190.000000 6:32.400002 7:0.549000 8:27.000000 543 | -1 1:10.000000 2:90.000000 3:85.000000 4:32.000000 5:0.000000 6:34.900002 7:0.825000 8:56.000000 544 | +1 1:4.000000 2:84.000000 3:90.000000 4:23.000000 5:56.000000 6:39.500000 7:0.159000 8:25.000000 545 | +1 1:1.000000 2:88.000000 3:78.000000 4:29.000000 5:76.000000 6:32.000000 7:0.365000 8:29.000000 546 | -1 1:8.000000 2:186.000000 3:90.000000 4:35.000000 5:225.000000 6:34.500000 7:0.423000 8:37.000000 547 | -1 1:5.000000 2:187.000000 3:76.000000 4:27.000000 5:207.000000 6:43.599998 7:1.034000 8:53.000000 548 | +1 1:4.000000 2:131.000000 3:68.000000 4:21.000000 5:166.000000 6:33.099998 7:0.160000 8:28.000000 549 | +1 1:1.000000 2:164.000000 3:82.000000 4:43.000000 5:67.000000 6:32.799999 7:0.341000 8:50.000000 550 | +1 1:4.000000 2:189.000000 3:110.000000 4:31.000000 5:0.000000 6:28.500000 7:0.680000 8:37.000000 551 | +1 1:1.000000 2:116.000000 3:70.000000 4:28.000000 5:0.000000 6:27.400000 7:0.204000 8:21.000000 552 | +1 1:3.000000 2:84.000000 3:68.000000 4:30.000000 5:106.000000 6:31.900000 7:0.591000 8:25.000000 553 | +1 1:6.000000 2:114.000000 3:88.000000 4:0.000000 5:0.000000 6:27.799999 7:0.247000 8:66.000000 554 | +1 1:1.000000 2:88.000000 3:62.000000 4:24.000000 5:44.000000 6:29.900000 7:0.422000 8:23.000000 555 | +1 1:1.000000 2:84.000000 3:64.000000 4:23.000000 5:115.000000 6:36.900002 7:0.471000 8:28.000000 556 | +1 1:7.000000 2:124.000000 3:70.000000 4:33.000000 5:215.000000 6:25.500000 7:0.161000 8:37.000000 557 | +1 1:1.000000 2:97.000000 3:70.000000 4:40.000000 5:0.000000 6:38.099998 7:0.218000 8:30.000000 558 | +1 1:8.000000 2:110.000000 3:76.000000 4:0.000000 5:0.000000 6:27.799999 7:0.237000 8:58.000000 559 | +1 1:11.000000 2:103.000000 3:68.000000 4:40.000000 5:0.000000 6:46.200001 7:0.126000 8:42.000000 560 | +1 1:11.000000 2:85.000000 3:74.000000 4:0.000000 5:0.000000 6:30.100000 7:0.300000 8:35.000000 561 | -1 1:6.000000 2:125.000000 3:76.000000 4:0.000000 5:0.000000 6:33.799999 7:0.121000 8:54.000000 562 | -1 1:0.000000 2:198.000000 3:66.000000 4:32.000000 5:274.000000 6:41.299999 7:0.502000 8:28.000000 563 | +1 1:1.000000 2:87.000000 3:68.000000 4:34.000000 5:77.000000 6:37.599998 7:0.401000 8:24.000000 564 | +1 1:6.000000 2:99.000000 3:60.000000 4:19.000000 5:54.000000 6:26.900000 7:0.497000 8:32.000000 565 | +1 1:0.000000 2:91.000000 3:80.000000 4:0.000000 5:0.000000 6:32.400002 7:0.601000 8:27.000000 566 | +1 1:2.000000 2:95.000000 3:54.000000 4:14.000000 5:88.000000 6:26.100000 7:0.748000 8:22.000000 567 | +1 1:1.000000 2:99.000000 3:72.000000 4:30.000000 5:18.000000 6:38.599998 7:0.412000 8:21.000000 568 | +1 1:6.000000 2:92.000000 3:62.000000 4:32.000000 5:126.000000 6:32.000000 7:0.085000 8:46.000000 569 | +1 1:4.000000 2:154.000000 3:72.000000 4:29.000000 5:126.000000 6:31.299999 7:0.338000 8:37.000000 570 | -1 1:0.000000 2:121.000000 3:66.000000 4:30.000000 5:165.000000 6:34.299999 7:0.203000 8:33.000000 571 | +1 1:3.000000 2:78.000000 3:70.000000 4:0.000000 5:0.000000 6:32.500000 7:0.270000 8:39.000000 572 | +1 1:2.000000 2:130.000000 3:96.000000 4:0.000000 5:0.000000 6:22.600000 7:0.268000 8:21.000000 
573 | +1 1:3.000000 2:111.000000 3:58.000000 4:31.000000 5:44.000000 6:29.500000 7:0.430000 8:22.000000 574 | +1 1:2.000000 2:98.000000 3:60.000000 4:17.000000 5:120.000000 6:34.700001 7:0.198000 8:22.000000 575 | +1 1:1.000000 2:143.000000 3:86.000000 4:30.000000 5:330.000000 6:30.100000 7:0.892000 8:23.000000 576 | +1 1:1.000000 2:119.000000 3:44.000000 4:47.000000 5:63.000000 6:35.500000 7:0.280000 8:25.000000 577 | +1 1:6.000000 2:108.000000 3:44.000000 4:20.000000 5:130.000000 6:24.000000 7:0.813000 8:35.000000 578 | -1 1:2.000000 2:118.000000 3:80.000000 4:0.000000 5:0.000000 6:42.900002 7:0.693000 8:21.000000 579 | +1 1:10.000000 2:133.000000 3:68.000000 4:0.000000 5:0.000000 6:27.000000 7:0.245000 8:36.000000 580 | -1 1:2.000000 2:197.000000 3:70.000000 4:99.000000 5:0.000000 6:34.700001 7:0.575000 8:62.000000 581 | -1 1:0.000000 2:151.000000 3:90.000000 4:46.000000 5:0.000000 6:42.099998 7:0.371000 8:21.000000 582 | +1 1:6.000000 2:109.000000 3:60.000000 4:27.000000 5:0.000000 6:25.000000 7:0.206000 8:27.000000 583 | +1 1:12.000000 2:121.000000 3:78.000000 4:17.000000 5:0.000000 6:26.500000 7:0.259000 8:62.000000 584 | +1 1:8.000000 2:100.000000 3:76.000000 4:0.000000 5:0.000000 6:38.700001 7:0.190000 8:42.000000 585 | -1 1:8.000000 2:124.000000 3:76.000000 4:24.000000 5:600.000000 6:28.700001 7:0.687000 8:52.000000 586 | +1 1:1.000000 2:93.000000 3:56.000000 4:11.000000 5:0.000000 6:22.500000 7:0.417000 8:22.000000 587 | -1 1:8.000000 2:143.000000 3:66.000000 4:0.000000 5:0.000000 6:34.900002 7:0.129000 8:41.000000 588 | +1 1:6.000000 2:103.000000 3:66.000000 4:0.000000 5:0.000000 6:24.299999 7:0.249000 8:29.000000 589 | -1 1:3.000000 2:176.000000 3:86.000000 4:27.000000 5:156.000000 6:33.299999 7:1.154000 8:52.000000 590 | +1 1:0.000000 2:73.000000 3:0.000000 4:0.000000 5:0.000000 6:21.100000 7:0.342000 8:25.000000 591 | -1 1:11.000000 2:111.000000 3:84.000000 4:40.000000 5:0.000000 6:46.799999 7:0.925000 8:45.000000 592 | +1 1:2.000000 2:112.000000 3:78.000000 4:50.000000 5:140.000000 6:39.400002 7:0.175000 8:24.000000 593 | -1 1:3.000000 2:132.000000 3:80.000000 4:0.000000 5:0.000000 6:34.400002 7:0.402000 8:44.000000 594 | +1 1:2.000000 2:82.000000 3:52.000000 4:22.000000 5:115.000000 6:28.500000 7:1.699000 8:25.000000 595 | +1 1:6.000000 2:123.000000 3:72.000000 4:45.000000 5:230.000000 6:33.599998 7:0.733000 8:34.000000 596 | -1 1:0.000000 2:188.000000 3:82.000000 4:14.000000 5:185.000000 6:32.000000 7:0.682000 8:22.000000 597 | +1 1:0.000000 2:67.000000 3:76.000000 4:0.000000 5:0.000000 6:45.299999 7:0.194000 8:46.000000 598 | +1 1:1.000000 2:89.000000 3:24.000000 4:19.000000 5:25.000000 6:27.799999 7:0.559000 8:21.000000 599 | -1 1:1.000000 2:173.000000 3:74.000000 4:0.000000 5:0.000000 6:36.799999 7:0.088000 8:38.000000 600 | +1 1:1.000000 2:109.000000 3:38.000000 4:18.000000 5:120.000000 6:23.100000 7:0.407000 8:26.000000 601 | +1 1:1.000000 2:108.000000 3:88.000000 4:19.000000 5:0.000000 6:27.100000 7:0.400000 8:24.000000 602 | +1 1:6.000000 2:96.000000 3:0.000000 4:0.000000 5:0.000000 6:23.700001 7:0.190000 8:28.000000 603 | +1 1:1.000000 2:124.000000 3:74.000000 4:36.000000 5:0.000000 6:27.799999 7:0.100000 8:30.000000 604 | -1 1:7.000000 2:150.000000 3:78.000000 4:29.000000 5:126.000000 6:35.200001 7:0.692000 8:54.000000 605 | -1 1:4.000000 2:183.000000 3:0.000000 4:0.000000 5:0.000000 6:28.400000 7:0.212000 8:36.000000 606 | +1 1:1.000000 2:124.000000 3:60.000000 4:32.000000 5:0.000000 6:35.799999 7:0.514000 8:21.000000 607 | -1 1:1.000000 2:181.000000 3:78.000000 
4:42.000000 5:293.000000 6:40.000000 7:1.258000 8:22.000000 608 | +1 1:1.000000 2:92.000000 3:62.000000 4:25.000000 5:41.000000 6:19.500000 7:0.482000 8:25.000000 609 | +1 1:0.000000 2:152.000000 3:82.000000 4:39.000000 5:272.000000 6:41.500000 7:0.270000 8:27.000000 610 | +1 1:1.000000 2:111.000000 3:62.000000 4:13.000000 5:182.000000 6:24.000000 7:0.138000 8:23.000000 611 | +1 1:3.000000 2:106.000000 3:54.000000 4:21.000000 5:158.000000 6:30.900000 7:0.292000 8:24.000000 612 | -1 1:3.000000 2:174.000000 3:58.000000 4:22.000000 5:194.000000 6:32.900002 7:0.593000 8:36.000000 613 | -1 1:7.000000 2:168.000000 3:88.000000 4:42.000000 5:321.000000 6:38.200001 7:0.787000 8:40.000000 614 | +1 1:6.000000 2:105.000000 3:80.000000 4:28.000000 5:0.000000 6:32.500000 7:0.878000 8:26.000000 615 | -1 1:11.000000 2:138.000000 3:74.000000 4:26.000000 5:144.000000 6:36.099998 7:0.557000 8:50.000000 616 | +1 1:3.000000 2:106.000000 3:72.000000 4:0.000000 5:0.000000 6:25.799999 7:0.207000 8:27.000000 617 | +1 1:6.000000 2:117.000000 3:96.000000 4:0.000000 5:0.000000 6:28.700001 7:0.157000 8:30.000000 618 | +1 1:2.000000 2:68.000000 3:62.000000 4:13.000000 5:15.000000 6:20.100000 7:0.257000 8:23.000000 619 | -1 1:9.000000 2:112.000000 3:82.000000 4:24.000000 5:0.000000 6:28.200001 7:1.282000 8:50.000000 620 | -1 1:0.000000 2:119.000000 3:0.000000 4:0.000000 5:0.000000 6:32.400002 7:0.141000 8:24.000000 621 | +1 1:2.000000 2:112.000000 3:86.000000 4:42.000000 5:160.000000 6:38.400002 7:0.246000 8:28.000000 622 | +1 1:2.000000 2:92.000000 3:76.000000 4:20.000000 5:0.000000 6:24.200001 7:1.698000 8:28.000000 623 | +1 1:6.000000 2:183.000000 3:94.000000 4:0.000000 5:0.000000 6:40.799999 7:1.461000 8:45.000000 624 | +1 1:0.000000 2:94.000000 3:70.000000 4:27.000000 5:115.000000 6:43.500000 7:0.347000 8:21.000000 625 | +1 1:2.000000 2:108.000000 3:64.000000 4:0.000000 5:0.000000 6:30.799999 7:0.158000 8:21.000000 626 | +1 1:4.000000 2:90.000000 3:88.000000 4:47.000000 5:54.000000 6:37.700001 7:0.362000 8:29.000000 627 | +1 1:0.000000 2:125.000000 3:68.000000 4:0.000000 5:0.000000 6:24.700001 7:0.206000 8:21.000000 628 | +1 1:0.000000 2:132.000000 3:78.000000 4:0.000000 5:0.000000 6:32.400002 7:0.393000 8:21.000000 629 | +1 1:5.000000 2:128.000000 3:80.000000 4:0.000000 5:0.000000 6:34.599998 7:0.144000 8:45.000000 630 | +1 1:4.000000 2:94.000000 3:65.000000 4:22.000000 5:0.000000 6:24.700001 7:0.148000 8:21.000000 631 | -1 1:7.000000 2:114.000000 3:64.000000 4:0.000000 5:0.000000 6:27.400000 7:0.732000 8:34.000000 632 | +1 1:0.000000 2:102.000000 3:78.000000 4:40.000000 5:90.000000 6:34.500000 7:0.238000 8:24.000000 633 | +1 1:2.000000 2:111.000000 3:60.000000 4:0.000000 5:0.000000 6:26.200001 7:0.343000 8:23.000000 634 | +1 1:1.000000 2:128.000000 3:82.000000 4:17.000000 5:183.000000 6:27.500000 7:0.115000 8:22.000000 635 | +1 1:10.000000 2:92.000000 3:62.000000 4:0.000000 5:0.000000 6:25.900000 7:0.167000 8:31.000000 636 | -1 1:13.000000 2:104.000000 3:72.000000 4:0.000000 5:0.000000 6:31.200001 7:0.465000 8:38.000000 637 | +1 1:5.000000 2:104.000000 3:74.000000 4:0.000000 5:0.000000 6:28.799999 7:0.153000 8:48.000000 638 | +1 1:2.000000 2:94.000000 3:76.000000 4:18.000000 5:66.000000 6:31.600000 7:0.649000 8:23.000000 639 | -1 1:7.000000 2:97.000000 3:76.000000 4:32.000000 5:91.000000 6:40.900002 7:0.871000 8:32.000000 640 | +1 1:1.000000 2:100.000000 3:74.000000 4:12.000000 5:46.000000 6:19.500000 7:0.149000 8:28.000000 641 | +1 1:0.000000 2:102.000000 3:86.000000 4:17.000000 5:105.000000 6:29.299999 
7:0.695000 8:27.000000 642 | +1 1:4.000000 2:128.000000 3:70.000000 4:0.000000 5:0.000000 6:34.299999 7:0.303000 8:24.000000 643 | -1 1:6.000000 2:147.000000 3:80.000000 4:0.000000 5:0.000000 6:29.500000 7:0.178000 8:50.000000 644 | +1 1:4.000000 2:90.000000 3:0.000000 4:0.000000 5:0.000000 6:28.000000 7:0.610000 8:31.000000 645 | +1 1:3.000000 2:103.000000 3:72.000000 4:30.000000 5:152.000000 6:27.600000 7:0.730000 8:27.000000 646 | +1 1:2.000000 2:157.000000 3:74.000000 4:35.000000 5:440.000000 6:39.400002 7:0.134000 8:30.000000 647 | -1 1:1.000000 2:167.000000 3:74.000000 4:17.000000 5:144.000000 6:23.400000 7:0.447000 8:33.000000 648 | -1 1:0.000000 2:179.000000 3:50.000000 4:36.000000 5:159.000000 6:37.799999 7:0.455000 8:22.000000 649 | -1 1:11.000000 2:136.000000 3:84.000000 4:35.000000 5:130.000000 6:28.299999 7:0.260000 8:42.000000 650 | +1 1:0.000000 2:107.000000 3:60.000000 4:25.000000 5:0.000000 6:26.400000 7:0.133000 8:23.000000 651 | +1 1:1.000000 2:91.000000 3:54.000000 4:25.000000 5:100.000000 6:25.200001 7:0.234000 8:23.000000 652 | +1 1:1.000000 2:117.000000 3:60.000000 4:23.000000 5:106.000000 6:33.799999 7:0.466000 8:27.000000 653 | +1 1:5.000000 2:123.000000 3:74.000000 4:40.000000 5:77.000000 6:34.099998 7:0.269000 8:28.000000 654 | +1 1:2.000000 2:120.000000 3:54.000000 4:0.000000 5:0.000000 6:26.799999 7:0.455000 8:27.000000 655 | +1 1:1.000000 2:106.000000 3:70.000000 4:28.000000 5:135.000000 6:34.200001 7:0.142000 8:22.000000 656 | -1 1:2.000000 2:155.000000 3:52.000000 4:27.000000 5:540.000000 6:38.700001 7:0.240000 8:25.000000 657 | +1 1:2.000000 2:101.000000 3:58.000000 4:35.000000 5:90.000000 6:21.799999 7:0.155000 8:22.000000 658 | +1 1:1.000000 2:120.000000 3:80.000000 4:48.000000 5:200.000000 6:38.900002 7:1.162000 8:41.000000 659 | +1 1:11.000000 2:127.000000 3:106.000000 4:0.000000 5:0.000000 6:39.000000 7:0.190000 8:51.000000 660 | -1 1:3.000000 2:80.000000 3:82.000000 4:31.000000 5:70.000000 6:34.200001 7:1.292000 8:27.000000 661 | +1 1:10.000000 2:162.000000 3:84.000000 4:0.000000 5:0.000000 6:27.700001 7:0.182000 8:54.000000 662 | -1 1:1.000000 2:199.000000 3:76.000000 4:43.000000 5:0.000000 6:42.900002 7:1.394000 8:22.000000 663 | -1 1:8.000000 2:167.000000 3:106.000000 4:46.000000 5:231.000000 6:37.599998 7:0.165000 8:43.000000 664 | -1 1:9.000000 2:145.000000 3:80.000000 4:46.000000 5:130.000000 6:37.900002 7:0.637000 8:40.000000 665 | -1 1:6.000000 2:115.000000 3:60.000000 4:39.000000 5:0.000000 6:33.700001 7:0.245000 8:40.000000 666 | +1 1:1.000000 2:112.000000 3:80.000000 4:45.000000 5:132.000000 6:34.799999 7:0.217000 8:24.000000 667 | -1 1:4.000000 2:145.000000 3:82.000000 4:18.000000 5:0.000000 6:32.500000 7:0.235000 8:70.000000 668 | -1 1:10.000000 2:111.000000 3:70.000000 4:27.000000 5:0.000000 6:27.500000 7:0.141000 8:40.000000 669 | +1 1:6.000000 2:98.000000 3:58.000000 4:33.000000 5:190.000000 6:34.000000 7:0.430000 8:43.000000 670 | +1 1:9.000000 2:154.000000 3:78.000000 4:30.000000 5:100.000000 6:30.900000 7:0.164000 8:45.000000 671 | +1 1:6.000000 2:165.000000 3:68.000000 4:26.000000 5:168.000000 6:33.599998 7:0.631000 8:49.000000 672 | +1 1:1.000000 2:99.000000 3:58.000000 4:10.000000 5:0.000000 6:25.400000 7:0.551000 8:21.000000 673 | +1 1:10.000000 2:68.000000 3:106.000000 4:23.000000 5:49.000000 6:35.500000 7:0.285000 8:47.000000 674 | +1 1:3.000000 2:123.000000 3:100.000000 4:35.000000 5:240.000000 6:57.299999 7:0.880000 8:22.000000 675 | +1 1:8.000000 2:91.000000 3:82.000000 4:0.000000 5:0.000000 6:35.599998 7:0.587000 
8:68.000000 676 | -1 1:6.000000 2:195.000000 3:70.000000 4:0.000000 5:0.000000 6:30.900000 7:0.328000 8:31.000000 677 | -1 1:9.000000 2:156.000000 3:86.000000 4:0.000000 5:0.000000 6:24.799999 7:0.230000 8:53.000000 678 | +1 1:0.000000 2:93.000000 3:60.000000 4:0.000000 5:0.000000 6:35.299999 7:0.263000 8:25.000000 679 | -1 1:3.000000 2:121.000000 3:52.000000 4:0.000000 5:0.000000 6:36.000000 7:0.127000 8:25.000000 680 | +1 1:2.000000 2:101.000000 3:58.000000 4:17.000000 5:265.000000 6:24.200001 7:0.614000 8:23.000000 681 | +1 1:2.000000 2:56.000000 3:56.000000 4:28.000000 5:45.000000 6:24.200001 7:0.332000 8:22.000000 682 | -1 1:0.000000 2:162.000000 3:76.000000 4:36.000000 5:0.000000 6:49.599998 7:0.364000 8:26.000000 683 | +1 1:0.000000 2:95.000000 3:64.000000 4:39.000000 5:105.000000 6:44.599998 7:0.366000 8:22.000000 684 | -1 1:4.000000 2:125.000000 3:80.000000 4:0.000000 5:0.000000 6:32.299999 7:0.536000 8:27.000000 685 | +1 1:5.000000 2:136.000000 3:82.000000 4:0.000000 5:0.000000 6:0.000000 7:0.640000 8:69.000000 686 | +1 1:2.000000 2:129.000000 3:74.000000 4:26.000000 5:205.000000 6:33.200001 7:0.591000 8:25.000000 687 | +1 1:3.000000 2:130.000000 3:64.000000 4:0.000000 5:0.000000 6:23.100000 7:0.314000 8:22.000000 688 | +1 1:1.000000 2:107.000000 3:50.000000 4:19.000000 5:0.000000 6:28.299999 7:0.181000 8:29.000000 689 | +1 1:1.000000 2:140.000000 3:74.000000 4:26.000000 5:180.000000 6:24.100000 7:0.828000 8:23.000000 690 | -1 1:1.000000 2:144.000000 3:82.000000 4:46.000000 5:180.000000 6:46.099998 7:0.335000 8:46.000000 691 | +1 1:8.000000 2:107.000000 3:80.000000 4:0.000000 5:0.000000 6:24.600000 7:0.856000 8:34.000000 692 | -1 1:13.000000 2:158.000000 3:114.000000 4:0.000000 5:0.000000 6:42.299999 7:0.257000 8:44.000000 693 | +1 1:2.000000 2:121.000000 3:70.000000 4:32.000000 5:95.000000 6:39.099998 7:0.886000 8:23.000000 694 | -1 1:7.000000 2:129.000000 3:68.000000 4:49.000000 5:125.000000 6:38.500000 7:0.439000 8:43.000000 695 | +1 1:2.000000 2:90.000000 3:60.000000 4:0.000000 5:0.000000 6:23.500000 7:0.191000 8:25.000000 696 | -1 1:7.000000 2:142.000000 3:90.000000 4:24.000000 5:480.000000 6:30.400000 7:0.128000 8:43.000000 697 | -1 1:3.000000 2:169.000000 3:74.000000 4:19.000000 5:125.000000 6:29.900000 7:0.268000 8:31.000000 698 | +1 1:0.000000 2:99.000000 3:0.000000 4:0.000000 5:0.000000 6:25.000000 7:0.253000 8:22.000000 699 | +1 1:4.000000 2:127.000000 3:88.000000 4:11.000000 5:155.000000 6:34.500000 7:0.598000 8:28.000000 700 | +1 1:4.000000 2:118.000000 3:70.000000 4:0.000000 5:0.000000 6:44.500000 7:0.904000 8:26.000000 701 | +1 1:2.000000 2:122.000000 3:76.000000 4:27.000000 5:200.000000 6:35.900002 7:0.483000 8:26.000000 702 | -1 1:6.000000 2:125.000000 3:78.000000 4:31.000000 5:0.000000 6:27.600000 7:0.565000 8:49.000000 703 | -1 1:1.000000 2:168.000000 3:88.000000 4:29.000000 5:0.000000 6:35.000000 7:0.905000 8:52.000000 704 | +1 1:2.000000 2:129.000000 3:0.000000 4:0.000000 5:0.000000 6:38.500000 7:0.304000 8:41.000000 705 | +1 1:4.000000 2:110.000000 3:76.000000 4:20.000000 5:100.000000 6:28.400000 7:0.118000 8:27.000000 706 | +1 1:6.000000 2:80.000000 3:80.000000 4:36.000000 5:0.000000 6:39.799999 7:0.177000 8:28.000000 707 | -1 1:10.000000 2:115.000000 3:0.000000 4:0.000000 5:0.000000 6:0.000000 7:0.261000 8:30.000000 708 | +1 1:2.000000 2:127.000000 3:46.000000 4:21.000000 5:335.000000 6:34.400002 7:0.176000 8:22.000000 709 | -1 1:9.000000 2:164.000000 3:78.000000 4:0.000000 5:0.000000 6:32.799999 7:0.148000 8:45.000000 710 | -1 1:2.000000 2:93.000000 
3:64.000000 4:32.000000 5:160.000000 6:38.000000 7:0.674000 8:23.000000 711 | +1 1:3.000000 2:158.000000 3:64.000000 4:13.000000 5:387.000000 6:31.200001 7:0.295000 8:24.000000 712 | +1 1:5.000000 2:126.000000 3:78.000000 4:27.000000 5:22.000000 6:29.600000 7:0.439000 8:40.000000 713 | -1 1:10.000000 2:129.000000 3:62.000000 4:36.000000 5:0.000000 6:41.200001 7:0.441000 8:38.000000 714 | +1 1:0.000000 2:134.000000 3:58.000000 4:20.000000 5:291.000000 6:26.400000 7:0.352000 8:21.000000 715 | +1 1:3.000000 2:102.000000 3:74.000000 4:0.000000 5:0.000000 6:29.500000 7:0.121000 8:32.000000 716 | -1 1:7.000000 2:187.000000 3:50.000000 4:33.000000 5:392.000000 6:33.900002 7:0.826000 8:34.000000 717 | -1 1:3.000000 2:173.000000 3:78.000000 4:39.000000 5:185.000000 6:33.799999 7:0.970000 8:31.000000 718 | +1 1:10.000000 2:94.000000 3:72.000000 4:18.000000 5:0.000000 6:23.100000 7:0.595000 8:56.000000 719 | +1 1:1.000000 2:108.000000 3:60.000000 4:46.000000 5:178.000000 6:35.500000 7:0.415000 8:24.000000 720 | -1 1:5.000000 2:97.000000 3:76.000000 4:27.000000 5:0.000000 6:35.599998 7:0.378000 8:52.000000 721 | +1 1:4.000000 2:83.000000 3:86.000000 4:19.000000 5:0.000000 6:29.299999 7:0.317000 8:34.000000 722 | +1 1:1.000000 2:114.000000 3:66.000000 4:36.000000 5:200.000000 6:38.099998 7:0.289000 8:21.000000 723 | -1 1:1.000000 2:149.000000 3:68.000000 4:29.000000 5:127.000000 6:29.299999 7:0.349000 8:42.000000 724 | +1 1:5.000000 2:117.000000 3:86.000000 4:30.000000 5:105.000000 6:39.099998 7:0.251000 8:42.000000 725 | +1 1:1.000000 2:111.000000 3:94.000000 4:0.000000 5:0.000000 6:32.799999 7:0.265000 8:45.000000 726 | +1 1:4.000000 2:112.000000 3:78.000000 4:40.000000 5:0.000000 6:39.400002 7:0.236000 8:38.000000 727 | +1 1:1.000000 2:116.000000 3:78.000000 4:29.000000 5:180.000000 6:36.099998 7:0.496000 8:25.000000 728 | +1 1:0.000000 2:141.000000 3:84.000000 4:26.000000 5:0.000000 6:32.400002 7:0.433000 8:22.000000 729 | +1 1:2.000000 2:175.000000 3:88.000000 4:0.000000 5:0.000000 6:22.900000 7:0.326000 8:22.000000 730 | +1 1:2.000000 2:92.000000 3:52.000000 4:0.000000 5:0.000000 6:30.100000 7:0.141000 8:22.000000 731 | -1 1:3.000000 2:130.000000 3:78.000000 4:23.000000 5:79.000000 6:28.400000 7:0.323000 8:34.000000 732 | -1 1:8.000000 2:120.000000 3:86.000000 4:0.000000 5:0.000000 6:28.400000 7:0.259000 8:22.000000 733 | -1 1:2.000000 2:174.000000 3:88.000000 4:37.000000 5:120.000000 6:44.500000 7:0.646000 8:24.000000 734 | +1 1:2.000000 2:106.000000 3:56.000000 4:27.000000 5:165.000000 6:29.000000 7:0.426000 8:22.000000 735 | +1 1:2.000000 2:105.000000 3:75.000000 4:0.000000 5:0.000000 6:23.299999 7:0.560000 8:53.000000 736 | +1 1:4.000000 2:95.000000 3:60.000000 4:32.000000 5:0.000000 6:35.400002 7:0.284000 8:28.000000 737 | +1 1:0.000000 2:126.000000 3:86.000000 4:27.000000 5:120.000000 6:27.400000 7:0.515000 8:21.000000 738 | +1 1:8.000000 2:65.000000 3:72.000000 4:23.000000 5:0.000000 6:32.000000 7:0.600000 8:42.000000 739 | +1 1:2.000000 2:99.000000 3:60.000000 4:17.000000 5:160.000000 6:36.599998 7:0.453000 8:21.000000 740 | -1 1:1.000000 2:102.000000 3:74.000000 4:0.000000 5:0.000000 6:39.500000 7:0.293000 8:42.000000 741 | -1 1:11.000000 2:120.000000 3:80.000000 4:37.000000 5:150.000000 6:42.299999 7:0.785000 8:48.000000 742 | +1 1:3.000000 2:102.000000 3:44.000000 4:20.000000 5:94.000000 6:30.799999 7:0.400000 8:26.000000 743 | +1 1:1.000000 2:109.000000 3:58.000000 4:18.000000 5:116.000000 6:28.500000 7:0.219000 8:22.000000 744 | -1 1:9.000000 2:140.000000 3:94.000000 4:0.000000 
5:0.000000 6:32.700001 7:0.734000 8:45.000000 745 | +1 1:13.000000 2:153.000000 3:88.000000 4:37.000000 5:140.000000 6:40.599998 7:1.174000 8:39.000000 746 | +1 1:12.000000 2:100.000000 3:84.000000 4:33.000000 5:105.000000 6:30.000000 7:0.488000 8:46.000000 747 | -1 1:1.000000 2:147.000000 3:94.000000 4:41.000000 5:0.000000 6:49.299999 7:0.358000 8:27.000000 748 | +1 1:1.000000 2:81.000000 3:74.000000 4:41.000000 5:57.000000 6:46.299999 7:1.096000 8:32.000000 749 | -1 1:3.000000 2:187.000000 3:70.000000 4:22.000000 5:200.000000 6:36.400002 7:0.408000 8:36.000000 750 | -1 1:6.000000 2:162.000000 3:62.000000 4:0.000000 5:0.000000 6:24.299999 7:0.178000 8:50.000000 751 | -1 1:4.000000 2:136.000000 3:70.000000 4:0.000000 5:0.000000 6:31.200001 7:1.182000 8:22.000000 752 | +1 1:1.000000 2:121.000000 3:78.000000 4:39.000000 5:74.000000 6:39.000000 7:0.261000 8:28.000000 753 | +1 1:3.000000 2:108.000000 3:62.000000 4:24.000000 5:0.000000 6:26.000000 7:0.223000 8:25.000000 754 | -1 1:0.000000 2:181.000000 3:88.000000 4:44.000000 5:510.000000 6:43.299999 7:0.222000 8:26.000000 755 | -1 1:8.000000 2:154.000000 3:78.000000 4:32.000000 5:0.000000 6:32.400002 7:0.443000 8:45.000000 756 | -1 1:1.000000 2:128.000000 3:88.000000 4:39.000000 5:110.000000 6:36.500000 7:1.057000 8:37.000000 757 | +1 1:7.000000 2:137.000000 3:90.000000 4:41.000000 5:0.000000 6:32.000000 7:0.391000 8:39.000000 758 | -1 1:0.000000 2:123.000000 3:72.000000 4:0.000000 5:0.000000 6:36.299999 7:0.258000 8:52.000000 759 | +1 1:1.000000 2:106.000000 3:76.000000 4:0.000000 5:0.000000 6:37.500000 7:0.197000 8:26.000000 760 | -1 1:6.000000 2:190.000000 3:92.000000 4:0.000000 5:0.000000 6:35.500000 7:0.278000 8:66.000000 761 | +1 1:2.000000 2:88.000000 3:58.000000 4:26.000000 5:16.000000 6:28.400000 7:0.766000 8:22.000000 762 | -1 1:9.000000 2:170.000000 3:74.000000 4:31.000000 5:0.000000 6:44.000000 7:0.403000 8:43.000000 763 | +1 1:9.000000 2:89.000000 3:62.000000 4:0.000000 5:0.000000 6:22.500000 7:0.142000 8:33.000000 764 | +1 1:10.000000 2:101.000000 3:76.000000 4:48.000000 5:180.000000 6:32.900002 7:0.171000 8:63.000000 765 | +1 1:2.000000 2:122.000000 3:70.000000 4:27.000000 5:0.000000 6:36.799999 7:0.340000 8:27.000000 766 | +1 1:5.000000 2:121.000000 3:72.000000 4:23.000000 5:112.000000 6:26.200001 7:0.245000 8:30.000000 767 | -1 1:1.000000 2:126.000000 3:60.000000 4:0.000000 5:0.000000 6:30.100000 7:0.349000 8:47.000000 768 | +1 1:1.000000 2:93.000000 3:70.000000 4:31.000000 5:0.000000 6:30.400000 7:0.315000 8:23.000000 769 | -------------------------------------------------------------------------------- /homework/Bonus2/Bonus2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Bonus2: Build a Supervised Autoencoder.\n", 8 | "\n", 9 | "### Name: [Your-Name?]\n" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "\n", 17 | "PCA and the standard autoencoder are unsupervised dimensionality reduction methods, and their learned features are not discriminative. If you build a classifier upon the low-dimenional features extracted by PCA and autoencoder, you will find the classification accuracy very poor.\n", 18 | "\n", 19 | "Linear discriminant analysis (LDA) is a traditionally supervised dimensionality reduction method for learning low-dimensional features which are highly discriminative. 
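(For intuition only — not part of the assignment — the snippet below is a minimal scikit-learn sketch of how a supervised method such as LDA uses labels to produce discriminative low-dimensional features. The names ```x_train``` and ```y_train``` stand for any labeled dataset, e.g., the MNIST arrays loaded below.)

```python
# Minimal LDA sketch (illustration only; x_train / y_train are assumed labeled data)
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

lda = LinearDiscriminantAnalysis(n_components=2)  # at most (num_classes - 1) components
z_train = lda.fit_transform(x_train, y_train)     # supervised: the labels shape the projection
print('Shape of z_train: ' + str(z_train.shape))  # low-dim, label-aware features
```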
Likewise, can we extend the autoencoder to supervised learning?\n",
20 | "\n"
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "**You are required to build and train a supervised autoencoder that looks like the following.** You are required to add other layers properly to alleviate overfitting.\n",
28 | "\n",
29 | "\n",
30 | "![Network Structure](https://github.com/wangshusen/CS583A-2019Spring/blob/master/homework/HM5/supervised_ae.png?raw=true \"NetworkStructure\")\n"
31 | ]
32 | },
33 | {
34 | "cell_type": "markdown",
35 | "metadata": {},
36 | "source": [
37 | "## 0. You will do the following:\n",
38 | "\n",
39 | "1. Read and run my code to train a standard dense autoencoder.\n",
40 | "\n",
41 | "2. Build and train a supervised autoencoder, visualize the low-dim features and the reconstructions, and evaluate whether the learned low-dim features are discriminative.\n",
42 | " \n",
43 | "3. Convert the .IPYNB file to a .HTML file.\n",
44 | "\n",
45 | "    * The HTML file must contain the code and the output after execution.\n",
46 | " \n",
47 | " \n",
48 | " \n",
49 | "4. Upload this .HTML file to your Google Drive, Dropbox, or GitHub repo.\n",
50 | "\n",
51 | "5. Submit the link to this .HTML file to Canvas.\n",
52 | "\n",
53 | "    * Example: https://github.com/wangshusen/CS583-2020S/blob/master/homework/Bonus2/Bonus2.html\n"
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "## 1. Data preparation"
61 | ]
62 | },
63 | {
64 | "cell_type": "markdown",
65 | "metadata": {},
66 | "source": [
67 | "### 1.1. Load data\n"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": null,
73 | "metadata": {},
74 | "outputs": [],
75 | "source": [
76 | "from keras.datasets import mnist\n",
77 | "\n",
78 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
79 | "x_train = x_train.reshape(60000, 28*28).astype('float32') / 255.\n",
80 | "x_test = x_test.reshape(10000, 28*28).astype('float32') / 255.\n",
81 | "\n",
82 | "print('Shape of x_train: ' + str(x_train.shape)) \n",
83 | "print('Shape of x_test: ' + str(x_test.shape))\n",
84 | "print('Shape of y_train: ' + str(y_train.shape))\n",
85 | "print('Shape of y_test: ' + str(y_test.shape))"
86 | ]
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": [
92 | "### 1.2. One-hot encode the labels\n",
93 | "\n",
94 | "In the input, a label is a scalar in $\\{0, 1, \\cdots , 9\\}$. One-hot encoding transforms such a scalar to a $10$-dim vector. E.g., a scalar ```y_train[j]=3``` is transformed to the vector ```y_train_vec[j]=[0, 0, 0, 1, 0, 0, 0, 0, 0, 0]```.\n",
95 | "\n",
96 | "1. Define a function ```to_one_hot``` that transforms an $n\\times 1$ array to an $n\\times 10$ matrix.\n",
97 | "\n",
98 | "2. Apply the function to ```y_train``` and ```y_test```."
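(Aside: instead of hand-writing ```to_one_hot```, Keras ships an equivalent helper. The one-liner below is an optional alternative, assuming the ```y_train```/```y_test``` arrays loaded above; it produces the same matrices as the implementation in the next cell.)

```python
# Optional alternative to a hand-written to_one_hot (same result):
from keras.utils import to_categorical

y_train_vec = to_categorical(y_train, num_classes=10)
y_test_vec = to_categorical(y_test, num_classes=10)
```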
99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": {}, 105 | "outputs": [], 106 | "source": [ 107 | "import numpy\n", 108 | "\n", 109 | "def to_one_hot(y, num_class=10):\n", 110 | " results = numpy.zeros((len(y), num_class))\n", 111 | " for i, label in enumerate(y):\n", 112 | " results[i, label] = 1.\n", 113 | " return results\n", 114 | "\n", 115 | "y_train_vec = to_one_hot(y_train)\n", 116 | "y_test_vec = to_one_hot(y_test)\n", 117 | "\n", 118 | "print('Shape of y_train_vec: ' + str(y_train_vec.shape))\n", 119 | "print('Shape of y_test_vec: ' + str(y_test_vec.shape))\n", 120 | "\n", 121 | "print(y_train[0])\n", 122 | "print(y_train_vec[0])" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "### 1.3. Randomly partition the training set to training and validation sets\n", 130 | "\n", 131 | "Randomly partition the 60K training samples to 2 sets:\n", 132 | "* a training set containing 10K samples;\n", 133 | "* a validation set containing 50K samples. (You can use only 10K to save time.)\n" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | "rand_indices = numpy.random.permutation(60000)\n", 143 | "train_indices = rand_indices[0:10000]\n", 144 | "valid_indices = rand_indices[10000:20000]\n", 145 | "\n", 146 | "x_val = x_train[valid_indices, :]\n", 147 | "y_val = y_train_vec[valid_indices, :]\n", 148 | "\n", 149 | "x_tr = x_train[train_indices, :]\n", 150 | "y_tr = y_train_vec[train_indices, :]\n", 151 | "\n", 152 | "print('Shape of x_tr: ' + str(x_tr.shape))\n", 153 | "print('Shape of y_tr: ' + str(y_tr.shape))\n", 154 | "print('Shape of x_val: ' + str(x_val.shape))\n", 155 | "print('Shape of y_val: ' + str(y_val.shape))" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "## 2. Build an unsupervised autoencoder and tune its hyper-parameters\n", 163 | "\n", 164 | "1. Build a dense autoencoder model\n", 165 | "2. Use the validation data to tune the hyper-parameters (e.g., network structure, and optimization algorithm)\n", 166 | " * Do NOT use test data for hyper-parameter tuning!!!\n", 167 | " \n", 168 | "3. Try to achieve a validation loss as low as possible.\n", 169 | "4. Evaluate the model on the test set.\n", 170 | "5. Visualize the low-dim features and reconstructions." 171 | ] 172 | }, 173 | { 174 | "cell_type": "markdown", 175 | "metadata": {}, 176 | "source": [ 177 | "### 2.1. 
Build the model" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": null, 183 | "metadata": {}, 184 | "outputs": [], 185 | "source": [ 186 | "from keras.layers import Dense, Input\n", 187 | "from keras import models\n", 188 | "\n", 189 | "input_img = Input(shape=(784,), name='input_img')\n", 190 | "\n", 191 | "encode1 = Dense(128, activation='relu', name='encode1')(input_img)\n", 192 | "encode2 = Dense(32, activation='relu', name='encode2')(encode1)\n", 193 | "encode3 = Dense(8, activation='relu', name='encode3')(encode2)\n", 194 | "bottleneck = Dense(2, activation='relu', name='bottleneck')(encode3)\n", 195 | "decode1 = Dense(8, activation='relu', name='decode1')(bottleneck)\n", 196 | "decode2 = Dense(32, activation='relu', name='decode2')(decode1)\n", 197 | "decode3 = Dense(128, activation='relu', name='decode3')(decode2)\n", 198 | "decode4 = Dense(784, activation='relu', name='decode4')(decode3)\n", 199 | "\n", 200 | "ae = models.Model(input_img, decode4)\n", 201 | "\n", 202 | "ae.summary()" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": null, 208 | "metadata": {}, 209 | "outputs": [], 210 | "source": [ 211 | "# print the network structure to a PDF file\n", 212 | "\n", 213 | "from IPython.display import SVG\n", 214 | "from keras.utils.vis_utils import model_to_dot, plot_model\n", 215 | "\n", 216 | "SVG(model_to_dot(ae, show_shapes=False).create(prog='dot', format='svg'))\n", 217 | "\n", 218 | "plot_model(\n", 219 | " model=ae, show_shapes=False,\n", 220 | " to_file='unsupervised_ae.pdf'\n", 221 | ")\n", 222 | "\n", 223 | "# you can find the file \"unsupervised_ae.pdf\" in the current directory." 224 | ] 225 | }, 226 | { 227 | "cell_type": "markdown", 228 | "metadata": {}, 229 | "source": [ 230 | "### 2.2. Train the model and tune the hyper-parameters" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": null, 236 | "metadata": {}, 237 | "outputs": [], 238 | "source": [ 239 | "from keras import optimizers\n", 240 | "\n", 241 | "learning_rate = 1E-3 # to be tuned!\n", 242 | "\n", 243 | "ae.compile(loss='mean_squared_error',\n", 244 | " optimizer=optimizers.RMSprop(lr=learning_rate))" 245 | ] 246 | }, 247 | { 248 | "cell_type": "code", 249 | "execution_count": null, 250 | "metadata": {}, 251 | "outputs": [], 252 | "source": [ 253 | "history = ae.fit(x_tr, x_tr, \n", 254 | " batch_size=128, \n", 255 | " epochs=100, \n", 256 | " validation_data=(x_val, x_val))" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": null, 262 | "metadata": {}, 263 | "outputs": [], 264 | "source": [ 265 | "import matplotlib.pyplot as plt\n", 266 | "%matplotlib inline\n", 267 | "\n", 268 | "loss = history.history['loss']\n", 269 | "val_loss = history.history['val_loss']\n", 270 | "\n", 271 | "epochs = range(len(loss))\n", 272 | "\n", 273 | "plt.plot(epochs, loss, 'bo', label='Training Loss')\n", 274 | "plt.plot(epochs, val_loss, 'r', label='Validation Loss')\n", 275 | "plt.xlabel('Epochs')\n", 276 | "plt.ylabel('Loss')\n", 277 | "plt.legend()\n", 278 | "plt.show()" 279 | ] 280 | }, 281 | { 282 | "cell_type": "markdown", 283 | "metadata": {}, 284 | "source": [ 285 | "### 2.3. 
Visualize the reconstructed test images"
286 | ]
287 | },
288 | {
289 | "cell_type": "code",
290 | "execution_count": null,
291 | "metadata": {},
292 | "outputs": [],
293 | "source": [
294 | "ae_output = ae.predict(x_test).reshape((10000, 28, 28))\n",
295 | "\n",
296 | "ROW = 5\n",
297 | "COLUMN = 4\n",
298 | "\n",
299 | "x = ae_output\n",
300 | "fname = 'reconstruct_ae.pdf'\n",
301 | "\n",
302 | "fig, axes = plt.subplots(nrows=ROW, ncols=COLUMN, figsize=(8, 10))\n",
303 | "for ax, i in zip(axes.flat, numpy.arange(ROW*COLUMN)):\n",
304 | "    image = x[i].reshape(28, 28)\n",
305 | "    ax.imshow(image, cmap='gray')\n",
306 | "    ax.axis('off')\n",
307 | "\n",
308 | "plt.tight_layout()\n",
309 | "plt.savefig(fname)\n",
310 | "plt.show()"
311 | ]
312 | },
313 | {
314 | "cell_type": "markdown",
315 | "metadata": {},
316 | "source": [
317 | "### 2.4. Evaluate the model on the test set\n",
318 | "\n",
319 | "Do NOT use the test set until this step. Make sure that your model parameters and hyper-parameters are independent of the test set."
320 | ]
321 | },
322 | {
323 | "cell_type": "code",
324 | "execution_count": null,
325 | "metadata": {},
326 | "outputs": [],
327 | "source": [
328 | "loss = ae.evaluate(x_test, x_test)\n",
329 | "print('loss = ' + str(loss))"
330 | ]
331 | },
332 | {
333 | "cell_type": "markdown",
334 | "metadata": {},
335 | "source": [
336 | "### 2.5. Visualize the low-dimensional features"
337 | ]
338 | },
339 | {
340 | "cell_type": "code",
341 | "execution_count": null,
342 | "metadata": {},
343 | "outputs": [],
344 | "source": [
345 | "# build the encoder network\n",
346 | "ae_encoder = models.Model(input_img, bottleneck)\n",
347 | "ae_encoder.summary()"
348 | ]
349 | },
350 | {
351 | "cell_type": "code",
352 | "execution_count": null,
353 | "metadata": {},
354 | "outputs": [],
355 | "source": [
356 | "# extract low-dimensional features from the test data\n",
357 | "encoded_test = ae_encoder.predict(x_test)\n",
358 | "print('Shape of encoded_test: ' + str(encoded_test.shape))"
359 | ]
360 | },
361 | {
362 | "cell_type": "code",
363 | "execution_count": null,
364 | "metadata": {},
365 | "outputs": [],
366 | "source": [
367 | "colors = numpy.array(['r', 'g', 'b', 'm', 'c', 'k', 'y', 'purple', 'darkred', 'navy'])\n",
368 | "colors_test = colors[y_test]\n",
369 | "\n",
370 | "\n",
371 | "import matplotlib.pyplot as plt\n",
372 | "%matplotlib inline\n",
373 | "\n",
374 | "fig = plt.figure(figsize=(8, 8))\n",
375 | "plt.scatter(encoded_test[:, 0], encoded_test[:, 1], s=10, c=colors_test, edgecolors=colors_test)\n",
376 | "plt.axis('off')\n",
377 | "plt.tight_layout()\n",
378 | "fname = 'ae_code.pdf'\n",
379 | "plt.savefig(fname)"
380 | ]
381 | },
382 | {
383 | "cell_type": "markdown",
384 | "metadata": {},
385 | "source": [
386 | "#### Remark:\n",
387 | "\n",
388 | "Judging from the visualization, the low-dim features do not seem discriminative, as 2D features from different classes are mixed. Let's quantitatively find out whether they are discriminative."
389 | ]
390 | },
391 | {
392 | "cell_type": "markdown",
393 | "metadata": {},
394 | "source": [
395 | "## 3. Are the learned low-dim features discriminative?\n",
396 | "\n",
397 | "To find the answer, let's train a classifier on the training set (the extracted 2-dim features) and evaluate it on the test set."
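(Optional quick sanity check before the neural classifier below: a simple k-nearest-neighbor classifier fit on the 2-D codes gives a rough read of their discriminability. This sketch assumes the objects defined above — ```ae_encoder```, ```x_tr```, ```x_test``` — plus the integer labels ```y_train[train_indices]``` and ```y_test``` from Section 1.)

```python
# Optional rough check: k-NN accuracy on the 2-D codes (assumes objects defined above)
from sklearn.neighbors import KNeighborsClassifier

knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(ae_encoder.predict(x_tr), y_train[train_indices])  # integer labels, not one-hot
acc = knn.score(ae_encoder.predict(x_test), y_test)
print('k-NN test accuracy = ' + str(acc))
```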
398 | ]
399 | },
400 | {
401 | "cell_type": "code",
402 | "execution_count": null,
403 | "metadata": {},
404 | "outputs": [],
405 | "source": [
406 | "# extract the 2D features from the training, validation, and test samples\n",
407 | "f_tr = ae_encoder.predict(x_tr)\n",
408 | "f_val = ae_encoder.predict(x_val)\n",
409 | "f_te = ae_encoder.predict(x_test)\n",
410 | "\n",
411 | "print('Shape of f_tr: ' + str(f_tr.shape))\n",
412 | "print('Shape of f_te: ' + str(f_te.shape))"
413 | ]
414 | },
415 | {
416 | "cell_type": "code",
417 | "execution_count": null,
418 | "metadata": {},
419 | "outputs": [],
420 | "source": [
421 | "from keras.layers import Dense, Input\n",
422 | "from keras import models\n",
423 | "\n",
424 | "input_feat = Input(shape=(2,))\n",
425 | "\n",
426 | "hidden1 = Dense(128, activation='relu')(input_feat)\n",
427 | "hidden2 = Dense(128, activation='relu')(hidden1)\n",
428 | "output = Dense(10, activation='softmax')(hidden2)\n",
429 | "\n",
430 | "classifier = models.Model(input_feat, output)\n",
431 | "\n",
432 | "classifier.summary()"
433 | ]
434 | },
435 | {
436 | "cell_type": "code",
437 | "execution_count": null,
438 | "metadata": {},
439 | "outputs": [],
440 | "source": [
441 | "classifier.compile(loss='categorical_crossentropy',\n",
442 | "                   optimizer=optimizers.RMSprop(lr=1E-4),\n",
443 | "                   metrics=['acc'])\n",
444 | "\n",
445 | "history = classifier.fit(f_tr, y_tr, \n",
446 | "                         batch_size=32, \n",
447 | "                         epochs=30, \n",
448 | "                         validation_data=(f_val, y_val))"
449 | ]
450 | },
451 | {
452 | "cell_type": "markdown",
453 | "metadata": {},
454 | "source": [
455 | "### Conclusion\n",
456 | "\n",
457 | "Using the 2D features, the validation accuracy is 60~70%. Recall that using the original data, the accuracy is about 98%. Obviously, the 2D features are not very discriminative.\n",
458 | "\n",
459 | "We are going to build a supervised autoencoder model for learning low-dimensional discriminative features."
460 | ]
461 | },
462 | {
463 | "cell_type": "code",
464 | "execution_count": null,
465 | "metadata": {},
466 | "outputs": [],
467 | "source": []
468 | },
469 | {
470 | "cell_type": "code",
471 | "execution_count": null,
472 | "metadata": {},
473 | "outputs": [],
474 | "source": []
475 | },
476 | {
477 | "cell_type": "markdown",
478 | "metadata": {},
479 | "source": [
480 | "## 4. Build a supervised autoencoder model\n",
481 | "\n",
482 | "\n",
483 | "**You are required to build and train a supervised autoencoder that looks like the following.** (Not necessarily the same.) You are required to add other layers properly to alleviate overfitting.\n",
484 | "\n",
485 | "\n",
486 | "![Network Structure](https://github.com/wangshusen/CS583A-2019Spring/blob/master/homework/HM5/supervised_ae.png?raw=true \"NetworkStructure\")\n"
487 | ]
488 | },
489 | {
490 | "cell_type": "markdown",
491 | "metadata": {},
492 | "source": [
493 | "### 4.1. 
Build the network" 494 | ] 495 | }, 496 | { 497 | "cell_type": "code", 498 | "execution_count": null, 499 | "metadata": {}, 500 | "outputs": [], 501 | "source": [ 502 | "# build the supervised autoencoder network\n", 503 | "from keras.layers import Dense, Input\n", 504 | "from keras import models\n", 505 | "\n", 506 | "input_img = Input(shape=(784,), name='input_img')\n", 507 | "\n", 508 | "# encoder network\n", 509 | "encode1 = \n", 510 | "\n", 511 | "# The width of the bottleneck layer must be exactly 2.\n", 512 | "bottleneck = \n", 513 | "\n", 514 | "# decoder network\n", 515 | "decode1 = \n", 516 | "\n", 517 | "decode4 = \n", 518 | "\n", 519 | "# build a classifier upon the bottleneck layer\n", 520 | "classifier1 = \n", 521 | "\n", 522 | "classifier3 = " 523 | ] 524 | }, 525 | { 526 | "cell_type": "code", 527 | "execution_count": null, 528 | "metadata": {}, 529 | "outputs": [], 530 | "source": [ 531 | "# connect the input and the two outputs\n", 532 | "sae = models.Model(input_img, [decode4, classifier3])\n", 533 | "\n", 534 | "sae.summary()" 535 | ] 536 | }, 537 | { 538 | "cell_type": "code", 539 | "execution_count": null, 540 | "metadata": {}, 541 | "outputs": [], 542 | "source": [ 543 | "# print the network structure to a PDF file\n", 544 | "\n", 545 | "from IPython.display import SVG\n", 546 | "from keras.utils.vis_utils import model_to_dot, plot_model\n", 547 | "\n", 548 | "SVG(model_to_dot(sae, show_shapes=False).create(prog='dot', format='svg'))\n", 549 | "\n", 550 | "plot_model(\n", 551 | " model=sae, show_shapes=False,\n", 552 | " to_file='supervised_ae.pdf'\n", 553 | ")\n", 554 | "\n", 555 | "# you can find the file \"supervised_ae.pdf\" in the current directory." 556 | ] 557 | }, 558 | { 559 | "cell_type": "markdown", 560 | "metadata": {}, 561 | "source": [ 562 | "### 4.2. Train the new model and tune the hyper-parameters\n", 563 | "\n", 564 | "The new model has multiple output. Thus we specify **multiple** loss functions and their weights. " 565 | ] 566 | }, 567 | { 568 | "cell_type": "code", 569 | "execution_count": null, 570 | "metadata": {}, 571 | "outputs": [], 572 | "source": [ 573 | "from keras import optimizers\n", 574 | "\n", 575 | "sae.compile(loss=['mean_squared_error', 'categorical_crossentropy'],\n", 576 | " loss_weights=[1, 0.5], # to be tuned\n", 577 | " optimizer=optimizers.RMSprop(lr=1E-3))\n", 578 | "\n", 579 | "history = sae.fit(x_tr, [x_tr, y_tr], \n", 580 | " batch_size=32, \n", 581 | " epochs=100, \n", 582 | " validation_data=(x_val, [x_val, y_val]))" 583 | ] 584 | }, 585 | { 586 | "cell_type": "code", 587 | "execution_count": null, 588 | "metadata": {}, 589 | "outputs": [], 590 | "source": [ 591 | "import matplotlib.pyplot as plt\n", 592 | "%matplotlib inline\n", 593 | "\n", 594 | "loss = history.history['loss']\n", 595 | "val_loss = history.history['val_loss']\n", 596 | "\n", 597 | "epochs = range(len(loss))\n", 598 | "\n", 599 | "plt.plot(epochs, loss, 'bo', label='Training Loss')\n", 600 | "plt.plot(epochs, val_loss, 'r', label='Validation Loss')\n", 601 | "plt.xlabel('Epochs')\n", 602 | "plt.ylabel('Loss')\n", 603 | "plt.legend()\n", 604 | "plt.show()" 605 | ] 606 | }, 607 | { 608 | "cell_type": "markdown", 609 | "metadata": {}, 610 | "source": [ 611 | "### Question\n", 612 | "\n", 613 | "Do you think overfitting is happening? If yes, what can you do? 
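(One standard option is sketched below, under the assumption that your encoder uses the functional-API ```Dense``` layers from this notebook; the layer names are illustrative, not prescribed. ```Dropout``` between dense layers is a common way to alleviate overfitting here.)

```python
# Sketch: Dropout between dense layers is one common regularizer (names illustrative)
from keras.layers import Dense, Dropout

encode1 = Dense(128, activation='relu', name='encode1')(input_img)
encode1 = Dropout(0.25, name='encode1_drop')(encode1)  # randomly zeros 25% of activations
encode2 = Dense(32, activation='relu', name='encode2')(encode1)
```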
Please make necessary changes to the supervised autoencoder network structure.\n", 614 | "\n", 615 | "**Failing to add proper regularization will lose 1~2 scores.**" 616 | ] 617 | }, 618 | { 619 | "cell_type": "markdown", 620 | "metadata": {}, 621 | "source": [ 622 | "### 4.3. Visualize the reconstructed test images" 623 | ] 624 | }, 625 | { 626 | "cell_type": "code", 627 | "execution_count": null, 628 | "metadata": {}, 629 | "outputs": [], 630 | "source": [ 631 | "sae_output = sae.predict(x_test)[0].reshape((10000, 28, 28))\n", 632 | "\n", 633 | "ROW = 5\n", 634 | "COLUMN = 4\n", 635 | "\n", 636 | "x = sae_output\n", 637 | "fname = 'reconstruct_sae.pdf'\n", 638 | "\n", 639 | "fig, axes = plt.subplots(nrows=ROW, ncols=COLUMN, figsize=(8, 10))\n", 640 | "for ax, i in zip(axes.flat, numpy.arange(ROW*COLUMN)):\n", 641 | " image = x[i].reshape(28, 28)\n", 642 | " ax.imshow(image, cmap='gray')\n", 643 | " ax.axis('off')\n", 644 | "\n", 645 | "plt.tight_layout()\n", 646 | "plt.savefig(fname)\n", 647 | "plt.show()" 648 | ] 649 | }, 650 | { 651 | "cell_type": "markdown", 652 | "metadata": {}, 653 | "source": [ 654 | "### 4.4. Visualize the low-dimensional features" 655 | ] 656 | }, 657 | { 658 | "cell_type": "code", 659 | "execution_count": null, 660 | "metadata": {}, 661 | "outputs": [], 662 | "source": [ 663 | "# build the encoder model\n", 664 | "sae_encoder = models.Model(input_img, bottleneck)\n", 665 | "sae_encoder.summary()" 666 | ] 667 | }, 668 | { 669 | "cell_type": "code", 670 | "execution_count": null, 671 | "metadata": { 672 | "scrolled": true 673 | }, 674 | "outputs": [], 675 | "source": [ 676 | "# extract test features\n", 677 | "encoded_test = sae_encoder.predict(x_test)\n", 678 | "print('Shape of encoded_test: ' + str(encoded_test.shape))\n", 679 | "\n", 680 | "colors = numpy.array(['r', 'g', 'b', 'm', 'c', 'k', 'y', 'purple', 'darkred', 'navy'])\n", 681 | "colors_test = colors[y_test]\n", 682 | "\n", 683 | "\n", 684 | "import matplotlib.pyplot as plt\n", 685 | "%matplotlib inline\n", 686 | "\n", 687 | "fig = plt.figure(figsize=(8, 8))\n", 688 | "plt.scatter(encoded_test[:, 0], encoded_test[:, 1], s=10, c=colors_test, edgecolors=colors_test)\n", 689 | "plt.axis('off')\n", 690 | "plt.tight_layout()\n", 691 | "fname = 'sae_code.pdf'\n", 692 | "plt.savefig(fname)" 693 | ] 694 | }, 695 | { 696 | "cell_type": "markdown", 697 | "metadata": {}, 698 | "source": [ 699 | "### 4.5. 
Are the learned low-dim features discriminative?\n",
700 | "\n",
701 | "To find the answer, let's train a classifier on the training set (the extracted 2-dim features) and evaluate it on the validation and test sets.\n"
702 | ]
703 | },
704 | {
705 | "cell_type": "code",
706 | "execution_count": null,
707 | "metadata": {},
708 | "outputs": [],
709 | "source": [
710 | "# extract 2D features from the training, validation, and test samples\n",
711 | "f_tr = sae_encoder.predict(x_tr)\n",
712 | "f_val = sae_encoder.predict(x_val)\n",
713 | "f_te = sae_encoder.predict(x_test)"
714 | ]
715 | },
716 | {
717 | "cell_type": "code",
718 | "execution_count": null,
719 | "metadata": {},
720 | "outputs": [],
721 | "source": [
722 | "# build a classifier which takes the 2D features as input\n",
723 | "from keras.layers import Dense, Input\n",
724 | "from keras import models\n",
725 | "\n",
726 | "input_feat = Input(shape=(2,))\n",
727 | "\n",
728 | "\n",
729 | "output = \n",
730 | "\n",
731 | "classifier = models.Model(input_feat, output)\n",
732 | "\n",
733 | "classifier.summary()"
734 | ]
735 | },
736 | {
737 | "cell_type": "code",
738 | "execution_count": null,
739 | "metadata": {},
740 | "outputs": [],
741 | "source": [
742 | "classifier.compile(loss='categorical_crossentropy',\n",
743 | "                   optimizer=optimizers.RMSprop(lr=1E-4),\n",
744 | "                   metrics=['acc'])\n",
745 | "\n",
746 | "history = classifier.fit(f_tr, y_tr, \n",
747 | "                         batch_size=32, \n",
748 | "                         epochs=30, \n",
749 | "                         validation_data=(f_val, y_val))"
750 | ]
751 | },
752 | {
753 | "cell_type": "markdown",
754 | "metadata": {},
755 | "source": [
756 | "#### Remark:\n",
757 | "\n",
758 | "The validation accuracy must be above 90%. This means that the low-dim features learned by the supervised autoencoder are very effective."
759 | ]
760 | },
761 | {
762 | "cell_type": "code",
763 | "execution_count": null,
764 | "metadata": {},
765 | "outputs": [],
766 | "source": [
767 | "# evaluate your model on the never-seen-before test data\n",
768 | "# write your code here:\n",
769 | "..."
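(If you are unsure what this final cell should look like, one possible completion is sketched here — it assumes the ```classifier``` compiled above with ```metrics=['acc']```, the test features ```f_te``` extracted earlier, and the one-hot labels ```y_test_vec``` from Section 1.)

```python
# One possible completion (sketch): final held-out evaluation
loss, acc = classifier.evaluate(f_te, y_test_vec)
print('Test loss = ' + str(loss))
print('Test accuracy = ' + str(acc))
```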
770 | ] 771 | } 772 | ], 773 | "metadata": { 774 | "kernelspec": { 775 | "display_name": "Python 3", 776 | "language": "python", 777 | "name": "python3" 778 | }, 779 | "language_info": { 780 | "codemirror_mode": { 781 | "name": "ipython", 782 | "version": 3 783 | }, 784 | "file_extension": ".py", 785 | "mimetype": "text/x-python", 786 | "name": "python", 787 | "nbconvert_exporter": "python", 788 | "pygments_lexer": "ipython3", 789 | "version": "3.7.6" 790 | } 791 | }, 792 | "nbformat": 4, 793 | "nbformat_minor": 2 794 | } 795 | -------------------------------------------------------------------------------- /homework/Bonus2/supervised_ae.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wangshusen/CS583-2021S/55f8ce5c1257f01333ab133f3ab447f208ff85a8/homework/Bonus2/supervised_ae.png -------------------------------------------------------------------------------- /homework/Exam-Sample/Sample.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wangshusen/CS583-2021S/55f8ce5c1257f01333ab133f3ab447f208ff85a8/homework/Exam-Sample/Sample.pdf -------------------------------------------------------------------------------- /homework/HM2/HM2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# HM2: Numerical Optimization for Logistic Regression.\n", 8 | "\n", 9 | "### Name: [Your-Name?]\n" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "## 0. You will do the following:\n", 17 | "\n", 18 | "1. Read the lecture note: [click here](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/Logistic/paper/logistic.pdf)\n", 19 | "\n", 20 | "2. Read, complete, and run my code.\n", 21 | "\n", 22 | "3. **Implement mini-batch SGD** and evaluate the performance.\n", 23 | "\n", 24 | "4. Convert the .IPYNB file to .HTML file.\n", 25 | "\n", 26 | " * The HTML file must contain **the code** and **the output after execution**.\n", 27 | " \n", 28 | " * Missing **the output after execution** will not be graded.\n", 29 | " \n", 30 | "5. Upload this .HTML file to your Google Drive, Dropbox, or your Github repo. (If you submit the file to Google Drive or Dropbox, you must make the file \"open-access\". The delay caused by \"deny of access\" may result in late penalty.)\n", 31 | "\n", 32 | "6. Submit the link to this .HTML file to Canvas.\n", 33 | "\n", 34 | " * Example: https://github.com/wangshusen/CS583-2020S/blob/master/homework/HM2/HM2.html\n", 35 | "\n", 36 | "\n", 37 | "## Grading criteria:\n", 38 | "\n", 39 | "1. When computing the ```gradient``` and ```objective function value``` using a batch of samples, use **matrix-vector multiplication** rather than a FOR LOOP of **vector-vector multiplications**.\n", 40 | "\n", 41 | "2. Plot ```objective function value``` against ```epochs```. In the plot, compare GD, SGD, and MB-SGD (with $b=8$ and $b=64$). The plot must look reasonable." 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "# 1. 
Data processing\n",
56 | "\n",
57 | "- Download the Diabetes dataset from https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/diabetes\n",
58 | "- Load the data using sklearn.\n",
59 | "- Preprocess the data."
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "## 1.1. Load the data"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": null,
72 | "metadata": {},
73 | "outputs": [],
74 | "source": [
75 | "from sklearn import datasets\n",
76 | "import numpy\n",
77 | "\n",
78 | "x_sparse, y = datasets.load_svmlight_file('diabetes')\n",
79 | "x = x_sparse.todense()\n",
80 | "\n",
81 | "print('Shape of x: ' + str(x.shape))\n",
82 | "print('Shape of y: ' + str(y.shape))"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "metadata": {},
88 | "source": [
89 | "## 1.2. Partition to training and test sets"
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": null,
95 | "metadata": {},
96 | "outputs": [],
97 | "source": [
98 | "# partition the data to training and test sets\n",
99 | "n = x.shape[0]\n",
100 | "n_train = 640\n",
101 | "n_test = n - n_train\n",
102 | "\n",
103 | "rand_indices = numpy.random.permutation(n)\n",
104 | "train_indices = rand_indices[0:n_train]\n",
105 | "test_indices = rand_indices[n_train:n]\n",
106 | "\n",
107 | "x_train = x[train_indices, :]\n",
108 | "x_test = x[test_indices, :]\n",
109 | "y_train = y[train_indices].reshape(n_train, 1)\n",
110 | "y_test = y[test_indices].reshape(n_test, 1)\n",
111 | "\n",
112 | "print('Shape of x_train: ' + str(x_train.shape))\n",
113 | "print('Shape of x_test: ' + str(x_test.shape))\n",
114 | "print('Shape of y_train: ' + str(y_train.shape))\n",
115 | "print('Shape of y_test: ' + str(y_test.shape))"
116 | ]
117 | },
118 | {
119 | "cell_type": "markdown",
120 | "metadata": {},
121 | "source": [
122 | "## 1.3. Feature scaling"
123 | ]
124 | },
125 | {
126 | "cell_type": "markdown",
127 | "metadata": {},
128 | "source": [
129 | "Use standardization to transform both the training and test features"
130 | ]
131 | },
132 | {
133 | "cell_type": "code",
134 | "execution_count": null,
135 | "metadata": {},
136 | "outputs": [],
137 | "source": [
138 | "# Standardization\n",
139 | "import numpy\n",
140 | "\n",
141 | "# calculate mu and sig using the training set\n",
142 | "d = x_train.shape[1]\n",
143 | "mu = numpy.mean(x_train, axis=0).reshape(1, d)\n",
144 | "sig = numpy.std(x_train, axis=0).reshape(1, d)\n",
145 | "\n",
146 | "# transform the training features\n",
147 | "x_train = (x_train - mu) / (sig + 1E-6)\n",
148 | "\n",
149 | "# transform the test features\n",
150 | "x_test = (x_test - mu) / (sig + 1E-6)\n",
151 | "\n",
152 | "print('test mean = ')\n",
153 | "print(numpy.mean(x_test, axis=0))\n",
154 | "\n",
155 | "print('test std = ')\n",
156 | "print(numpy.std(x_test, axis=0))"
157 | ]
158 | },
159 | {
160 | "cell_type": "markdown",
161 | "metadata": {},
162 | "source": [
163 | "## 1.4. 
Add a dimension of all ones" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "n_train, d = x_train.shape\n", 173 | "x_train = numpy.concatenate((x_train, numpy.ones((n_train, 1))), axis=1)\n", 174 | "\n", 175 | "n_test, d = x_test.shape\n", 176 | "x_test = numpy.concatenate((x_test, numpy.ones((n_test, 1))), axis=1)\n", 177 | "\n", 178 | "print('Shape of x_train: ' + str(x_train.shape))\n", 179 | "print('Shape of x_test: ' + str(x_test.shape))" 180 | ] 181 | }, 182 | { 183 | "cell_type": "markdown", 184 | "metadata": {}, 185 | "source": [ 186 | "# 2. Logistic regression model\n", 187 | "\n", 188 | "The objective function is $Q (w; X, y) = \\frac{1}{n} \\sum_{i=1}^n \\log \\Big( 1 + \\exp \\big( - y_i x_i^T w \\big) \\Big) + \\frac{\\lambda}{2} \\| w \\|_2^2 $." 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "metadata": {}, 195 | "outputs": [], 196 | "source": [ 197 | "# Calculate the objective function value\n", 198 | "# Inputs:\n", 199 | "# w: d-by-1 matrix\n", 200 | "# x: n-by-d matrix\n", 201 | "# y: n-by-1 matrix\n", 202 | "# lam: scalar, the regularization parameter\n", 203 | "# Return:\n", 204 | "# objective function value (scalar)\n", 205 | "def objective(w, x, y, lam):\n", 206 | " n, d = x.shape\n", 207 | " yx = numpy.multiply(y, x) # n-by-d matrix\n", 208 | " yxw = numpy.dot(yx, w) # n-by-1 matrix\n", 209 | " vec1 = numpy.exp(-yxw) # n-by-1 matrix\n", 210 | " vec2 = numpy.log(1 + vec1) # n-by-1 matrix\n", 211 | " loss = numpy.mean(vec2) # scalar\n", 212 | " reg = lam / 2 * numpy.sum(w * w) # scalar\n", 213 | " return loss + reg\n", 214 | " " 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": null, 220 | "metadata": {}, 221 | "outputs": [], 222 | "source": [ 223 | "# initialize w\n", 224 | "d = x_train.shape[1]\n", 225 | "w = numpy.zeros((d, 1))\n", 226 | "\n", 227 | "# evaluate the objective function value at w\n", 228 | "lam = 1E-6\n", 229 | "objval0 = objective(w, x_train, y_train, lam)\n", 230 | "print('Initial objective function value = ' + str(objval0))" 231 | ] 232 | }, 233 | { 234 | "cell_type": "markdown", 235 | "metadata": {}, 236 | "source": [ 237 | "# 3. Numerical optimization" 238 | ] 239 | }, 240 | { 241 | "cell_type": "markdown", 242 | "metadata": {}, 243 | "source": [ 244 | "## 3.1. 
Gradient descent\n" 245 | ] 246 | }, 247 | { 248 | "cell_type": "markdown", 249 | "metadata": {}, 250 | "source": [ 251 | "The gradient at $w$ is $g = - \\frac{1}{n} \\sum_{i=1}^n \\frac{y_i x_i }{1 + \\exp ( y_i x_i^T w)} + \\lambda w$" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "# Calculate the gradient\n", 261 | "# Inputs:\n", 262 | "# w: d-by-1 matrix\n", 263 | "# x: n-by-d matrix\n", 264 | "# y: n-by-1 matrix\n", 265 | "# lam: scalar, the regularization parameter\n", 266 | "# Return:\n", 267 | "# g: g: d-by-1 matrix, full gradient\n", 268 | "def gradient(w, x, y, lam):\n", 269 | " n, d = x.shape\n", 270 | " yx = numpy.multiply(y, x) # n-by-d matrix\n", 271 | " yxw = numpy.dot(yx, w) # n-by-1 matrix\n", 272 | " vec1 = numpy.exp(yxw) # n-by-1 matrix\n", 273 | " vec2 = numpy.divide(yx, 1+vec1) # n-by-d matrix\n", 274 | " vec3 = -numpy.mean(vec2, axis=0).reshape(d, 1) # d-by-1 matrix\n", 275 | " g = vec3 + lam * w\n", 276 | " return g" 277 | ] 278 | }, 279 | { 280 | "cell_type": "code", 281 | "execution_count": null, 282 | "metadata": {}, 283 | "outputs": [], 284 | "source": [ 285 | "# Gradient descent for solving logistic regression\n", 286 | "# Inputs:\n", 287 | "# x: n-by-d matrix\n", 288 | "# y: n-by-1 matrix\n", 289 | "# lam: scalar, the regularization parameter\n", 290 | "# stepsize: scalar\n", 291 | "# max_iter: integer, the maximal iterations\n", 292 | "# w: d-by-1 matrix, initialization of w\n", 293 | "# Return:\n", 294 | "# w: d-by-1 matrix, the solution\n", 295 | "# objvals: a record of each iteration's objective value\n", 296 | "def grad_descent(x, y, lam, stepsize, max_iter=100, w=None):\n", 297 | " n, d = x.shape\n", 298 | " objvals = numpy.zeros(max_iter) # store the objective values\n", 299 | " if w is None:\n", 300 | " w = numpy.zeros((d, 1)) # zero initialization\n", 301 | " \n", 302 | " for t in range(max_iter):\n", 303 | " objval = objective(w, x, y, lam)\n", 304 | " objvals[t] = objval\n", 305 | " print('Objective value at t=' + str(t) + ' is ' + str(objval))\n", 306 | " g = gradient(w, x, y, lam)\n", 307 | " w -= stepsize * g\n", 308 | " \n", 309 | " return w, objvals" 310 | ] 311 | }, 312 | { 313 | "cell_type": "markdown", 314 | "metadata": {}, 315 | "source": [ 316 | "Run gradient descent." 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "execution_count": null, 322 | "metadata": {}, 323 | "outputs": [], 324 | "source": [ 325 | "lam = 1E-6\n", 326 | "stepsize = 1.0\n", 327 | "w, objvals_gd = grad_descent(x_train, y_train, lam, stepsize)" 328 | ] 329 | }, 330 | { 331 | "cell_type": "markdown", 332 | "metadata": {}, 333 | "source": [ 334 | "## 3.2. Stochastic gradient descent (SGD)\n", 335 | "\n", 336 | "Define $Q_i (w) = \\log \\Big( 1 + \\exp \\big( - y_i x_i^T w \\big) \\Big) + \\frac{\\lambda}{2} \\| w \\|_2^2 $.\n", 337 | "\n", 338 | "The stochastic gradient at $w$ is $g_i = \\frac{\\partial Q_i }{ \\partial w} = -\\frac{y_i x_i }{1 + \\exp ( y_i x_i^T w)} + \\lambda w$." 
339 | ] 340 | }, 341 | { 342 | "cell_type": "code", 343 | "execution_count": null, 344 | "metadata": {}, 345 | "outputs": [], 346 | "source": [ 347 | "# Calculate the objective Q_i and the gradient of Q_i\n", 348 | "# Inputs:\n", 349 | "# w: d-by-1 matrix\n", 350 | "# xi: 1-by-d matrix\n", 351 | "# yi: scalar\n", 352 | "# lam: scalar, the regularization parameter\n", 353 | "# Return:\n", 354 | "# obj: scalar, the objective Q_i\n", 355 | "# g: d-by-1 matrix, gradient of Q_i\n", 356 | "def stochastic_objective_gradient(w, xi, yi, lam):\n", 357 | " yx = yi * xi # 1-by-d matrix\n", 358 | " yxw = float(numpy.dot(yx, w)) # scalar\n", 359 | " \n", 360 | " # calculate objective function Q_i\n", 361 | " loss = numpy.log(1 + numpy.exp(-yxw)) # scalar\n", 362 | " reg = lam / 2 * numpy.sum(w * w) # scalar\n", 363 | " obj = loss + reg\n", 364 | " \n", 365 | " # calculate stochastic gradient\n", 366 | " g_loss = -yx.T / (1 + numpy.exp(yxw)) # d-by-1 matrix\n", 367 | " g = g_loss + lam * w # d-by-1 matrix\n", 368 | " \n", 369 | " return obj, g" 370 | ] 371 | }, 372 | { 373 | "cell_type": "code", 374 | "execution_count": null, 375 | "metadata": {}, 376 | "outputs": [], 377 | "source": [ 378 | "# SGD for solving logistic regression\n", 379 | "# Inputs:\n", 380 | "# x: n-by-d matrix\n", 381 | "# y: n-by-1 matrix\n", 382 | "# lam: scalar, the regularization parameter\n", 383 | "# stepsize: scalar\n", 384 | "# max_epoch: integer, the maximal epochs\n", 385 | "# w: d-by-1 matrix, initialization of w\n", 386 | "# Return:\n", 387 | "# w: the solution\n", 388 | "# objvals: record of each iteration's objective value\n", 389 | "def sgd(x, y, lam, stepsize, max_epoch=100, w=None):\n", 390 | " n, d = x.shape\n", 391 | " objvals = numpy.zeros(max_epoch) # store the objective values\n", 392 | " if w is None:\n", 393 | " w = numpy.zeros((d, 1)) # zero initialization\n", 394 | " \n", 395 | " for t in range(max_epoch):\n", 396 | " # randomly shuffle the samples\n", 397 | " rand_indices = numpy.random.permutation(n)\n", 398 | " x_rand = x[rand_indices, :]\n", 399 | " y_rand = y[rand_indices, :]\n", 400 | " \n", 401 | " objval = 0 # accumulate the objective values\n", 402 | " for i in range(n):\n", 403 | " xi = x_rand[i, :] # 1-by-d matrix\n", 404 | " yi = float(y_rand[i, :]) # scalar\n", 405 | " obj, g = stochastic_objective_gradient(w, xi, yi, lam)\n", 406 | " objval += obj\n", 407 | " w -= stepsize * g\n", 408 | " \n", 409 | " stepsize *= 0.9 # decrease step size\n", 410 | " objval /= n\n", 411 | " objvals[t] = objval\n", 412 | " print('Objective value at epoch t=' + str(t) + ' is ' + str(objval))\n", 413 | " \n", 414 | " return w, objvals" 415 | ] 416 | }, 417 | { 418 | "cell_type": "markdown", 419 | "metadata": {}, 420 | "source": [ 421 | "Run SGD." 422 | ] 423 | }, 424 | { 425 | "cell_type": "code", 426 | "execution_count": null, 427 | "metadata": {}, 428 | "outputs": [], 429 | "source": [ 430 | "lam = 1E-6\n", 431 | "stepsize = 0.1\n", 432 | "w, objvals_sgd = sgd(x_train, y_train, lam, stepsize)" 433 | ] 434 | }, 435 | { 436 | "cell_type": "markdown", 437 | "metadata": {}, 438 | "source": [ 439 | "# 4. Compare GD with SGD\n", 440 | "\n", 441 | "Plot objective function values against epochs." 
442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "execution_count": null, 447 | "metadata": {}, 448 | "outputs": [], 449 | "source": [ 450 | "import matplotlib.pyplot as plt\n", 451 | "%matplotlib inline\n", 452 | "\n", 453 | "fig = plt.figure(figsize=(6, 4))\n", 454 | "\n", 455 | "epochs_gd = range(len(objvals_gd))\n", 456 | "epochs_sgd = range(len(objvals_sgd))\n", 457 | "\n", 458 | "line0, = plt.plot(epochs_gd, objvals_gd, '--b', linewidth=4)\n", 459 | "line1, = plt.plot(epochs_sgd, objvals_sgd, '-r', linewidth=2)\n", 460 | "plt.xlabel('Epochs', fontsize=20)\n", 461 | "plt.ylabel('Objective Value', fontsize=20)\n", 462 | "plt.xticks(fontsize=16)\n", 463 | "plt.yticks(fontsize=16)\n", 464 | "plt.legend([line0, line1], ['GD', 'SGD'], fontsize=20)\n", 465 | "plt.tight_layout()\n", 466 | "plt.show()\n", 467 | "fig.savefig('compare_gd_sgd.pdf', format='pdf', dpi=1200)" 468 | ] 469 | }, 470 | { 471 | "cell_type": "markdown", 472 | "metadata": {}, 473 | "source": [ 474 | "# 5. Prediction" 475 | ] 476 | }, 477 | { 478 | "cell_type": "code", 479 | "execution_count": null, 480 | "metadata": {}, 481 | "outputs": [], 482 | "source": [ 483 | "# Predict class label\n", 484 | "# Inputs:\n", 485 | "#     w: d-by-1 matrix\n", 486 | "#     X: m-by-d matrix\n", 487 | "# Return:\n", 488 | "#     f: m-by-1 matrix, the predictions\n", 489 | "def predict(w, X):\n", 490 | "    xw = numpy.dot(X, w)\n", 491 | "    f = numpy.sign(xw)\n", 492 | "    return f" 493 | ] 494 | }, 495 | { 496 | "cell_type": "code", 497 | "execution_count": null, 498 | "metadata": {}, 499 | "outputs": [], 500 | "source": [ 501 | "# evaluate training error\n", 502 | "f_train = predict(w, x_train)\n", 503 | "diff = numpy.abs(f_train - y_train) / 2\n", 504 | "error_train = numpy.mean(diff)\n", 505 | "print('Training classification error is ' + str(error_train))" 506 | ] 507 | }, 508 | { 509 | "cell_type": "code", 510 | "execution_count": null, 511 | "metadata": {}, 512 | "outputs": [], 513 | "source": [ 514 | "# evaluate test error\n", 515 | "f_test = predict(w, x_test)\n", 516 | "diff = numpy.abs(f_test - y_test) / 2\n", 517 | "error_test = numpy.mean(diff)\n", 518 | "print('Test classification error is ' + str(error_test))" 519 | ] 520 | }, 521 | { 522 | "cell_type": "markdown", 523 | "metadata": {}, 524 | "source": [ 525 | "# 6. Mini-batch SGD (fill the code)\n", 526 | "\n" 527 | ] 528 | }, 529 | { 530 | "cell_type": "markdown", 531 | "metadata": {}, 532 | "source": [ 533 | "## 6.1. Compute the objective $Q_I$ and its gradient using a batch of samples\n", 534 | "\n", 535 | "Define $Q_I (w) = \\frac{1}{b} \\sum_{i \\in I} \\log \\Big( 1 + \\exp \\big( - y_i x_i^T w \\big) \\Big) + \\frac{\\lambda}{2} \\| w \\|_2^2 $, where $I$ is a set containing $b$ indices randomly drawn from $\\{ 1, \\cdots , n \\}$ without replacement.\n", 536 | "\n", 537 | "The stochastic gradient at $w$ is $g_I = \\frac{\\partial Q_I }{ \\partial w} = \\frac{1}{b} \\sum_{i \\in I} \\frac{- y_i x_i }{1 + \\exp ( y_i x_i^T w)} + \\lambda w$." 
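For reference, the batch objective and gradient follow the same matrix-vector pattern as the full-batch `objective` and `gradient` functions above. The sketch below is an editorial illustration of what the $Q_I$ formula implies (`xi` is b-by-d, `yi` is b-by-1); the assignment still asks you to fill in `mb_stochastic_objective_gradient` yourself in the next cell.

```python
# Editorial sketch of the vectorized batch objective and gradient implied by the
# Q_I formula above; it mirrors objective() and gradient() and avoids FOR loops.
def mb_objective_gradient_sketch(w, xi, yi, lam, b):
    b, d = xi.shape                                     # b can also be read off xi
    yx = numpy.multiply(yi, xi)                         # b-by-d matrix
    yxw = numpy.dot(yx, w)                              # b-by-1 matrix
    loss = numpy.mean(numpy.log(1 + numpy.exp(-yxw)))   # scalar: (1/b) * sum of logs
    reg = lam / 2 * numpy.sum(w * w)                    # scalar
    obj = loss + reg
    vec = numpy.divide(yx, 1 + numpy.exp(yxw))          # b-by-d matrix
    g = -numpy.mean(vec, axis=0).reshape(d, 1) + lam * w  # d-by-1 matrix
    return obj, g
```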
538 | ] 539 | }, 540 | { 541 | "cell_type": "code", 542 | "execution_count": null, 543 | "metadata": {}, 544 | "outputs": [], 545 | "source": [ 546 | "# Calculate the objective Q_I and the gradient of Q_I\n", 547 | "# Inputs:\n", 548 | "# w: d-by-1 matrix\n", 549 | "# xi: b-by-d matrix\n", 550 | "# yi: b-by-1 matrix\n", 551 | "# lam: scalar, the regularization parameter\n", 552 | "# b: integer, the batch size\n", 553 | "# Return:\n", 554 | "# obj: scalar, the objective Q_i\n", 555 | "# g: d-by-1 matrix, gradient of Q_i\n", 556 | "def mb_stochastic_objective_gradient(w, xi, yi, lam, b):\n", 557 | " # Fill the function\n", 558 | " # Follow the implementation of stochastic_objective_gradient\n", 559 | " # Use matrix-vector multiplication; do not use FOR LOOP of vector-vector multiplications\n", 560 | " ...\n", 561 | " \n", 562 | " return obj, g" 563 | ] 564 | }, 565 | { 566 | "cell_type": "markdown", 567 | "metadata": {}, 568 | "source": [ 569 | "## 6.2. Implement mini-batch SGD\n", 570 | "\n", 571 | "Hints:\n", 572 | "1. In every epoch, randomly permute the $n$ samples (just like SGD).\n", 573 | "2. Each epoch has $\\frac{n}{b}$ iterations. In every iteration, use $b$ samples, and compute the gradient and objective using the ``mb_stochastic_objective_gradient`` function. In the next iteration, use the next $b$ samples, and so on.\n" 574 | ] 575 | }, 576 | { 577 | "cell_type": "code", 578 | "execution_count": null, 579 | "metadata": {}, 580 | "outputs": [], 581 | "source": [ 582 | "# Mini-Batch SGD for solving logistic regression\n", 583 | "# Inputs:\n", 584 | "# x: n-by-d matrix\n", 585 | "# y: n-by-1 matrix\n", 586 | "# lam: scalar, the regularization parameter\n", 587 | "# b: integer, the batch size\n", 588 | "# stepsize: scalar\n", 589 | "# max_epoch: integer, the maximal epochs\n", 590 | "# w: d-by-1 matrix, initialization of w\n", 591 | "# Return:\n", 592 | "# w: the solution\n", 593 | "# objvals: record of each iteration's objective value\n", 594 | "def mb_sgd(x, y, lam, b, stepsize, max_epoch=100, w=None):\n", 595 | " # Fill the function\n", 596 | " # Follow the implementation of sgd\n", 597 | " # Record one objective value per epoch (not per iteration!)\n", 598 | " ...\n", 599 | " \n", 600 | " return w, objvals" 601 | ] 602 | }, 603 | { 604 | "cell_type": "markdown", 605 | "metadata": {}, 606 | "source": [ 607 | "## 6.3. Run MB-SGD" 608 | ] 609 | }, 610 | { 611 | "cell_type": "code", 612 | "execution_count": null, 613 | "metadata": {}, 614 | "outputs": [], 615 | "source": [ 616 | "# MB-SGD with batch size b=8\n", 617 | "lam = 1E-6 # do not change\n", 618 | "b = 8 # do not change\n", 619 | "stepsize = 0.1 # you must tune this parameter\n", 620 | "\n", 621 | "w, objvals_mbsgd8 = mb_sgd(x_train, y_train, lam, b, stepsize)" 622 | ] 623 | }, 624 | { 625 | "cell_type": "code", 626 | "execution_count": null, 627 | "metadata": {}, 628 | "outputs": [], 629 | "source": [ 630 | "# MB-SGD with batch size b=64\n", 631 | "lam = 1E-6 # do not change\n", 632 | "b = 64 # do not change\n", 633 | "stepsize = 0.1 # you must tune this parameter\n", 634 | "\n", 635 | "w, objvals_mbsgd64 = mb_sgd(x_train, y_train, lam, b, stepsize)" 636 | ] 637 | }, 638 | { 639 | "cell_type": "markdown", 640 | "metadata": {}, 641 | "source": [ 642 | "# 7. 
Plot and compare GD, SGD, and MB-SGD" 643 | ] 644 | }, 645 | { 646 | "cell_type": "markdown", 647 | "metadata": {}, 648 | "source": [ 649 | "You are required to compare the following algorithms:\n", 650 | "\n", 651 | "- Gradient descent (GD)\n", 652 | "\n", 653 | "- SGD\n", 654 | "\n", 655 | "- MB-SGD with b=8\n", 656 | "\n", 657 | "- MB-SGD with b=64\n", 658 | "\n", 659 | "Follow the code in Section 4 to plot ```objective function value``` against ```epochs```. There should be four curves in the plot; each curve corresponds to one algorithm." 660 | ] 661 | }, 662 | { 663 | "cell_type": "markdown", 664 | "metadata": {}, 665 | "source": [ 666 | "Hint: Logistic regression with $\\ell_2$-norm regularization is a strongly convex optimization problem. All the algorithms will converge to the same solution. **In the end, the ``objective function value`` of the 4 algorithms will be the same. If not the same, your implementation must be wrong. Do NOT submit wrong code and wrong result!**" 667 | ] 668 | }, 669 | { 670 | "cell_type": "code", 671 | "execution_count": null, 672 | "metadata": {}, 673 | "outputs": [], 674 | "source": [ 675 | "# plot the 4 curves:" 676 | ] 677 | }, 678 | { 679 | "cell_type": "markdown", 680 | "metadata": {}, 681 | "source": [] 682 | } 683 | ], 684 | "metadata": { 685 | "kernelspec": { 686 | "display_name": "Python 3", 687 | "language": "python", 688 | "name": "python3" 689 | }, 690 | "language_info": { 691 | "codemirror_mode": { 692 | "name": "ipython", 693 | "version": 3 694 | }, 695 | "file_extension": ".py", 696 | "mimetype": "text/x-python", 697 | "name": "python", 698 | "nbconvert_exporter": "python", 699 | "pygments_lexer": "ipython3", 700 | "version": "3.8.3" 701 | } 702 | }, 703 | "nbformat": 4, 704 | "nbformat_minor": 2 705 | } 706 | -------------------------------------------------------------------------------- /homework/HM4/HM4.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Home 4: Build a CNN for image recognition.\n", 8 | "\n", 9 | "### Name: [Your-Name?]\n" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "## 0. You will do the following:\n", 17 | "\n", 18 | "1. Read, complete, and run the code.\n", 19 | "\n", 20 | "2. **Make substantial improvements** to maximize the accurcy.\n", 21 | " \n", 22 | "3. Convert the .IPYNB file to .HTML file.\n", 23 | "\n", 24 | " * The HTML file must contain the code and the output after execution.\n", 25 | " \n", 26 | " * Missing **the output after execution** will not be graded.\n", 27 | " \n", 28 | "4. Upload this .HTML file to your Google Drive, Dropbox, or Github repo. (If you submit the file to Google Drive or Dropbox, you must make the file \"open-access\". The delay caused by \"deny of access\" may result in late penalty.)\n", 29 | "\n", 30 | "4. Submit the link to this .HTML file to Canvas.\n", 31 | "\n", 32 | " * Example: https://github.com/wangshusen/CS583-2020S/blob/master/homework/HM4/HM4.html\n", 33 | "\n", 34 | "\n", 35 | "## Requirements:\n", 36 | "\n", 37 | "1. You can use whatever CNN architecture, including VGG, Inception, and ResNet. However, you must build the networks layer by layer. You must NOT import the archetectures from ```keras.applications```.\n", 38 | "\n", 39 | "2. Make sure ```BatchNormalization``` is between a ```Conv```/```Dense``` layer and an ```activation``` layer.\n", 40 | "\n", 41 | "3. 
If you want to regularize a ```Conv```/```Dense``` layer, you should place a ```Dropout``` layer **before** the ```Conv```/```Dense``` layer.\n", 42 | "\n", 43 | "4. An accuracy above 70% is considered reasonable. An accuracy above 80% is considered good. Without data augmentation, achieving 80% accuracy is difficult.\n", 44 | "\n", 45 | "\n", 46 | "## Google Colab\n", 47 | "\n", 48 | "- If you do not have a GPU, the training of a CNN can be slow. Google Colab is a good option.\n", 49 | "\n", 50 | "- Keep in mind that you must download the notebook as an .IPYNB file and then use IPython Notebook to convert it to HTML.\n", 51 | "\n", 52 | "- Also keep in mind that the IPYNB and HTML files must contain the outputs. (Otherwise, the instructor will not be able to verify the correctness and performance.) Do the following to keep the outputs.\n", 53 | "\n", 54 | "- In Colab, go to ```Runtime``` --> ```Change runtime type``` --> Do NOT check ```Omit code cell output when saving this notebook```. In this way, the downloaded IPYNB file contains the outputs." 55 | ] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "metadata": {}, 60 | "source": [ 61 | "## 1. Data preparation" 62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "### 1.1. Load data\n" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "from keras.datasets import cifar10\n", 78 | "import numpy\n", 79 | "\n", 80 | "(x_train, y_train), (x_test, y_test) = cifar10.load_data()\n", 81 | "\n", 82 | "print('shape of x_train: ' + str(x_train.shape))\n", 83 | "print('shape of y_train: ' + str(y_train.shape))\n", 84 | "print('shape of x_test: ' + str(x_test.shape))\n", 85 | "print('shape of y_test: ' + str(y_test.shape))\n", 86 | "print('number of classes: ' + str(numpy.max(y_train) - numpy.min(y_train) + 1))" 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "### 1.2. One-hot encode the labels\n", 94 | "\n", 95 | "In the input, a label is a scalar in $\\{0, 1, \\cdots , 9\\}$. One-hot encoding transforms such a scalar to a $10$-dim vector. E.g., a scalar ```y_train[j]=3``` is transformed to the vector ```y_train_vec[j]=[0, 0, 0, 1, 0, 0, 0, 0, 0, 0]```.\n", 96 | "\n", 97 | "1. Define a function ```to_one_hot``` that transforms an $n\\times 1$ array to an $n\\times 10$ matrix.\n", 98 | "\n", 99 | "2. Apply the function to ```y_train``` and ```y_test```." 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "def to_one_hot(y, num_class=10):\n", 109 | "    \n", 110 | "    ...\n", 111 | "\n", 112 | "y_train_vec = to_one_hot(y_train)\n", 113 | "y_test_vec = to_one_hot(y_test)\n", 114 | "\n", 115 | "print('Shape of y_train_vec: ' + str(y_train_vec.shape))\n", 116 | "print('Shape of y_test_vec: ' + str(y_test_vec.shape))\n", 117 | "\n", 118 | "print(y_train[0])\n", 119 | "print(y_train_vec[0])" 120 | ] 121 | }, 122 | { 123 | "cell_type": "markdown", 124 | "metadata": {}, 125 | "source": [ 126 | "#### Remark: the outputs should be\n", 127 | "* Shape of y_train_vec: (50000, 10)\n", 128 | "* Shape of y_test_vec: (10000, 10)\n", 129 | "* [6]\n", 130 | "* [0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]" 131 | ] 132 | }, 133 | { 134 | "cell_type": "markdown", 135 | "metadata": {}, 136 | "source": [ 137 | "### 1.3. 
Randomly partition the training set to training and validation sets\n", 138 | "\n", 139 | "Randomly partition the 50K training samples to 2 sets:\n", 140 | "* a training set containing 40K samples\n", 141 | "* a validation set containing 10K samples\n" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": null, 147 | "metadata": {}, 148 | "outputs": [], 149 | "source": [ 150 | "rand_indices = numpy.random.permutation(50000)\n", 151 | "train_indices = rand_indices[0:40000]\n", 152 | "valid_indices = rand_indices[40000:50000]\n", 153 | "\n", 154 | "x_val = x_train[valid_indices, :]\n", 155 | "y_val = y_train_vec[valid_indices, :]\n", 156 | "\n", 157 | "x_tr = x_train[train_indices, :]\n", 158 | "y_tr = y_train_vec[train_indices, :]\n", 159 | "\n", 160 | "print('Shape of x_tr: ' + str(x_tr.shape))\n", 161 | "print('Shape of y_tr: ' + str(y_tr.shape))\n", 162 | "print('Shape of x_val: ' + str(x_val.shape))\n", 163 | "print('Shape of y_val: ' + str(y_val.shape))" 164 | ] 165 | }, 166 | { 167 | "cell_type": "markdown", 168 | "metadata": {}, 169 | "source": [ 170 | "## 2. Build a CNN and tune its hyper-parameters\n", 171 | "\n", 172 | "1. Build a convolutional neural network model\n", 173 | "2. Use the validation data to tune the hyper-parameters (e.g., network structure, and optimization algorithm)\n", 174 | " * Do NOT use test data for hyper-parameter tuning!!!\n", 175 | "3. Try to achieve a validation accuracy as high as possible." 176 | ] 177 | }, 178 | { 179 | "cell_type": "markdown", 180 | "metadata": {}, 181 | "source": [ 182 | "### Remark: \n", 183 | "\n", 184 | "The following CNN is just an example. You are supposed to make **substantial improvements** such as:\n", 185 | "* Add more layers.\n", 186 | "* Use regularizations, e.g., dropout.\n", 187 | "* Use batch normalization." 
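As one concrete (editorial) illustration of combining those three suggestions with the requirements above, a deeper block could be structured like the sketch below: `BatchNormalization` sits between each `Conv`/`Dense` layer and its activation, and `Dropout` is placed before the layer it regularizes. All widths, depths, and rates are placeholders to tune, not recommended values.

```python
# Editorial sketch of a Conv-BN-ReLU stack with Dropout placed before the next
# Conv/Dense layer, per the requirements above. Hyper-parameters are placeholders.
from keras.layers import (Conv2D, MaxPooling2D, Flatten, Dense, Activation,
                          BatchNormalization, Dropout)
from keras.models import Sequential

sketch = Sequential()
sketch.add(Conv2D(32, (3, 3), padding='same', input_shape=(32, 32, 3)))
sketch.add(BatchNormalization())   # BN between Conv and activation
sketch.add(Activation('relu'))
sketch.add(MaxPooling2D((2, 2)))
sketch.add(Dropout(0.25))          # Dropout before the next Conv layer
sketch.add(Conv2D(64, (3, 3), padding='same'))
sketch.add(BatchNormalization())
sketch.add(Activation('relu'))
sketch.add(MaxPooling2D((2, 2)))
sketch.add(Flatten())
sketch.add(Dropout(0.5))           # Dropout before the Dense layer
sketch.add(Dense(128))
sketch.add(BatchNormalization())
sketch.add(Activation('relu'))
sketch.add(Dense(10, activation='softmax'))
```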
188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense\n", 197 | "from keras.models import Sequential\n", 198 | "\n", 199 | "model = Sequential()\n", 200 | "model.add(Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 3)))\n", 201 | "model.add(MaxPooling2D((2, 2)))\n", 202 | "model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))\n", 203 | "model.add(MaxPooling2D((2, 2)))\n", 204 | "model.add(Flatten())\n", 205 | "model.add(Dense(128, activation='relu'))\n", 206 | "model.add(Dense(10, activation='softmax'))\n", 207 | "\n", 208 | "model.summary()" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": null, 214 | "metadata": {}, 215 | "outputs": [], 216 | "source": [ 217 | "from keras import optimizers\n", 218 | "\n", 219 | "learning_rate = 1E-5 # to be tuned!\n", 220 | "\n", 221 | "model.compile(loss='categorical_crossentropy',\n", 222 | "              optimizer=optimizers.RMSprop(lr=learning_rate),\n", 223 | "              metrics=['acc'])" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "history = model.fit(x_tr, y_tr, batch_size=32, epochs=10, validation_data=(x_val, y_val))" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "import matplotlib.pyplot as plt\n", 242 | "%matplotlib inline\n", 243 | "\n", 244 | "acc = history.history['acc']\n", 245 | "val_acc = history.history['val_acc']\n", 246 | "\n", 247 | "epochs = range(len(acc))\n", 248 | "\n", 249 | "plt.plot(epochs, acc, 'bo', label='Training acc')\n", 250 | "plt.plot(epochs, val_acc, 'r', label='Validation acc')\n", 251 | "plt.xlabel('Epochs')\n", 252 | "plt.ylabel('Accuracy')\n", 253 | "plt.legend()\n", 254 | "plt.show()" 255 | ] 256 | }, 257 | { 258 | "cell_type": "markdown", 259 | "metadata": {}, 260 | "source": [ 261 | "## 3. Train (again) and evaluate the model\n", 262 | "\n", 263 | "- By this point, you have found the \"best\" hyper-parameters. \n", 264 | "- Now, fix the hyper-parameters and train the network on the entire training set (all the 50K training samples).\n", 265 | "- Evaluate your model on the test set." 266 | ] 267 | }, 268 | { 269 | "cell_type": "markdown", 270 | "metadata": {}, 271 | "source": [ 272 | "### 3.1. Train the model on the entire training set\n", 273 | "\n", 274 | "Why? Previously, you used 40K samples for training; you wasted 10K samples for the sake of hyper-parameter tuning. Now you already know the hyper-parameters, so why not use all the 50K samples for training?" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": null, 280 | "metadata": {}, 281 | "outputs": [], 282 | "source": [ 283 | "\n", 284 | "..." 285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": null, 290 | "metadata": {}, 291 | "outputs": [], 292 | "source": [ 293 | "\n", 294 | "\n", 295 | "\n", 296 | "..." 297 | ] 298 | }, 299 | { 300 | "cell_type": "markdown", 301 | "metadata": {}, 302 | "source": [ 303 | "### 3.2. Evaluate the model on the test set\n", 304 | "\n", 305 | "Do NOT use the test set until now. Make sure that your model parameters and hyper-parameters are independent of the test set." 
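The two placeholder cells in Section 3.1 above are intentionally left blank for you to fill. As a hedged sketch of what they might contain: rebuild the same architecture, compile with your tuned learning rate, and fit on all 50K samples. Here `build_model` is a hypothetical helper standing in for your final architecture, so the lines are left commented out.

```python
# Editorial sketch for the Section 3.1 placeholders above. build_model() is a
# hypothetical helper that re-creates your final CNN; the learning rate below is
# a placeholder for your tuned value.
# model = build_model()
# model.compile(loss='categorical_crossentropy',
#               optimizer=optimizers.RMSprop(lr=1E-4),
#               metrics=['acc'])
# history = model.fit(x_train, y_train_vec, batch_size=32, epochs=10)
```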
306 | ] 307 | }, 308 | { 309 | "cell_type": "code", 310 | "execution_count": null, 311 | "metadata": {}, 312 | "outputs": [], 313 | "source": [ 314 | "loss_and_acc = model.evaluate(x_test, y_test_vec)\n", 315 | "print('loss = ' + str(loss_and_acc[0]))\n", 316 | "print('accuracy = ' + str(loss_and_acc[1]))" 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "execution_count": null, 322 | "metadata": {}, 323 | "outputs": [], 324 | "source": [] 325 | } 326 | ], 327 | "metadata": { 328 | "kernelspec": { 329 | "display_name": "Python 3", 330 | "language": "python", 331 | "name": "python3" 332 | }, 333 | "language_info": { 334 | "codemirror_mode": { 335 | "name": "ipython", 336 | "version": 3 337 | }, 338 | "file_extension": ".py", 339 | "mimetype": "text/x-python", 340 | "name": "python", 341 | "nbconvert_exporter": "python", 342 | "pygments_lexer": "ipython3", 343 | "version": "3.6.4" 344 | } 345 | }, 346 | "nbformat": 4, 347 | "nbformat_minor": 2 348 | } 349 | -------------------------------------------------------------------------------- /homework/HM5/HM5.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Homework 5: Build a seq2seq model for machine translation.\n", 8 | "\n", 9 | "### Name: [Your-Name?]\n", 10 | "\n", 11 | "### Task: Translate English to [what-language?]" 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": {}, 17 | "source": [ 18 | "## 0. You will do the following:\n", 19 | "\n", 20 | "1. Read and run my code.\n", 21 | "2. Complete the code in Section 1.1 and Section 4.2.\n", 22 | "\n", 23 | "    * Translating **English** to **German** is not acceptable!!! Try another pair of languages.\n", 24 | "    \n", 25 | "3. **Make improvements.** Directly modify the code in Section 3. Do at least one of the two. By doing both correctly, you will get up to 1 bonus score added to the total.\n", 26 | "\n", 27 | "    * Bi-LSTM instead of LSTM.\n", 28 | "    \n", 29 | "    * Attention. (You are allowed to use existing code.)\n", 30 | "    \n", 31 | "4. Evaluate the translation using the BLEU score. \n", 32 | "\n", 33 | "    * Optional. Up to 1 bonus score added to the total.\n", 34 | "    \n", 35 | "5. Convert the notebook to .HTML file. \n", 36 | "\n", 37 | "    * The HTML file must contain the code and the output after execution.\n", 38 | "\n", 39 | "6. Put the .HTML file in your Google Drive, Dropbox, or Github repo. (If you submit the file to Google Drive or Dropbox, you must make the file \"open-access\". The delay caused by \"deny of access\" may result in late penalty.)\n", 40 | "\n", 41 | "7. Submit the link to the HTML file to Canvas. \n" 42 | ] 43 | }, 44 | { 45 | "cell_type": "markdown", 46 | "metadata": {}, 47 | "source": [ 48 | "### Hint: \n", 49 | "\n", 50 | "To implement ```Bi-LSTM```, you will need the following code to build the encoder. Do NOT use Bi-LSTM for the decoder." 
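One caveat worth adding to the hint code below: `Concatenate` doubles the state width, so the decoder that consumes these states must be built with `2 * latent_dim` units, with matching `Input` shapes. This is an editorial note; it assumes the variable names used later in this notebook, so the lines are shown commented out.

```python
# Editorial sketch (assumes the variable names used later in this notebook).
# With a bidirectional encoder, the concatenated states are 2*latent_dim wide,
# so the decoder width must match:
# decoder_input_h = Input(shape=(2 * latent_dim,), name='decoder_input_h')
# decoder_input_c = Input(shape=(2 * latent_dim,), name='decoder_input_c')
# decoder_lstm = LSTM(2 * latent_dim, return_sequences=True,
#                     return_state=True, dropout=0.5, name='decoder_lstm')
```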
51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "from keras.layers import Bidirectional, Concatenate\n", 60 | "\n", 61 | "encoder_bilstm = Bidirectional(LSTM(latent_dim, return_state=True, \n", 62 | " dropout=0.5, name='encoder_lstm'))\n", 63 | "_, forward_h, forward_c, backward_h, backward_c = encoder_bilstm(encoder_inputs)\n", 64 | "\n", 65 | "state_h = Concatenate()([forward_h, backward_h])\n", 66 | "state_c = Concatenate()([forward_c, backward_c])" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "metadata": {}, 72 | "source": [ 73 | "## 1. Data preparation\n", 74 | "\n", 75 | "1. Download data (e.g., \"deu-eng.zip\") from http://www.manythings.org/anki/\n", 76 | "2. Unzip the .ZIP file.\n", 77 | "3. Put the .TXT file (e.g., \"deu.txt\") in the directory \"./Data/\"." 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "### 1.1. Load and clean text\n" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "import re\n", 94 | "import string\n", 95 | "from unicodedata import normalize\n", 96 | "import numpy\n", 97 | "\n", 98 | "# load doc into memory\n", 99 | "def load_doc(filename):\n", 100 | " # open the file as read only\n", 101 | " file = open(filename, mode='rt', encoding='utf-8')\n", 102 | " # read all text\n", 103 | " text = file.read()\n", 104 | " # close the file\n", 105 | " file.close()\n", 106 | " return text\n", 107 | "\n", 108 | "\n", 109 | "# split a loaded document into sentences\n", 110 | "def to_pairs(doc):\n", 111 | " lines = doc.strip().split('\\n')\n", 112 | " pairs = [line.split('\\t') for line in lines]\n", 113 | " return pairs\n", 114 | "\n", 115 | "def clean_data(lines):\n", 116 | " cleaned = list()\n", 117 | " # prepare regex for char filtering\n", 118 | " re_print = re.compile('[^%s]' % re.escape(string.printable))\n", 119 | " # prepare translation table for removing punctuation\n", 120 | " table = str.maketrans('', '', string.punctuation)\n", 121 | " for pair in lines:\n", 122 | " clean_pair = list()\n", 123 | " for line in pair:\n", 124 | " # normalize unicode characters\n", 125 | " line = normalize('NFD', line).encode('ascii', 'ignore')\n", 126 | " line = line.decode('UTF-8')\n", 127 | " # tokenize on white space\n", 128 | " line = line.split()\n", 129 | " # convert to lowercase\n", 130 | " line = [word.lower() for word in line]\n", 131 | " # remove punctuation from each token\n", 132 | " line = [word.translate(table) for word in line]\n", 133 | " # remove non-printable chars form each token\n", 134 | " line = [re_print.sub('', w) for w in line]\n", 135 | " # remove tokens with numbers in them\n", 136 | " line = [word for word in line if word.isalpha()]\n", 137 | " # store as string\n", 138 | " clean_pair.append(' '.join(line))\n", 139 | " cleaned.append(clean_pair)\n", 140 | " return numpy.array(cleaned)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "#### Fill the following blanks:" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "# e.g., filename = 'Data/deu.txt'\n", 157 | "filename = \n", 158 | "\n", 159 | "# e.g., n_train = 20000\n", 160 | "n_train = " 161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": null, 166 | "metadata": {}, 167 | "outputs": 
[], 168 | "source": [ 169 | "# load dataset\n", 170 | "doc = load_doc(filename)\n", 171 | "\n", 172 | "# split into Language1-Language2 pairs\n", 173 | "pairs = to_pairs(doc)\n", 174 | "\n", 175 | "# clean sentences\n", 176 | "clean_pairs = clean_data(pairs)[0:n_train, :]" 177 | ] 178 | }, 179 | { 180 | "cell_type": "code", 181 | "execution_count": null, 182 | "metadata": {}, 183 | "outputs": [], 184 | "source": [ 185 | "for i in range(3000, 3010):\n", 186 | "    print('[' + clean_pairs[i, 0] + '] => [' + clean_pairs[i, 1] + ']')" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": null, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "input_texts = clean_pairs[:, 0]\n", 196 | "target_texts = ['\\t' + text + '\\n' for text in clean_pairs[:, 1]]\n", 197 | "\n", 198 | "print('Length of input_texts: ' + str(len(input_texts)))\n", 199 | "print('Length of target_texts: ' + str(len(target_texts)))" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": null, 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "max_encoder_seq_length = max(len(line) for line in input_texts)\n", 209 | "max_decoder_seq_length = max(len(line) for line in target_texts)\n", 210 | "\n", 211 | "print('max length of input sentences: %d' % (max_encoder_seq_length))\n", 212 | "print('max length of target sentences: %d' % (max_decoder_seq_length))" 213 | ] 214 | }, 215 | { 216 | "cell_type": "markdown", 217 | "metadata": {}, 218 | "source": [ 219 | "**Remark:** At this point, you have two lists of sentences: input_texts and target_texts" 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": {}, 225 | "source": [ 226 | "## 2. Text processing\n", 227 | "\n", 228 | "### 2.1. Convert texts to sequences\n", 229 | "\n", 230 | "- Input: A list of $n$ sentences (with max length $t$).\n", 231 | "- It is represented by an $n\\times t$ matrix after the tokenization and zero-padding." 
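As a small editorial illustration of what `text2sequences` below produces (char-level tokenization followed by post-padding), consider a toy input. Token indices depend on character frequencies, so the exact numbers may differ.

```python
# Editorial illustration with toy data (not the course dataset).
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

toy = ['hi', 'hey']
tok = Tokenizer(char_level=True, filters='')
tok.fit_on_texts(toy)
seqs = tok.texts_to_sequences(toy)               # likely [[1, 2], [1, 3, 4]]
padded = pad_sequences(seqs, maxlen=3, padding='post')
print(padded)                                    # a 2-by-3 integer matrix, e.g. [[1 2 0] [1 3 4]]
```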
232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": null, 237 | "metadata": {}, 238 | "outputs": [], 239 | "source": [ 240 | "from keras.preprocessing.text import Tokenizer\n", 241 | "from keras.preprocessing.sequence import pad_sequences\n", 242 | "\n", 243 | "# encode and pad sequences\n", 244 | "def text2sequences(max_len, lines):\n", 245 | "    tokenizer = Tokenizer(char_level=True, filters='')\n", 246 | "    tokenizer.fit_on_texts(lines)\n", 247 | "    seqs = tokenizer.texts_to_sequences(lines)\n", 248 | "    seqs_pad = pad_sequences(seqs, maxlen=max_len, padding='post')\n", 249 | "    return seqs_pad, tokenizer.word_index\n", 250 | "\n", 251 | "\n", 252 | "encoder_input_seq, input_token_index = text2sequences(max_encoder_seq_length, \n", 253 | "                                                      input_texts)\n", 254 | "decoder_input_seq, target_token_index = text2sequences(max_decoder_seq_length, \n", 255 | "                                                       target_texts)\n", 256 | "\n", 257 | "print('shape of encoder_input_seq: ' + str(encoder_input_seq.shape))\n", 258 | "print('size of input_token_index: ' + str(len(input_token_index)))\n", 259 | "print('shape of decoder_input_seq: ' + str(decoder_input_seq.shape))\n", 260 | "print('size of target_token_index: ' + str(len(target_token_index)))" 261 | ] 262 | }, 263 | { 264 | "cell_type": "code", 265 | "execution_count": null, 266 | "metadata": {}, 267 | "outputs": [], 268 | "source": [ 269 | "num_encoder_tokens = len(input_token_index) + 1\n", 270 | "num_decoder_tokens = len(target_token_index) + 1\n", 271 | "\n", 272 | "print('num_encoder_tokens: ' + str(num_encoder_tokens))\n", 273 | "print('num_decoder_tokens: ' + str(num_decoder_tokens))" 274 | ] 275 | }, 276 | { 277 | "cell_type": "markdown", 278 | "metadata": {}, 279 | "source": [ 280 | "**Remark:** At this point, the input language and target language texts are converted to 2 matrices. \n", 281 | "\n", 282 | "- Both have n_train rows.\n", 283 | "- Their numbers of columns are max_encoder_seq_length and max_decoder_seq_length, respectively." 284 | ] 285 | }, 286 | { 287 | "cell_type": "markdown", 288 | "metadata": {}, 289 | "source": [ 290 | "The following prints a sentence and its representation as a sequence." 291 | ] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": null, 296 | "metadata": {}, 297 | "outputs": [], 298 | "source": [ 299 | "target_texts[100]" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": null, 305 | "metadata": {}, 306 | "outputs": [], 307 | "source": [ 308 | "decoder_input_seq[100, :]" 309 | ] 310 | }, 311 | { 312 | "cell_type": "markdown", 313 | "metadata": {}, 314 | "source": [ 315 | "## 2.2. One-hot encode\n", 316 | "\n", 317 | "- Input: A list of $n$ sentences (with max length $t$).\n", 318 | "- It is represented by an $n\\times t$ matrix after the tokenization and zero-padding.\n", 319 | "- It is represented by an $n\\times t \\times v$ tensor ($v$ is the number of unique chars) after the one-hot encoding." 
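A quick editorial shape check of the claim above, using `to_categorical` on a toy index matrix (index 0 is reserved for padding, so it occupies one of the $v$ channels):

```python
# Editorial illustration: an n-by-t index matrix becomes an n-by-t-by-v tensor.
from keras.utils import to_categorical
import numpy

seq = numpy.array([[1, 2, 0]])                # n=1 sentence, t=3 time steps
onehot = to_categorical(seq, num_classes=5)   # v=5 (0 is the padding token)
print(onehot.shape)                           # (1, 3, 5)
```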
320 | ] 321 | }, 322 | { 323 | "cell_type": "code", 324 | "execution_count": null, 325 | "metadata": {}, 326 | "outputs": [], 327 | "source": [ 328 | "from keras.utils import to_categorical\n", 329 | "\n", 330 | "# one hot encode target sequence\n", 331 | "def onehot_encode(sequences, max_len, vocab_size):\n", 332 | " n = len(sequences)\n", 333 | " data = numpy.zeros((n, max_len, vocab_size))\n", 334 | " for i in range(n):\n", 335 | " data[i, :, :] = to_categorical(sequences[i], num_classes=vocab_size)\n", 336 | " return data\n", 337 | "\n", 338 | "encoder_input_data = onehot_encode(encoder_input_seq, max_encoder_seq_length, num_encoder_tokens)\n", 339 | "decoder_input_data = onehot_encode(decoder_input_seq, max_decoder_seq_length, num_decoder_tokens)\n", 340 | "\n", 341 | "decoder_target_seq = numpy.zeros(decoder_input_seq.shape)\n", 342 | "decoder_target_seq[:, 0:-1] = decoder_input_seq[:, 1:]\n", 343 | "decoder_target_data = onehot_encode(decoder_target_seq, \n", 344 | " max_decoder_seq_length, \n", 345 | " num_decoder_tokens)\n", 346 | "\n", 347 | "print(encoder_input_data.shape)\n", 348 | "print(decoder_input_data.shape)" 349 | ] 350 | }, 351 | { 352 | "cell_type": "markdown", 353 | "metadata": {}, 354 | "source": [ 355 | "## 3. Build the networks (for training)\n", 356 | "\n", 357 | "- Build encoder, decoder, and connect the two modules to get \"model\". \n", 358 | "\n", 359 | "- Fit the model on the bilingual data to train the parameters in the encoder and decoder." 360 | ] 361 | }, 362 | { 363 | "cell_type": "markdown", 364 | "metadata": {}, 365 | "source": [ 366 | "### 3.1. Encoder network\n", 367 | "\n", 368 | "- Input: one-hot encode of the input language\n", 369 | "\n", 370 | "- Return: \n", 371 | "\n", 372 | " -- output (all the hidden states $h_1, \\cdots , h_t$) are always discarded\n", 373 | " \n", 374 | " -- the final hidden state $h_t$\n", 375 | " \n", 376 | " -- the final conveyor belt $c_t$" 377 | ] 378 | }, 379 | { 380 | "cell_type": "code", 381 | "execution_count": null, 382 | "metadata": {}, 383 | "outputs": [], 384 | "source": [ 385 | "from keras.layers import Input, LSTM\n", 386 | "from keras.models import Model\n", 387 | "\n", 388 | "latent_dim = 256\n", 389 | "\n", 390 | "# inputs of the encoder network\n", 391 | "encoder_inputs = Input(shape=(None, num_encoder_tokens), \n", 392 | " name='encoder_inputs')\n", 393 | "\n", 394 | "# set the LSTM layer\n", 395 | "encoder_lstm = LSTM(latent_dim, return_state=True, \n", 396 | " dropout=0.5, name='encoder_lstm')\n", 397 | "_, state_h, state_c = encoder_lstm(encoder_inputs)\n", 398 | "\n", 399 | "# build the encoder network model\n", 400 | "encoder_model = Model(inputs=encoder_inputs, \n", 401 | " outputs=[state_h, state_c],\n", 402 | " name='encoder')" 403 | ] 404 | }, 405 | { 406 | "cell_type": "markdown", 407 | "metadata": {}, 408 | "source": [ 409 | "Print a summary and save the encoder network structure to \"./encoder.pdf\"" 410 | ] 411 | }, 412 | { 413 | "cell_type": "code", 414 | "execution_count": null, 415 | "metadata": {}, 416 | "outputs": [], 417 | "source": [ 418 | "from IPython.display import SVG\n", 419 | "from keras.utils.vis_utils import model_to_dot, plot_model\n", 420 | "\n", 421 | "SVG(model_to_dot(encoder_model, show_shapes=False).create(prog='dot', format='svg'))\n", 422 | "\n", 423 | "plot_model(\n", 424 | " model=encoder_model, show_shapes=False,\n", 425 | " to_file='encoder.pdf'\n", 426 | ")\n", 427 | "\n", 428 | "encoder_model.summary()" 429 | ] 430 | }, 431 | { 432 | "cell_type": "markdown", 433 
| "metadata": {}, 434 | "source": [ 435 | "### 3.2. Decoder network\n", 436 | "\n", 437 | "- Inputs: \n", 438 | "\n", 439 | " -- one-hot encode of the target language\n", 440 | " \n", 441 | " -- The initial hidden state $h_t$ \n", 442 | " \n", 443 | " -- The initial conveyor belt $c_t$ \n", 444 | "\n", 445 | "- Return: \n", 446 | "\n", 447 | " -- output (all the hidden states) $h_1, \\cdots , h_t$\n", 448 | "\n", 449 | " -- the final hidden state $h_t$ (discarded in the training and used in the prediction)\n", 450 | " \n", 451 | " -- the final conveyor belt $c_t$ (discarded in the training and used in the prediction)" 452 | ] 453 | }, 454 | { 455 | "cell_type": "code", 456 | "execution_count": null, 457 | "metadata": {}, 458 | "outputs": [], 459 | "source": [ 460 | "from keras.layers import Input, LSTM, Dense\n", 461 | "from keras.models import Model\n", 462 | "\n", 463 | "# inputs of the decoder network\n", 464 | "decoder_input_h = Input(shape=(latent_dim,), name='decoder_input_h')\n", 465 | "decoder_input_c = Input(shape=(latent_dim,), name='decoder_input_c')\n", 466 | "decoder_input_x = Input(shape=(None, num_decoder_tokens), name='decoder_input_x')\n", 467 | "\n", 468 | "# set the LSTM layer\n", 469 | "decoder_lstm = LSTM(latent_dim, return_sequences=True, \n", 470 | " return_state=True, dropout=0.5, name='decoder_lstm')\n", 471 | "decoder_lstm_outputs, state_h, state_c = decoder_lstm(decoder_input_x, \n", 472 | " initial_state=[decoder_input_h, decoder_input_c])\n", 473 | "\n", 474 | "# set the dense layer\n", 475 | "decoder_dense = Dense(num_decoder_tokens, activation='softmax', name='decoder_dense')\n", 476 | "decoder_outputs = decoder_dense(decoder_lstm_outputs)\n", 477 | "\n", 478 | "# build the decoder network model\n", 479 | "decoder_model = Model(inputs=[decoder_input_x, decoder_input_h, decoder_input_c],\n", 480 | " outputs=[decoder_outputs, state_h, state_c],\n", 481 | " name='decoder')" 482 | ] 483 | }, 484 | { 485 | "cell_type": "markdown", 486 | "metadata": {}, 487 | "source": [ 488 | "Print a summary and save the encoder network structure to \"./decoder.pdf\"" 489 | ] 490 | }, 491 | { 492 | "cell_type": "code", 493 | "execution_count": null, 494 | "metadata": {}, 495 | "outputs": [], 496 | "source": [ 497 | "from IPython.display import SVG\n", 498 | "from keras.utils.vis_utils import model_to_dot, plot_model\n", 499 | "\n", 500 | "SVG(model_to_dot(decoder_model, show_shapes=False).create(prog='dot', format='svg'))\n", 501 | "\n", 502 | "plot_model(\n", 503 | " model=decoder_model, show_shapes=False,\n", 504 | " to_file='decoder.pdf'\n", 505 | ")\n", 506 | "\n", 507 | "decoder_model.summary()" 508 | ] 509 | }, 510 | { 511 | "cell_type": "markdown", 512 | "metadata": {}, 513 | "source": [ 514 | "### 3.3. 
Connect the encoder and decoder" 515 | ] 516 | }, 517 | { 518 | "cell_type": "code", 519 | "execution_count": null, 520 | "metadata": {}, 521 | "outputs": [], 522 | "source": [ 523 | "# input layers\n", 524 | "encoder_input_x = Input(shape=(None, num_encoder_tokens), name='encoder_input_x')\n", 525 | "decoder_input_x = Input(shape=(None, num_decoder_tokens), name='decoder_input_x')\n", 526 | "\n", 527 | "# connect encoder to decoder\n", 528 | "encoder_final_states = encoder_model([encoder_input_x])\n", 529 | "decoder_lstm_output, _, _ = decoder_lstm(decoder_input_x, initial_state=encoder_final_states)\n", 530 | "decoder_pred = decoder_dense(decoder_lstm_output)\n", 531 | "\n", 532 | "model = Model(inputs=[encoder_input_x, decoder_input_x], \n", 533 | "              outputs=decoder_pred, \n", 534 | "              name='model_training')" 535 | ] 536 | }, 537 | { 538 | "cell_type": "code", 539 | "execution_count": null, 540 | "metadata": {}, 541 | "outputs": [], 542 | "source": [ 543 | "print(state_h)\n", 544 | "print(decoder_input_h)" 545 | ] 546 | }, 547 | { 548 | "cell_type": "code", 549 | "execution_count": null, 550 | "metadata": {}, 551 | "outputs": [], 552 | "source": [ 553 | "from IPython.display import SVG\n", 554 | "from keras.utils.vis_utils import model_to_dot, plot_model\n", 555 | "\n", 556 | "SVG(model_to_dot(model, show_shapes=False).create(prog='dot', format='svg'))\n", 557 | "\n", 558 | "plot_model(\n", 559 | "    model=model, show_shapes=False,\n", 560 | "    to_file='model_training.pdf'\n", 561 | ")\n", 562 | "\n", 563 | "model.summary()" 564 | ] 565 | }, 566 | { 567 | "cell_type": "markdown", 568 | "metadata": {}, 569 | "source": [ 570 | "### 3.4. Fit the model on the bilingual dataset\n", 571 | "\n", 572 | "- encoder_input_data: one-hot encode of the input language\n", 573 | "\n", 574 | "- decoder_input_data: one-hot encode of the target language\n", 575 | "\n", 576 | "- decoder_target_data: labels (left shift of decoder_input_data)\n", 577 | "\n", 578 | "- tune the hyper-parameters\n", 579 | "\n", 580 | "- stop when the validation loss stops decreasing." 581 | ] 582 | }, 583 | { 584 | "cell_type": "code", 585 | "execution_count": null, 586 | "metadata": {}, 587 | "outputs": [], 588 | "source": [ 589 | "print('shape of encoder_input_data: ' + str(encoder_input_data.shape))\n", 590 | "print('shape of decoder_input_data: ' + str(decoder_input_data.shape))\n", 591 | "print('shape of decoder_target_data: ' + str(decoder_target_data.shape))" 592 | ] 593 | }, 594 | { 595 | "cell_type": "code", 596 | "execution_count": null, 597 | "metadata": {}, 598 | "outputs": [], 599 | "source": [ 600 | "model.compile(optimizer='rmsprop', loss='categorical_crossentropy')\n", 601 | "\n", 602 | "model.fit([encoder_input_data, decoder_input_data],  # training data\n", 603 | "          decoder_target_data,                       # labels (left shift of the target sequences)\n", 604 | "          batch_size=64, epochs=50, validation_split=0.2)\n", 605 | "\n", 606 | "model.save('seq2seq.h5')" 607 | ] 608 | }, 609 | { 610 | "cell_type": "markdown", 611 | "metadata": {}, 612 | "source": [ 613 | "## 4. Make predictions\n", 614 | "\n", 615 | "\n", 616 | "### 4.1. Translate English to XXX\n", 617 | "\n", 618 | "1. The encoder reads a sentence (source language) and outputs its final states, $h_t$ and $c_t$.\n", 619 | "2. Take the [start] sign \"\\t\" and the final states $h_t$ and $c_t$ as input and run the decoder.\n", 620 | "3. Get the new states and predicted probability distribution.\n", 621 | "4. Sample a char from the predicted probability distribution.\n", 622 | "5. 
Take the sampled char and the new states as input and repeat the process (stop if it reaches the [stop] sign \"\\n\")." 623 | ] 624 | }, 625 | { 626 | "cell_type": "code", 627 | "execution_count": null, 628 | "metadata": {}, 629 | "outputs": [], 630 | "source": [ 631 | "# Reverse-lookup token index to decode sequences back to something readable.\n", 632 | "reverse_input_char_index = dict((i, char) for char, i in input_token_index.items())\n", 633 | "reverse_target_char_index = dict((i, char) for char, i in target_token_index.items())" 634 | ] 635 | }, 636 | { 637 | "cell_type": "code", 638 | "execution_count": null, 639 | "metadata": {}, 640 | "outputs": [], 641 | "source": [ 642 | "def decode_sequence(input_seq):\n", 643 | "    states_value = encoder_model.predict(input_seq)\n", 644 | "\n", 645 | "    target_seq = numpy.zeros((1, 1, num_decoder_tokens))\n", 646 | "    target_seq[0, 0, target_token_index['\\t']] = 1.\n", 647 | "\n", 648 | "    stop_condition = False\n", 649 | "    decoded_sentence = ''\n", 650 | "    while not stop_condition:\n", 651 | "        output_tokens, h, c = decoder_model.predict([target_seq] + states_value)\n", 652 | "\n", 653 | "        # this line of code is greedy selection\n", 654 | "        # try to use multinomial sampling instead (with temperature)\n", 655 | "        sampled_token_index = numpy.argmax(output_tokens[0, -1, :])\n", 656 | "        \n", 657 | "        sampled_char = reverse_target_char_index[sampled_token_index]\n", 658 | "        decoded_sentence += sampled_char\n", 659 | "\n", 660 | "        if (sampled_char == '\\n' or\n", 661 | "           len(decoded_sentence) > max_decoder_seq_length):\n", 662 | "            stop_condition = True\n", 663 | "\n", 664 | "        target_seq = numpy.zeros((1, 1, num_decoder_tokens))\n", 665 | "        target_seq[0, 0, sampled_token_index] = 1.\n", 666 | "\n", 667 | "        states_value = [h, c]\n", 668 | "\n", 669 | "    return decoded_sentence\n" 670 | ] 671 | }, 672 | { 673 | "cell_type": "code", 674 | "execution_count": null, 675 | "metadata": {}, 676 | "outputs": [], 677 | "source": [ 678 | "for seq_index in range(2100, 2120):\n", 679 | "    # Take one sequence (part of the training set)\n", 680 | "    # for trying out decoding.\n", 681 | "    input_seq = encoder_input_data[seq_index: seq_index + 1]\n", 682 | "    decoded_sentence = decode_sequence(input_seq)\n", 683 | "    print('-')\n", 684 | "    print('English:       ', input_texts[seq_index])\n", 685 | "    print('German (true): ', target_texts[seq_index][1:-1])\n", 686 | "    print('German (pred): ', decoded_sentence[0:-1])\n" 687 | ] 688 | }, 689 | { 690 | "cell_type": "markdown", 691 | "metadata": {}, 692 | "source": [ 693 | "### 4.2. Translate an English sentence to the target language\n", 694 | "\n", 695 | "1. Tokenization\n", 696 | "2. One-hot encode\n", 697 | "3. Translate" 698 | ] 699 | }, 700 | { 701 | "cell_type": "code", 702 | "execution_count": null, 703 | "metadata": {}, 704 | "outputs": [], 705 | "source": [ 706 | "input_sentence = 'I love you'\n", 707 | "\n", 708 | "input_sequence = \n", 709 | "\n", 710 | "input_x = \n", 711 | "\n", 712 | "translated_sentence = \n", 713 | "\n", 714 | "print('source sentence is: ' + input_sentence)\n", 715 | "print('translated sentence is: ' + translated_sentence)" 716 | ] 717 | }, 718 | { 719 | "cell_type": "markdown", 720 | "metadata": {}, 721 | "source": [ 722 | "## 5. 
Evaluate the translation using BLEU score\n", 723 | "\n", 724 | "Reference: \n", 725 | "- https://machinelearningmastery.com/calculate-bleu-score-for-text-python/\n", 726 | "- https://en.wikipedia.org/wiki/BLEU\n", 727 | "\n", 728 | "\n", 729 | "**Hint:** \n", 730 | "\n", 731 | "- Randomly partition the dataset to training, validation, and test. \n", 732 | "\n", 733 | "- Evaluate the BLEU score using the test set. Report the average.\n", 734 | "\n", 735 | "- A reasonable BLEU score should be 0.1 ~ 0.5." 736 | ] 737 | }, 738 | { 739 | "cell_type": "code", 740 | "execution_count": null, 741 | "metadata": {}, 742 | "outputs": [], 743 | "source": [] 744 | } 745 | ], 746 | "metadata": { 747 | "kernelspec": { 748 | "display_name": "Python 3", 749 | "language": "python", 750 | "name": "python3" 751 | }, 752 | "language_info": { 753 | "codemirror_mode": { 754 | "name": "ipython", 755 | "version": 3 756 | }, 757 | "file_extension": ".py", 758 | "mimetype": "text/x-python", 759 | "name": "python", 760 | "nbconvert_exporter": "python", 761 | "pygments_lexer": "ipython3", 762 | "version": "3.6.4" 763 | } 764 | }, 765 | "nbformat": 4, 766 | "nbformat_minor": 2 767 | } 768 | -------------------------------------------------------------------------------- /webpage/Makefile: -------------------------------------------------------------------------------- 1 | all: html pdf docx rtf 2 | 3 | pdf: index.pdf 4 | index.pdf: index.md 5 | pandoc --standalone --template style_chmduquesne.tex \ 6 | --from markdown --to context \ 7 | -V papersize=A4 \ 8 | -o index.tex index.md; \ 9 | context index.tex 10 | 11 | html: index.html 12 | index.html: style_chmduquesne.css index.md 13 | pandoc --standalone -H style_chmduquesne.css \ 14 | --from markdown --to html \ 15 | -o index.html index.md 16 | 17 | docx: index.docx 18 | index.docx: index.md 19 | pandoc -s index.md -o index.docx 20 | 21 | rtf: index.rtf 22 | index.rtf: index.md 23 | pandoc -s index.md -o index.rtf 24 | 25 | clean: 26 | rm index.html 27 | rm index.tex 28 | rm index.tuc 29 | rm index.log 30 | rm index.pdf 31 | rm index.docx 32 | rm index.rtf 33 | -------------------------------------------------------------------------------- /webpage/index.md: -------------------------------------------------------------------------------- 1 | CS583: Deep Learning 2 | ============ 3 | 4 | 5 | > Instructor: Shusen Wang 6 | 7 | > TA: Xuting Tang and Sesha Vadlamudi 8 | 9 | 10 | Description 11 | --------- 12 | 13 | **Meeting Time:** 14 | 15 | - Section A: Thursday, 6:30-9:00 PM, Online 16 | 17 | - Section B: Friday, 3:00-5:30 PM, Online 18 | 19 | 20 | **Office Hours:** 21 | 22 | - Thursday, 9:00-10:00 PM, virtual 23 | 24 | - Friday, 5:30-6:30 PM, virtual 25 | 26 | 27 | 28 | **Contact the Instructor:** 29 | 30 | - For questions regarding grading, talk to the instructor during office hours or send him emails. 31 | 32 | - For technical questions, post the question on the "discussion" module of Canvas or ask the instructor during the office hours. 33 | 34 | 35 | **Prerequisite:** 36 | 37 | - Elementary linear algebra, e.g., matrix multiplication, eigenvalue decomposition, and matrix norms. 38 | 39 | - Elementary calculus, e.g., convex function, differentiation of scalar functions, first derivative, and second derivative. 40 | 41 | - Python programming (especially the Numpy library) and Jupyter Notebook. 42 | 43 | 44 | **Goal:** 45 | This is a practical course; the students will be able to use DL methods for solving real-world ML, CV, and NLP problems. 
The students will also learn the math and theory for understanding ML and DL. 46 | 47 | 48 | **Slides:** All the slides are available here: [[link](https://github.com/wangshusen/DeepLearning)] 49 | 50 | 51 | Schedule 52 | --------- 53 | 54 | 55 | - Preparations 56 | 57 |     * Install the software packages by following [[this](https://github.com/wangshusen/CS583-2019F/blob/master/homework/Prepare/HM.pdf)]. 58 | 59 |     * Study elementary matrix algebra by following [[this book](http://vmls-book.stanford.edu/vmls.pdf)]. 60 | 61 |     * If you are unfamiliar with matrix computation, you need to watch the recorded lectures of CS600: 62 | 63 |         - Addition and multiplication [[slides](https://github.com/wangshusen/AdvancedAlgorithms/blob/master/Slides/5_Matrix_1.pdf)] 64 |         [[video](https://youtu.be/ZTtW6SMTmcY)]. 65 | 66 |         - Dense and sparse matrix data structures [[slides](https://github.com/wangshusen/AdvancedAlgorithms/blob/master/Slides/5_Matrix_2.pdf)] 67 |         [[video](https://youtu.be/fy_dSZb-Xx8)]. 68 | 69 |     * Study probability theory and randomized algorithms by watching the recorded lectures of CS600: 70 | 71 |         - Monte Carlo [[slides](https://github.com/wangshusen/AdvancedAlgorithms)] 72 |         [[video](https://youtu.be/CmpWM2HMhxw)]. 73 | 74 |         - Random permutation [[slides](https://github.com/wangshusen/AdvancedAlgorithms)] 75 |         [[video](https://youtu.be/xaSBvljOQkc)]. 76 | 77 |     * Finish the [[sample questions](https://github.com/wangshusen/CS583-2019F/blob/master/homework/Quiz1-Sample/Q1.pdf)] before Quiz 1. 78 | 79 | 80 | - Feb 4/5, Lecture 1 81 | 82 |     * ML Basics 83 | 84 |     * Linear Regression 85 | 86 | 87 | - Feb 11/12, Lecture 2 88 | 89 |     * Read these in advance: 90 |     [[Matrix Calculus](https://github.com/wangshusen/CS583A-2019Spring/blob/master/reading/MatrixCalculus.pdf)] 91 |     [[Logistic Regression](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/Logistic/paper/logistic.pdf)] 92 | 93 | 94 |     * Polynomial Regression 95 | 96 |     * Classification: logistic regression. 97 | 98 | 99 | 100 | 101 | - Feb 18/19, Lecture 3 102 | 103 |     * Classification: SVM. 104 | 105 |     * Regularizations. 106 | 107 | 108 | - Feb 21, 8:30PM - 10:00PM, **Quiz 1**, online 109 | 110 |     * Coverage: vector norms ($\ell_2$-norm, $\ell_1$-norm, $\ell_p$-norm, $\ell_\infty$-norm), vector inner product, matrix multiplication, matrix trace, matrix Frobenius norm, differentiation of scalar functions, convex functions, using Numpy for matrix computation, randomized algorithms, and ML basics. 111 | 112 |     * Time limit: 60 minutes. 113 | 114 | 115 | 116 | - Feb 25/26, Lecture 4 117 | 118 |     * Classification: SVM, softmax classifier, and KNN. 119 | 120 |     * Scientific computing. 121 | 122 | 123 | - Mar 4/5, Lecture 5 124 | 125 |     * Read Sections 1 to 4 in advance: [[neural networks and backpropagation](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/BP/bp.pdf)] 126 | 127 |     * Neural networks. 128 | 129 |     * Keras. 130 | 131 | 132 | - Mar 11/12, Lecture 6 133 | 134 |     * Convolutional neural networks (CNNs). 135 | 136 |     * CNNs: useful tricks. 137 | 138 | 139 | - Mar 18/19, Lecture 7 140 | 141 |     * Read the note in advance: [[lecture note](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/Parallel/Parallel.pdf)] 142 | 143 |     * CNNs: batch normalization, theories. 144 | 145 | 146 | 147 | 148 | - Mar 25/26, Lecture 8 149 | 150 |     * CNN architectures. 151 | 152 |     * Parallel computing. 
153 | 
154 | 
155 | - Mar 28, 8:30PM - 10:00PM, **Quiz 2**, online
156 | 
157 |     * Coverage: vector and matrix operations, gradients, ML basics, neural networks, CNNs, and parallel computing.
158 | 
159 |     * Time limit: 30 minutes.
160 | 
161 |     * Sample: [[click here](https://github.com/wangshusen/CS583-2020S/blob/master/homework/Exam-Sample/Sample.pdf)]
162 | 
163 | 
164 | - Apr 1/2, Lecture 9
165 | 
166 |     * Federated learning.
167 | 
168 |     * Text processing.
169 | 
170 |     * Simple RNN.
171 | 
172 | 
173 | 
174 | 
175 | 
176 | - Apr 8/9, Lecture 10
177 | 
178 |     * RNNs: LSTM, text generation, and machine translation.
179 | 
180 | 
181 | - Apr 15/16, Lecture 11
182 | 
183 |     * Attention and self-attention.
184 | 
185 |     * Transformer and BERT.
186 | 
187 | 
188 | 
189 | - Apr 22/23, Lecture 12
190 | 
191 |     * Autoencoders.
192 | 
193 |     * Variational autoencoders (VAEs).
194 | 
195 |     * Adversarial robustness.
196 | 
197 | 
198 | - Apr 25, 8:30PM - 10:00PM, **Quiz 3**, online
199 | 
200 |     * Coverage: ML basics, CNNs, RNNs, and the Transformer.
201 | 
202 |     * Time limit: 50 minutes.
203 | 
204 | 
205 | - Apr 29, 7:00AM - 6:00PM, **Bonus Quiz**, online
206 | 
207 |     * Coverage: deep reinforcement learning.
208 | 
209 |     * Time limit: 30 minutes.
210 | 
211 | 
212 | - Apr 29/30, Lecture 13
213 | 
214 |     * GANs.
215 | 
216 |     * Deep reinforcement learning.
217 | 
218 | 
219 | - May 6/7, Lecture 14
220 | 
221 |     * Deep reinforcement learning (cont.).
222 | 
223 | 
224 | - May 16, 8:30PM - 10:30PM, **Final Exam**, online
225 | 
226 |     * Coverage: everything, including Monte Carlo, Transformer, BERT, few-shot learning, and deep reinforcement learning.
227 | 
228 |     * Time limit: 100 minutes.
229 | 
230 | 
231 | 
232 | 
233 | 
234 | Assignments and Bonus Scores
235 | ---------
236 | 
237 | - Homework 1: Linear Algebra Basics
238 | 
239 |     * Available only on Canvas (auto-graded).
240 | 
241 |     * Submit to Canvas before Feb 21.
242 | 
243 | 
244 | - Homework 2: Implement Numerical Optimization Algorithms
245 | 
246 |     * Available at the course's repo [[click here](https://github.com/wangshusen/CS583-2021S/tree/master/homework)].
247 |     * You will need the knowledge from the lecture note: [[Logistic Regression](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/Logistic/paper/logistic.pdf)]
248 | 
249 |     * Submit to Canvas before Mar 7.
250 | 
251 | 
252 | - Homework 3: Machine Learning Basics
253 | 
254 |     * Available only on Canvas (auto-graded).
255 | 
256 |     * Submit to Canvas before Mar 21.
257 | 
258 | 
259 | - Homework 4: Implement a Convolutional Neural Network
260 | 
261 |     * Available at the course's repo [[click here](https://github.com/wangshusen/CS583-2021S/tree/master/homework)].
262 | 
263 |     * Submit to Canvas before Mar 28.
264 | 
265 | 
266 | - Homework 5: Implement a Recurrent Neural Network
267 | 
268 |     * Available at the course's repo [[click here](https://github.com/wangshusen/CS583-2021S/tree/master/homework)].
269 | 
270 |     * Submit to Canvas before Apr 25.
271 | 
272 |     * You may get up to 2 bonus scores by doing extra work.
273 | 
274 | 
275 | - Bonus 1: Implement Parallel Algorithms (Voluntary)
276 | 
277 |     * Available at the course's repo [[click here](https://github.com/wangshusen/CS583-2021S/tree/master/homework)].
278 | 
279 |     * You will need the knowledge from the lecture note: [[Parallel Computing](https://github.com/wangshusen/DeepLearning/blob/master/LectureNotes/Parallel/Parallel.pdf)]
280 | 
281 |     * You can choose to implement Federated Averaging and/or Decentralized Optimization; a minimal Federated Averaging sketch is given below. You may get up to 2 bonus points for each.
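      The following is an illustration only, not the required solution: one Federated Averaging round for logistic regression. It assumes each worker holds one shard `(x_i, y_i)` of the training set, with `x_i` an `(n, d)` NumPy array and labels `y_i` an `(n, 1)` array in {-1, +1}; the shard list and the hyperparameters are placeholders.

      ```python
      import numpy

      def gradient(w, x, y):
          # Gradient of the mean logistic loss at w; labels y are in {-1, +1}.
          z = y * (x @ w)                                            # shape (n, 1)
          return -(x * (y / (1 + numpy.exp(z)))).mean(axis=0).reshape(-1, 1)

      def fedavg_round(w, shards, lr=0.1, local_steps=10):
          # Each worker starts from the server model w, runs a few local
          # gradient steps on its own shard; the server averages the results.
          local_models = []
          for x_i, y_i in shards:
              w_i = w.copy()
              for _ in range(local_steps):
                  w_i = w_i - lr * gradient(w_i, x_i, y_i)
              local_models.append(w_i)
          return sum(local_models) / len(local_models)

      # Example usage (hypothetical): split a training set into 4 shards and
      # run 20 communication rounds.
      #   shards = list(zip(numpy.array_split(x_train, 4), numpy.array_split(y_train, 4)))
      #   w = numpy.zeros((x_train.shape[1], 1))
      #   for r in range(20):
      #       w = fedavg_round(w, shards)
      ```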
282 | 
283 |     * Submit to Canvas before Apr 11 (firm deadline).
284 | 
285 | 
286 | - Bonus 2: Implement an Autoencoder Network (Voluntary)
287 | 
288 |     * Available at the course's repo [[click here](https://github.com/wangshusen/CS583-2021S/tree/master/homework)].
289 | 
290 |     * You may get up to 1 bonus point.
291 | 
292 |     * Submit to Canvas before May 1 (firm deadline).
293 | 
294 | 
295 | 
296 | Textbooks
297 | ---------
298 | 
299 | **Required** (please note the difference between "required" and "recommended"):
300 | 
301 | - Francois Chollet. Deep Learning with Python. Manning Publications Co., 2017. (Available online.)
302 | 
303 | **Highly Recommended**:
304 | 
305 | - S. Boyd and L. Vandenberghe. Introduction to Applied Linear Algebra. Cambridge University Press, 2018. (Available online.)
306 | 
307 | **Recommended**:
308 | 
309 | - Y. Nesterov. Introductory Lectures on Convex Optimization. Springer, 2013. (Available online.)
310 | 
311 | - D. S. Watkins. Fundamentals of Matrix Computations. John Wiley & Sons, 2004.
312 | 
313 | - I. Goodfellow, Y. Bengio, and A. Courville. Deep Learning. MIT Press, 2016. (Available online.)
314 | 
315 | - M. Mohri, A. Rostamizadeh, and A. Talwalkar. Foundations of Machine Learning. MIT Press, 2012.
316 | 
317 | - J. Friedman, T. Hastie, and R. Tibshirani. The Elements of Statistical Learning. Springer Series in Statistics, 2001. (Available online.)
318 | 
319 | 
320 | 
321 | Grading Policy
322 | ---------
323 | **Grades**:
324 | 
325 | - **A**: 93 and above.
326 | 
327 | - **A-**: [90, 93)
328 | 
329 | - **B+**: [87, 90)
330 | 
331 | - **B**: [83, 87)
332 | 
333 | - **B-**: [80, 83)
334 | 
335 | - **C+**: [77, 80)
336 | 
337 | - **C**: [73, 77)
338 | 
339 | - **Fail**: below 73
340 | 
341 | 
342 | **Weights**:
343 | 
344 | - Homework 50\%
345 | 
346 | - Quizzes 30\%
347 | 
348 | - Final 20\%
349 | 
350 | - Bonus: bonus scores are added on top of the weighted total.
351 | 
352 | 
353 | **Expected grade on record**:
354 | 
355 | - An average student is expected to lose at least 10 points.
356 | 
357 | - If an average student does not collect any bonus score, the grade on record is expected to be "B+".
358 | An average student needs at least 3 bonus scores to get an "A".
359 | 
360 | - According to Stevens's policy, a score lower than 73.0 counts as a fail.
361 | 
362 | 
363 | **Late penalty**:
364 | 
365 | - Late submissions of assignments or project documents, for whatever reason, will be penalized: two percentage points are deducted per day late, and any fraction of a day counts as a full day. For example, if an assignment is submitted 15 days and 1 minute after the deadline (counted as 16 days) and receives a grade of 95\%, the recorded score is 95\% - 16*2\% = 63\%.
366 | 
367 | - All the deadlines for bonus work are firm. Late submissions will not receive bonus scores.
368 | 
369 | - May 1 is the firm deadline for all the homework and the course project. Submissions later than the firm deadline will not be graded.
370 | 
371 | 
372 | 
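To make the rounding in the late-penalty example explicit, here is a tiny illustrative calculator (a hypothetical helper, not part of any assignment):

```python
import math

def late_score(raw_score, days_late):
    # Two percentage points per day late; any fraction of a day counts as a full day.
    days = math.ceil(days_late)
    return max(0.0, raw_score - 2.0 * days)

print(late_score(95.0, 15 + 1 / 1440))   # 15 days and 1 minute late -> 63.0
```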
--------------------------------------------------------------------------------
/webpage/style_chmduquesne.css:
--------------------------------------------------------------------------------
 1 | 
95 | 
--------------------------------------------------------------------------------
/webpage/style_chmduquesne.tex:
--------------------------------------------------------------------------------
  1 | % Copyright 2013 Christophe-Marie Duquesne
  2 | % Copyright 2014 Mark Szepieniec
  3 | %
  4 | % ConText style for making a resume with pandoc. Inspired by moderncv.
  5 | %
  6 | % This document is delivered to you under the CC BY-SA 3.0 License.
  7 | % https://creativecommons.org/licenses/by-sa/3.0/deed.en_US
  8 | 
  9 | \startmode[*mkii]
 10 | \enableregime[utf-8]
 11 | \setupcolors[state=start]
 12 | \stopmode
 13 | $if(mainlang)$
 14 | \mainlanguage[$mainlang$]
 15 | $endif$
 16 | 
 17 | \setupcolor[hex]
 18 | \definecolor[titlegrey][h=757575]
 19 | \definecolor[sectioncolor][h=397249]
 20 | \definecolor[rulecolor][h=9cb770]
 21 | 
 22 | % Enable hyperlinks
 23 | \setupinteraction[state=start, color=sectioncolor]
 24 | 
 25 | \setuppapersize [$if(papersize)$$papersize$$else$letter$endif$][$if(papersize)$$papersize$$else$letter$endif$]
 26 | \setuplayout [width=middle, height=middle,
 27 |              backspace=20mm, cutspace=0mm,
 28 |              topspace=10mm, bottomspace=20mm,
 29 |              header=0mm, footer=0mm]
 30 | 
 31 | %\setuppagenumbering[location={footer,center}]
 32 | 
 33 | \setupbodyfont[11pt, helvetica]
 34 | 
 35 | \setupwhitespace[medium]
 36 | 
 37 | \setupblackrules[width=31mm, color=rulecolor]
 38 | 
 39 | \setuphead[chapter]      [style=\tfd]
 40 | \setuphead[section]      [style=\tfd\bf, color=titlegrey, align=middle]
 41 | \setuphead[subsection]   [style=\tfb\bf, color=sectioncolor, align=right,
 42 |                           before={\leavevmode\blackrule\hspace}]
 43 | \setuphead[subsubsection][style=\bf]
 44 | 
 45 | $if(number-sections)$
 46 | $else$
 47 | \setuphead[chapter, section, subsection, subsubsection][number=no]
 48 | $endif$
 49 | 
 50 | %\setupdescriptions[width=10mm]
 51 | 
 52 | \definedescription
 53 |   [description]
 54 |   [headstyle=bold, style=normal,
 55 |    location=hanging, width=18mm, distance=14mm, margin=0cm]
 56 | 
 57 | \setupitemize[autointro, packed] % prevent orphan list intro
 58 | \setupitemize[indentnext=no]
 59 | 
 60 | \setupfloat[figure][default={here,nonumber}]
 61 | \setupfloat[table][default={here,nonumber}]
 62 | 
 63 | \setuptables[textwidth=max, HL=none]
 64 | 
 65 | \setupthinrules[width=15em] % width of horizontal rules
 66 | 
 67 | \setupdelimitedtext
 68 |   [blockquote]
 69 |   [before={\setupalign[middle]},
 70 |    indentnext=no,
 71 |   ]
 72 | 
 73 | $if(toc)$
 74 | \setupcombinedlist[content][list={$placelist$}]
 75 | 
 76 | $endif$
 77 | $for(header-includes)$
 78 | $header-includes$
 79 | $endfor$
 80 | 
 81 | \starttext
 82 | $if(title)$
 83 | \startalignment[center]
 84 |   \blank[2*big]
 85 |   {\tfd $title$}
 86 |   $if(author)$
 87 |   \blank[3*medium]
 88 |   {\tfa $for(author)$$author$$sep$\crlf $endfor$}
 89 |   $endif$
 90 |   $if(date)$
 91 |   \blank[2*medium]
 92 |   {\tfa $date$}
 93 |   $endif$
 94 |   \blank[3*medium]
 95 | \stopalignment
 96 | $endif$
 97 | $for(include-before)$
 98 | $include-before$
 99 | $endfor$
100 | $if(toc)$
101 | \placecontent
102 | $endif$
103 | 
104 | $body$
105 | 
106 | $for(include-after)$
107 | $include-after$
108 | $endfor$
109 | \stoptext
110 | 
--------------------------------------------------------------------------------