├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── capsLayer.py ├── capsNet.py ├── config.py ├── dist_version ├── README.md ├── capsnet_slim.py └── distributed_train.py ├── download_data.py ├── imgs ├── capsuleVSneuron.png └── my_wechat_QR.png ├── main.py ├── plot_acc.R ├── results ├── margin_loss.png ├── mnist │ ├── 1_iter_loss.csv │ ├── 1_iter_train_acc.csv │ ├── 1_iter_val_acc.csv │ ├── 3_iter_loss.csv │ ├── 3_iter_train_acc.csv │ ├── 3_iter_val_acc.csv │ ├── 4_iter_loss.csv │ ├── 4_iter_train_acc.csv │ └── 4_iter_val_acc.csv ├── reconstruction_loss.png ├── routing_trials.png └── total_loss.png └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | *__pycache__ 2 | data* 3 | .ropeproject 4 | logdir* 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | This repo is under development, your contribution is welcome. 2 | 3 | - If you have any questions, feel free to open an issue. 4 | - If you want to contribute your code, fork it, and open your Pull Requests. 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CapsNet-Tensorflow 2 | 3 | [![Contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=plastic)](CONTRIBUTING.md) 4 | [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg?style=plastic)](https://opensource.org/licenses/Apache-2.0) 5 | [![Gitter](https://img.shields.io/gitter/room/nwjs/nw.js.svg?style=plastic)](https://gitter.im/CapsNet-Tensorflow/Lobby) 6 | 7 | A Tensorflow implementation of CapsNet based on Geoffrey Hinton's paper [Dynamic Routing Between Capsules](https://arxiv.org/abs/1710.09829) 8 | 9 | ![capsVSneuron](imgs/capsuleVSneuron.png) 10 | 11 | > **Notes:** 12 | > 1. The current version supports [MNIST](http://yann.lecun.com/exdb/mnist/) and [Fashion-MNIST](https://github.com/zalandoresearch/fashion-mnist) datasets. 
The current test accuracy for MNIST is `99.64%`, and Fashion-MNIST `90.60%`, see details in the [Results](https://github.com/naturomics/CapsNet-Tensorflow#results) section 13 | > 2. See [dist_version](dist_version) for multi-GPU support 14 | > 3. [Here(知乎)](https://zhihu.com/question/67287444/answer/251460831) is an article explaining my understanding of the paper. It may be helpful in understanding the code. 15 | 16 | 17 | > **Important:** 18 | > 19 | > If you need to apply CapsNet model to your own datasets or build up a new model with the basic block of CapsNet, please follow my new project [CapsLayer](https://github.com/naturomics/CapsLayer), which is an advanced library for capsule theory, aiming to integrate capsule-relevant technologies, provide relevant analysis tools, develop related application examples, and promote the development of capsule theory. For example, you can use capsule layer block in your code easily with the API ``capsLayer.layers.fully_connected`` and ``capsLayer.layers.conv2d`` 20 | 21 | 22 | ## Requirements 23 | - Python 24 | - NumPy 25 | - [Tensorflow](https://github.com/tensorflow/tensorflow)>=1.3 26 | - tqdm (for displaying training progress info) 27 | - scipy (for saving images) 28 | 29 | ## Usage 30 | **Step 1.** Download this repository with ``git`` or click the [download ZIP](https://github.com/naturomics/CapsNet-Tensorflow/archive/master.zip) button. 31 | 32 | ``` 33 | $ git clone https://github.com/naturomics/CapsNet-Tensorflow.git 34 | $ cd CapsNet-Tensorflow 35 | ``` 36 | 37 | **Step 2.** Download [MNIST](http://yann.lecun.com/exdb/mnist/) or [Fashion-MNIST](https://github.com/zalandoresearch/fashion-mnist) dataset. 
In this step, you have two choices: 38 | 39 | - a) Automatic downloading with `download_data.py` script 40 | ``` 41 | $ python download_data.py (for mnist dataset) 42 | $ python download_data.py --dataset fashion-mnist --save_to data/fashion-mnist (for fashion-mnist dataset) 43 | ``` 44 | 45 | - b) Manual downloading with `wget` or other tools, move and extract dataset into ``data/mnist`` or ``data/fashion-mnist`` directory, for example: 46 | 47 | ``` 48 | $ mkdir -p data/mnist 49 | $ wget -c -P data/mnist http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz 50 | $ wget -c -P data/mnist http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz 51 | $ wget -c -P data/mnist http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz 52 | $ wget -c -P data/mnist http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz 53 | $ gunzip data/mnist/*.gz 54 | ``` 55 | 56 | **Step 3.** Start the training(Using the MNIST dataset by default): 57 | 58 | ``` 59 | $ python main.py 60 | $ # or training for fashion-mnist dataset 61 | $ python main.py --dataset fashion-mnist 62 | $ # If you need to monitor the training process, open tensorboard with this command 63 | $ tensorboard --logdir=logdir 64 | $ # or use `tail` command on linux system 65 | $ tail -f results/val_acc.csv 66 | ``` 67 | 68 | **Step 4.** Calculate test accuracy 69 | 70 | ``` 71 | $ python main.py --is_training=False 72 | $ # for fashion-mnist dataset 73 | $ python main.py --dataset fashion-mnist --is_training=False 74 | ``` 75 | 76 | > **Note:** The default parameters of batch size is 128, and epoch 50. You may need to modify the ``config.py`` file or use command line parameters to suit your case, e.g. 
set batch size to 64 and do once test summary every 200 steps: ``python main.py --test_sum_freq=200 --batch_size=48`` 77 | 78 | ## Results 79 | The pictures here are plotted by tensorboard and my tool `plot_acc.R` 80 | 81 | - training loss 82 | 83 | ![total_loss](results/total_loss.png) 84 | ![margin_loss](results/margin_loss.png) 85 | ![reconstruction_loss](results/reconstruction_loss.png) 86 | 87 | Here are the models I trained and my talk and something else: 88 | 89 | [Baidu Netdisk](https://pan.baidu.com/s/1pLp8fdL)(password:ahjs) 90 | 91 | - The best val error(using reconstruction) 92 | 93 | Routing iteration | 1 | 3 | 4 | 94 | :-----|:----:|:----:|:------| 95 | val error | 0.36 | 0.36 | 0.41 | 96 | *Paper* | 0.29 | 0.25 | - | 97 | 98 | ![test_acc](results/routing_trials.png) 99 | 100 | 101 | > My simple comments for capsule 102 | > 1. A new version neural unit(vector in vector out, not scalar in scalar out) 103 | > 2. The routing algorithm is similar to attention mechanism 104 | > 3. Anyway, a great potential work, a lot to be built upon 105 | 106 | 107 | ## My weChat: 108 | ![my_wechat](/imgs/my_wechat_QR.png) 109 | 110 | ### Reference 111 | - [XifengGuo/CapsNet-Keras](https://github.com/XifengGuo/CapsNet-Keras): referred for some code optimizations 112 | -------------------------------------------------------------------------------- /capsLayer.py: -------------------------------------------------------------------------------- 1 | """ 2 | License: Apache-2.0 3 | Author: Huadong Liao 4 | E-mail: naturomics.liao@gmail.com 5 | """ 6 | 7 | import numpy as np 8 | import tensorflow as tf 9 | 10 | from config import cfg 11 | from utils import reduce_sum 12 | from utils import softmax 13 | from utils import get_shape 14 | 15 | 16 | epsilon = 1e-9 17 | 18 | 19 | class CapsLayer(object): 20 | ''' Capsule layer. 21 | Args: 22 | input: A 4-D tensor. 23 | num_outputs: the number of capsule in this layer. 
24 | vec_len: integer, the length of the output vector of a capsule. 25 | layer_type: string, one of 'FC' or "CONV", the type of this layer, 26 | fully connected or convolution, for the future expansion capability 27 | with_routing: boolean, this capsule is routing with the 28 | lower-level layer capsule. 29 | 30 | Returns: 31 | A 4-D tensor. 32 | ''' 33 | def __init__(self, num_outputs, vec_len, with_routing=True, layer_type='FC'): 34 | self.num_outputs = num_outputs 35 | self.vec_len = vec_len 36 | self.with_routing = with_routing 37 | self.layer_type = layer_type 38 | 39 | def __call__(self, input, kernel_size=None, stride=None): 40 | ''' 41 | The parameters 'kernel_size' and 'stride' will be used while 'layer_type' equal 'CONV' 42 | ''' 43 | if self.layer_type == 'CONV': 44 | self.kernel_size = kernel_size 45 | self.stride = stride 46 | 47 | if not self.with_routing: 48 | # the PrimaryCaps layer, a convolutional layer 49 | # input: [batch_size, 20, 20, 256] 50 | # assert input.get_shape() == [cfg.batch_size, 20, 20, 256] 51 | 52 | # NOTE: I can't find out any words from the paper whether the 53 | # PrimaryCap convolution does a ReLU activation or not before 54 | # squashing function, but experiment show that using ReLU get a 55 | # higher test accuracy. 
So, which one to use will be your choice 56 | capsules = tf.contrib.layers.conv2d(input, self.num_outputs * self.vec_len, 57 | self.kernel_size, self.stride, padding="VALID", 58 | activation_fn=tf.nn.relu) 59 | # capsules = tf.contrib.layers.conv2d(input, self.num_outputs * self.vec_len, 60 | # self.kernel_size, self.stride,padding="VALID", 61 | # activation_fn=None) 62 | capsules = tf.reshape(capsules, (cfg.batch_size, -1, self.vec_len, 1)) 63 | 64 | # return tensor with shape [batch_size, 1152, 8, 1] 65 | capsules = squash(capsules) 66 | return(capsules) 67 | 68 | if self.layer_type == 'FC': 69 | if self.with_routing: 70 | # the DigitCaps layer, a fully connected layer 71 | # Reshape the input into [batch_size, 1152, 1, 8, 1] 72 | self.input = tf.reshape(input, shape=(cfg.batch_size, -1, 1, input.shape[-2].value, 1)) 73 | 74 | with tf.variable_scope('routing'): 75 | # b_IJ: [batch_size, num_caps_l, num_caps_l_plus_1, 1, 1], 76 | # about the reason of using 'batch_size', see issue #21 77 | b_IJ = tf.constant(np.zeros([cfg.batch_size, input.shape[1].value, self.num_outputs, 1, 1], dtype=np.float32)) 78 | capsules = routing(self.input, b_IJ, num_outputs=self.num_outputs, num_dims=self.vec_len) 79 | capsules = tf.squeeze(capsules, axis=1) 80 | 81 | return(capsules) 82 | 83 | 84 | def routing(input, b_IJ, num_outputs=10, num_dims=16): 85 | ''' The routing algorithm. 86 | 87 | Args: 88 | input: A Tensor with [batch_size, num_caps_l=1152, 1, length(u_i)=8, 1] 89 | shape, num_caps_l meaning the number of capsule in the layer l. 90 | num_outputs: the number of output capsules. 91 | num_dims: the number of dimensions for output capsule. 92 | Returns: 93 | A Tensor of shape [batch_size, num_caps_l_plus_1, length(v_j)=16, 1] 94 | representing the vector output `v_j` in the layer l+1 95 | Notes: 96 | u_i represents the vector output of capsule i in the layer l, and 97 | v_j the vector output of capsule j in the layer l+1. 
98 | ''' 99 | 100 | # W: [1, num_caps_i, num_caps_j * len_v_j, len_u_j, 1] 101 | input_shape = get_shape(input) 102 | W = tf.get_variable('Weight', shape=[1, input_shape[1], num_dims * num_outputs] + input_shape[-2:], 103 | dtype=tf.float32, initializer=tf.random_normal_initializer(stddev=cfg.stddev)) 104 | biases = tf.get_variable('bias', shape=(1, 1, num_outputs, num_dims, 1)) 105 | 106 | # Eq.2, calc u_hat 107 | # Since tf.matmul is a time-consuming op, 108 | # A better solution is using element-wise multiply, reduce_sum and reshape 109 | # ops instead. Matmul [a, b] x [b, c] is equal to a series ops as 110 | # element-wise multiply [a*c, b] * [a*c, b], reduce_sum at axis=1 and 111 | # reshape to [a, c] 112 | input = tf.tile(input, [1, 1, num_dims * num_outputs, 1, 1]) 113 | # assert input.get_shape() == [cfg.batch_size, 1152, 160, 8, 1] 114 | 115 | u_hat = reduce_sum(W * input, axis=3, keepdims=True) 116 | u_hat = tf.reshape(u_hat, shape=[-1, input_shape[1], num_outputs, num_dims, 1]) 117 | # assert u_hat.get_shape() == [cfg.batch_size, 1152, 10, 16, 1] 118 | 119 | # In forward, u_hat_stopped = u_hat; in backward, no gradient passed back from u_hat_stopped to u_hat 120 | u_hat_stopped = tf.stop_gradient(u_hat, name='stop_gradient') 121 | 122 | # line 3,for r iterations do 123 | for r_iter in range(cfg.iter_routing): 124 | with tf.variable_scope('iter_' + str(r_iter)): 125 | # line 4: 126 | # => [batch_size, 1152, 10, 1, 1] 127 | c_IJ = softmax(b_IJ, axis=2) 128 | 129 | # At last iteration, use `u_hat` in order to receive gradients from the following graph 130 | if r_iter == cfg.iter_routing - 1: 131 | # line 5: 132 | # weighting u_hat with c_IJ, element-wise in the last two dims 133 | # => [batch_size, 1152, 10, 16, 1] 134 | s_J = tf.multiply(c_IJ, u_hat) 135 | # then sum in the second dim, resulting in [batch_size, 1, 10, 16, 1] 136 | s_J = reduce_sum(s_J, axis=1, keepdims=True) + biases 137 | # assert s_J.get_shape() == [cfg.batch_size, 1, num_outputs, 
num_dims, 1] 138 | 139 | # line 6: 140 | # squash using Eq.1, 141 | v_J = squash(s_J) 142 | # assert v_J.get_shape() == [cfg.batch_size, 1, 10, 16, 1] 143 | elif r_iter < cfg.iter_routing - 1: # Inner iterations, do not apply backpropagation 144 | s_J = tf.multiply(c_IJ, u_hat_stopped) 145 | s_J = reduce_sum(s_J, axis=1, keepdims=True) + biases 146 | v_J = squash(s_J) 147 | 148 | # line 7: 149 | # reshape & tile v_j from [batch_size ,1, 10, 16, 1] to [batch_size, 1152, 10, 16, 1] 150 | # then matmul in the last tow dim: [16, 1].T x [16, 1] => [1, 1], reduce mean in the 151 | # batch_size dim, resulting in [1, 1152, 10, 1, 1] 152 | v_J_tiled = tf.tile(v_J, [1, input_shape[1], 1, 1, 1]) 153 | u_produce_v = reduce_sum(u_hat_stopped * v_J_tiled, axis=3, keepdims=True) 154 | # assert u_produce_v.get_shape() == [cfg.batch_size, 1152, 10, 1, 1] 155 | 156 | # b_IJ += tf.reduce_sum(u_produce_v, axis=0, keep_dims=True) 157 | b_IJ += u_produce_v 158 | 159 | return(v_J) 160 | 161 | 162 | def squash(vector): 163 | '''Squashing function corresponding to Eq. 1 164 | Args: 165 | vector: A tensor with shape [batch_size, 1, num_caps, vec_len, 1] or [batch_size, num_caps, vec_len, 1]. 166 | Returns: 167 | A tensor with the same shape as vector but squashed in 'vec_len' dimension. 
168 | ''' 169 | vec_squared_norm = reduce_sum(tf.square(vector), -2, keepdims=True) 170 | scalar_factor = vec_squared_norm / (1 + vec_squared_norm) / tf.sqrt(vec_squared_norm + epsilon) 171 | vec_squashed = scalar_factor * vector # element-wise 172 | return(vec_squashed) 173 | -------------------------------------------------------------------------------- /capsNet.py: -------------------------------------------------------------------------------- 1 | """ 2 | License: Apache-2.0 3 | Author: Huadong Liao 4 | E-mail: naturomics.liao@gmail.com 5 | """ 6 | 7 | import tensorflow as tf 8 | 9 | from config import cfg 10 | from utils import get_batch_data 11 | from utils import softmax 12 | from utils import reduce_sum 13 | from capsLayer import CapsLayer 14 | 15 | 16 | epsilon = 1e-9 17 | 18 | 19 | class CapsNet(object): 20 | def __init__(self, is_training=True, height=28, width=28, channels=1, num_label=10): 21 | """ 22 | Args: 23 | height: Integer, the height of inputs. 24 | width: Integer, the width of inputs. 25 | channels: Integer, the channels of inputs. 26 | num_label: Integer, the category number. 
27 | """ 28 | self.height = height 29 | self.width = width 30 | self.channels = channels 31 | self.num_label = num_label 32 | 33 | self.graph = tf.Graph() 34 | 35 | with self.graph.as_default(): 36 | if is_training: 37 | self.X, self.labels = get_batch_data(cfg.dataset, cfg.batch_size, cfg.num_threads) 38 | self.Y = tf.one_hot(self.labels, depth=self.num_label, axis=1, dtype=tf.float32) 39 | 40 | self.build_arch() 41 | self.loss() 42 | self._summary() 43 | 44 | # t_vars = tf.trainable_variables() 45 | self.global_step = tf.Variable(0, name='global_step', trainable=False) 46 | self.optimizer = tf.train.AdamOptimizer() 47 | self.train_op = self.optimizer.minimize(self.total_loss, global_step=self.global_step) 48 | else: 49 | self.X = tf.placeholder(tf.float32, shape=(cfg.batch_size, self.height, self.width, self.channels)) 50 | self.labels = tf.placeholder(tf.int32, shape=(cfg.batch_size, )) 51 | self.Y = tf.reshape(self.labels, shape=(cfg.batch_size, self.num_label, 1)) 52 | self.build_arch() 53 | 54 | tf.logging.info('Seting up the main structure') 55 | 56 | def build_arch(self): 57 | with tf.variable_scope('Conv1_layer'): 58 | # Conv1, return tensor with shape [batch_size, 20, 20, 256] 59 | conv1 = tf.contrib.layers.conv2d(self.X, num_outputs=256, 60 | kernel_size=9, stride=1, 61 | padding='VALID') 62 | 63 | # Primary Capsules layer, return tensor with shape [batch_size, 1152, 8, 1] 64 | with tf.variable_scope('PrimaryCaps_layer'): 65 | primaryCaps = CapsLayer(num_outputs=32, vec_len=8, with_routing=False, layer_type='CONV') 66 | caps1 = primaryCaps(conv1, kernel_size=9, stride=2) 67 | 68 | # DigitCaps layer, return shape [batch_size, 10, 16, 1] 69 | with tf.variable_scope('DigitCaps_layer'): 70 | digitCaps = CapsLayer(num_outputs=self.num_label, vec_len=16, with_routing=True, layer_type='FC') 71 | self.caps2 = digitCaps(caps1) 72 | 73 | # Decoder structure in Fig. 2 74 | # 1. Do masking, how: 75 | with tf.variable_scope('Masking'): 76 | # a). 
calc ||v_c||, then do softmax(||v_c||) 77 | # [batch_size, 10, 16, 1] => [batch_size, 10, 1, 1] 78 | self.v_length = tf.sqrt(reduce_sum(tf.square(self.caps2), 79 | axis=2, keepdims=True) + epsilon) 80 | self.softmax_v = softmax(self.v_length, axis=1) 81 | # assert self.softmax_v.get_shape() == [cfg.batch_size, self.num_label, 1, 1] 82 | 83 | # b). pick out the index of max softmax val of the 10 caps 84 | # [batch_size, 10, 1, 1] => [batch_size] (index) 85 | self.argmax_idx = tf.to_int32(tf.argmax(self.softmax_v, axis=1)) 86 | # assert self.argmax_idx.get_shape() == [cfg.batch_size, 1, 1] 87 | self.argmax_idx = tf.reshape(self.argmax_idx, shape=(cfg.batch_size, )) 88 | 89 | # Method 1. 90 | if not cfg.mask_with_y: 91 | # c). indexing 92 | # It's not easy to understand the indexing process with argmax_idx 93 | # as we are 3-dim animal 94 | masked_v = [] 95 | for batch_size in range(cfg.batch_size): 96 | v = self.caps2[batch_size][self.argmax_idx[batch_size], :] 97 | masked_v.append(tf.reshape(v, shape=(1, 1, 16, 1))) 98 | 99 | self.masked_v = tf.concat(masked_v, axis=0) 100 | assert self.masked_v.get_shape() == [cfg.batch_size, 1, 16, 1] 101 | # Method 2. masking with true label, default mode 102 | else: 103 | self.masked_v = tf.multiply(tf.squeeze(self.caps2), tf.reshape(self.Y, (-1, self.num_label, 1))) 104 | self.v_length = tf.sqrt(reduce_sum(tf.square(self.caps2), axis=2, keepdims=True) + epsilon) 105 | 106 | # 2. 
Reconstructe the MNIST images with 3 FC layers 107 | # [batch_size, 1, 16, 1] => [batch_size, 16] => [batch_size, 512] 108 | with tf.variable_scope('Decoder'): 109 | vector_j = tf.reshape(self.masked_v, shape=(cfg.batch_size, -1)) 110 | fc1 = tf.contrib.layers.fully_connected(vector_j, num_outputs=512) 111 | fc2 = tf.contrib.layers.fully_connected(fc1, num_outputs=1024) 112 | self.decoded = tf.contrib.layers.fully_connected(fc2, 113 | num_outputs=self.height * self.width * self.channels, 114 | activation_fn=tf.sigmoid) 115 | 116 | def loss(self): 117 | # 1. The margin loss 118 | 119 | # [batch_size, 10, 1, 1] 120 | # max_l = max(0, m_plus-||v_c||)^2 121 | max_l = tf.square(tf.maximum(0., cfg.m_plus - self.v_length)) 122 | # max_r = max(0, ||v_c||-m_minus)^2 123 | max_r = tf.square(tf.maximum(0., self.v_length - cfg.m_minus)) 124 | assert max_l.get_shape() == [cfg.batch_size, self.num_label, 1, 1] 125 | 126 | # reshape: [batch_size, 10, 1, 1] => [batch_size, 10] 127 | max_l = tf.reshape(max_l, shape=(cfg.batch_size, -1)) 128 | max_r = tf.reshape(max_r, shape=(cfg.batch_size, -1)) 129 | 130 | # calc T_c: [batch_size, 10] 131 | # T_c = Y, is my understanding correct? Try it. 132 | T_c = self.Y 133 | # [batch_size, 10], element-wise multiply 134 | L_c = T_c * max_l + cfg.lambda_val * (1 - T_c) * max_r 135 | 136 | self.margin_loss = tf.reduce_mean(tf.reduce_sum(L_c, axis=1)) 137 | 138 | # 2. The reconstruction loss 139 | orgin = tf.reshape(self.X, shape=(cfg.batch_size, -1)) 140 | squared = tf.square(self.decoded - orgin) 141 | self.reconstruction_err = tf.reduce_mean(squared) 142 | 143 | # 3. Total loss 144 | # The paper uses sum of squared error as reconstruction error, but we 145 | # have used reduce_mean in `# 2 The reconstruction loss` to calculate 146 | # mean squared error. 
In order to keep in line with the paper,the 147 | # regularization scale should be 0.0005*784=0.392 148 | self.total_loss = self.margin_loss + cfg.regularization_scale * self.reconstruction_err 149 | 150 | # Summary 151 | def _summary(self): 152 | train_summary = [] 153 | train_summary.append(tf.summary.scalar('train/margin_loss', self.margin_loss)) 154 | train_summary.append(tf.summary.scalar('train/reconstruction_loss', self.reconstruction_err)) 155 | train_summary.append(tf.summary.scalar('train/total_loss', self.total_loss)) 156 | recon_img = tf.reshape(self.decoded, shape=(cfg.batch_size, self.height, self.width, self.channels)) 157 | train_summary.append(tf.summary.image('reconstruction_img', recon_img)) 158 | self.train_summary = tf.summary.merge(train_summary) 159 | 160 | correct_prediction = tf.equal(tf.to_int32(self.labels), self.argmax_idx) 161 | self.accuracy = tf.reduce_sum(tf.cast(correct_prediction, tf.float32)) 162 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | flags = tf.app.flags 4 | 5 | 6 | ############################ 7 | # hyper parameters # 8 | ############################ 9 | 10 | # For separate margin loss 11 | flags.DEFINE_float('m_plus', 0.9, 'the parameter of m plus') 12 | flags.DEFINE_float('m_minus', 0.1, 'the parameter of m minus') 13 | flags.DEFINE_float('lambda_val', 0.5, 'down weight of the loss for absent digit classes') 14 | 15 | # for training 16 | flags.DEFINE_integer('batch_size', 128, 'batch size') 17 | flags.DEFINE_integer('epoch', 50, 'epoch') 18 | flags.DEFINE_integer('iter_routing', 3, 'number of iterations in routing algorithm') 19 | flags.DEFINE_boolean('mask_with_y', True, 'use the true label to mask out target capsule or not') 20 | 21 | flags.DEFINE_float('stddev', 0.01, 'stddev for W initializer') 22 | flags.DEFINE_float('regularization_scale', 0.392, 
'regularization coefficient for reconstruction loss, default to 0.0005*784=0.392') 23 | 24 | 25 | ############################ 26 | # environment setting # 27 | ############################ 28 | flags.DEFINE_string('dataset', 'mnist', 'The name of dataset [mnist, fashion-mnist') 29 | flags.DEFINE_boolean('is_training', True, 'train or predict phase') 30 | flags.DEFINE_integer('num_threads', 8, 'number of threads of enqueueing examples') 31 | flags.DEFINE_string('logdir', 'logdir', 'logs directory') 32 | flags.DEFINE_integer('train_sum_freq', 100, 'the frequency of saving train summary(step)') 33 | flags.DEFINE_integer('val_sum_freq', 500, 'the frequency of saving valuation summary(step)') 34 | flags.DEFINE_integer('save_freq', 3, 'the frequency of saving model(epoch)') 35 | flags.DEFINE_string('results', 'results', 'path for saving results') 36 | 37 | ############################ 38 | # distributed setting # 39 | ############################ 40 | flags.DEFINE_integer('num_gpu', 2, 'number of gpus for distributed training') 41 | flags.DEFINE_integer('batch_size_per_gpu', 128, 'batch size on 1 gpu') 42 | flags.DEFINE_integer('thread_per_gpu', 4, 'Number of preprocessing threads per tower.') 43 | 44 | cfg = tf.app.flags.FLAGS 45 | # tf.logging.set_verbosity(tf.logging.INFO) 46 | -------------------------------------------------------------------------------- /dist_version/README.md: -------------------------------------------------------------------------------- 1 | # CapsNet-Tensorflow Distributed Version 2 | 3 | A distributed implementation of CapsNet for training and inference. Some optimization of network structure is also added for acceleration. 4 | 5 | > **Status:** 6 | > 1. The implementation of distributed training is finished. 7 | > 2. Finish the speed test on single GPU. 8 | 9 | > **Daily task** 10 | > 1. Validation performance with multi-gpus 11 | > 2. 
Implement the test and inference part 12 | 13 | - Python 14 | - NumPy 15 | - [Tensorflow](https://github.com/tensorflow/tensorflow) 1.2.0+ 16 | 17 | > **Speed test report** 18 | With a single GTX 1080 GPU and an i7-5820K CPU @ 3.30GHz. 19 | 1 epoch of training on MNIST costs 157.4s, approximately 0.34s/iteration. 20 | 100 inference runs cost 4.2s, approximately 0.04s per inference. 21 | -------------------------------------------------------------------------------- /dist_version/capsnet_slim.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import tensorflow.contrib.slim as slim 3 | from config import cfg 4 | import numpy as np 5 | 6 | def build_arch(input, y, is_train=False): 7 | initializer = tf.truncated_normal_initializer(mean=0.0, stddev=0.01) 8 | biasInitializer = tf.constant_initializer(0.0) 9 | 10 | with slim.arg_scope([slim.conv2d], trainable=is_train, weights_initializer=initializer, biases_initializer=biasInitializer): 11 | with tf.variable_scope('Conv1_layer') as scope: 12 | output = slim.conv2d(input, num_outputs=256, kernel_size=[9, 9], stride=1, padding='VALID', scope=scope) 13 | assert output.get_shape() == [cfg.batch_size_per_gpu, 20, 20, 256] 14 | 15 | with tf.variable_scope('PrimaryCaps_layer') as scope: 16 | output = slim.conv2d(output, num_outputs=32*8, kernel_size=[9, 9], stride=2, padding='VALID', scope=scope, activation_fn=None) 17 | output = tf.reshape(output, [cfg.batch_size_per_gpu, -1, 1, 8]) 18 | assert output.get_shape() == [cfg.batch_size_per_gpu, 1152, 1, 8] 19 | 20 | with tf.variable_scope('DigitCaps_layer') as scope: 21 | u_hats = [] 22 | input_groups = tf.split(axis=1, num_or_size_splits=1152, value=output) 23 | for i in range(1152): 24 | u_hat = slim.conv2d(input_groups[i], num_outputs=16*10, kernel_size=[1, 1], stride=1, padding='VALID', scope='DigitCaps_layer_w_'+str(i), activation_fn=None) 25 | u_hat = tf.reshape(u_hat, [cfg.batch_size_per_gpu, 1, 10, 16])
26 | u_hats.append(u_hat) 27 | 28 | output = tf.concat(u_hats, axis=1) 29 | assert output.get_shape() == [cfg.batch_size_per_gpu, 1152, 10, 16] 30 | 31 | b_ijs = tf.constant(np.zeros([1152, 10], dtype=np.float32)) 32 | v_js = [] 33 | for r_iter in range(cfg.iter_routing): 34 | with tf.variable_scope('iter_'+str(r_iter)): 35 | c_ijs = tf.nn.softmax(b_ijs, dim=1) 36 | 37 | c_ij_groups = tf.split(axis=1, num_or_size_splits=10, value=c_ijs) 38 | b_ij_groups = tf.split(axis=1, num_or_size_splits=10, value=b_ijs) 39 | input_groups = tf.split(axis=2, num_or_size_splits=10, value=output) 40 | 41 | s_js = [] 42 | 43 | for i in range(10): 44 | c_ij = tf.reshape(tf.tile(c_ij_groups[i], [1, 16]), [1152, 1, 16, 1]) 45 | s_j = tf.nn.depthwise_conv2d(input_groups[i], c_ij, strides=[1, 1, 1, 1], padding='VALID') 46 | assert s_j.get_shape() == [cfg.batch_size_per_gpu, 1, 1, 16] 47 | 48 | s_j = tf.reshape(s_j, [cfg.batch_size_per_gpu, 16]) 49 | s_j_norm_square = tf.reduce_mean(tf.square(s_j), axis=1, keep_dims=True) 50 | v_j = s_j_norm_square*s_j/((1+s_j_norm_square)*tf.sqrt(s_j_norm_square+1e-9)) 51 | assert v_j.get_shape() == [cfg.batch_size_per_gpu, 16] 52 | 53 | b_ij_groups[i] = b_ij_groups[i]+tf.reduce_sum(tf.matmul(tf.reshape(input_groups[i], [cfg.batch_size_per_gpu, 1152, 16]), tf.reshape(v_j, [cfg.batch_size, 16, 1])), axis=0) 54 | 55 | if r_iter == cfg.iter_routing-1: 56 | v_js.append(tf.reshape(v_j, [cfg.batch_size_per_gpu, 1, 16])) 57 | 58 | b_ijs = tf.concat(b_ij_groups, axis=1) 59 | 60 | output = tf.concat(v_js, axis=1) 61 | 62 | with tf.variable_scope('Masking'): 63 | v_len = tf.norm(output, axis=2) 64 | 65 | if is_train: 66 | masked_v = tf.matmul(output, tf.reshape(y, [-1, 10, 1]), transpose_a=True) 67 | masked_v = tf.reshape(masked_v, [-1, 16]) 68 | 69 | with tf.variable_scope('Decoder'): 70 | output = slim.fully_connected(masked_v, 512, trainable=is_train) 71 | output = slim.fully_connected(output, 1024, trainable=is_train) 72 | output = slim.fully_connected(output, 
784, trainable=is_train, activation_fn=tf.sigmoid) 73 | 74 | return v_len, output 75 | 76 | def loss(v_len, output, x, y): 77 | max_l = tf.square(tf.maximum(0., cfg.m_plus-v_len)) 78 | max_r = tf.square(tf.maximum(0., v_len - cfg.m_minus)) 79 | 80 | l_c = y*max_l+cfg.lambda_val * (1 - y) * max_r 81 | 82 | margin_loss = tf.reduce_mean(tf.reduce_sum(l_c, axis=1)) 83 | 84 | origin = tf.reshape(x, shape=[cfg.batch_size, -1]) 85 | reconstruction_err = tf.reduce_mean(tf.square(output-origin)) 86 | 87 | total_loss = margin_loss+0.0005*reconstruction_err 88 | 89 | tf.losses.add_loss(total_loss) 90 | 91 | return total_loss 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | -------------------------------------------------------------------------------- /dist_version/distributed_train.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.append('.') 4 | 5 | import tensorflow as tf 6 | from config import cfg 7 | from utils import load_mnist 8 | import dist_version.capsnet_slim as net 9 | import time 10 | import tensorflow.contrib.slim as slim 11 | import re 12 | import copy 13 | import numpy as np 14 | import os 15 | 16 | def create_inputs(): 17 | trX, trY = load_mnist(cfg.dataset, cfg.is_training) 18 | 19 | num_pre_threads = cfg.thread_per_gpu*cfg.num_gpu 20 | data_queue = tf.train.slice_input_producer([trX, trY], capacity=64*num_pre_threads) 21 | X, Y = tf.train.shuffle_batch(data_queue, num_threads=num_pre_threads, 22 | batch_size=cfg.batch_size_per_gpu*cfg.num_gpu, 23 | capacity=cfg.batch_size_per_gpu*cfg.num_gpu * 64, 24 | min_after_dequeue=cfg.batch_size_per_gpu*cfg.num_gpu * 32, 25 | allow_smaller_final_batch=False) 26 | 27 | return (X, Y) 28 | 29 | def tower_loss(x, y, scope, reuse_variables=None): 30 | with tf.variable_scope(tf.get_variable_scope(), reuse=reuse_variables): 31 | v_len, output = net.build_arch(x, y, is_train=True) 32 | 33 | net.loss(v_len, 
output, x, y) 34 | 35 | loss = tf.get_collection(tf.GraphKeys.LOSSES, scope)[0] 36 | loss_name = re.sub('%s_[0-9]*/' % 'tower_', '', loss.op.name) 37 | tf.summary.scalar(loss_name, loss) 38 | 39 | return loss 40 | 41 | def average_gradients(tower_grads): 42 | """Calculate the average gradient for each shared variable across all towers. 43 | 44 | Note that this function provides a synchronization point across all towers. 45 | 46 | Args: 47 | tower_grads: List of lists of (gradient, variable) tuples. The outer list 48 | is over individual gradients. The inner list is over the gradient 49 | calculation for each tower. 50 | Returns: 51 | List of pairs of (gradient, variable) where the gradient has been averaged 52 | across all towers. 53 | """ 54 | average_grads = [] 55 | for grad_and_vars in zip(*tower_grads): 56 | # Note that each grad_and_vars looks like the following: 57 | # ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN)) 58 | grads = [] 59 | for g, _ in grad_and_vars: 60 | # Add 0 dimension to the gradients to represent the tower. 61 | expanded_g = tf.expand_dims(g, 0) 62 | 63 | # Append on a 'tower' dimension which we will average over below. 64 | grads.append(expanded_g) 65 | 66 | # Average over the 'tower' dimension. 67 | grad = tf.concat(axis=0, values=grads) 68 | grad = tf.reduce_mean(grad, 0) 69 | 70 | # Keep in mind that the Variables are redundant because they are shared 71 | # across towers. So .. we will just return the first tower's pointer to 72 | # the Variable. 
73 | v = grad_and_vars[0][1] 74 | grad_and_var = (grad, v) 75 | average_grads.append(grad_and_var) 76 | return average_grads 77 | 78 | def main(_): 79 | with tf.Graph().as_default(), tf.device('/cpu:0'): 80 | global_step = tf.get_variable('global_step', [], 81 | initializer=tf.constant_initializer(0), 82 | trainable=False) 83 | 84 | num_batches_per_epoch = int(60000/(cfg.batch_size_per_gpu*cfg.num_gpu)) 85 | 86 | opt = tf.train.AdamOptimizer() 87 | 88 | batch_x, batch_labels = create_inputs() 89 | batch_y = tf.one_hot(batch_labels, depth=10, axis=1, dtype=tf.float32) 90 | input_summaries = copy.copy(tf.get_collection(tf.GraphKeys.SUMMARIES)) 91 | 92 | x_splits = tf.split(axis=0, num_or_size_splits=cfg.num_gpu, value=batch_x) 93 | y_splits = tf.split(axis=0, num_or_size_splits=cfg.num_gpu, value=batch_y) 94 | 95 | tower_grads = [] 96 | reuse_variables = None 97 | for i in range(cfg.num_gpu): 98 | with tf.device('/gpu:%d' % i): 99 | with tf.name_scope('%s_%d' % ('tower_', i)) as scope: 100 | with slim.arg_scope([slim.variable], device='/cpu:0'): 101 | loss = tower_loss(x_splits[i], y_splits[i], scope, reuse_variables) 102 | 103 | reuse_variables = True 104 | 105 | summaries = tf.get_collection(tf.GraphKeys.SUMMARIES, scope) 106 | 107 | grads = opt.compute_gradients(loss) 108 | tower_grads.append(grads) 109 | 110 | grad = average_gradients(tower_grads) 111 | 112 | summaries.extend(input_summaries) 113 | 114 | train_op = opt.apply_gradients(grad, global_step=global_step) 115 | 116 | config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False) 117 | config.gpu_options.allow_growth = True 118 | sess = tf.Session(config=config) 119 | sess.run(tf.global_variables_initializer()) 120 | 121 | saver = tf.train.Saver(tf.global_variables(), max_to_keep=cfg.epoch) 122 | summary_op = tf.summary.merge(summaries) 123 | tf.train.start_queue_runners(sess=sess) 124 | 125 | summary_writer = tf.summary.FileWriter( 126 | cfg.logdir, 127 | graph=sess.graph) 128 | 129 | 
for step in range(cfg.epoch*num_batches_per_epoch): 130 | tic = time.time() 131 | _, loss_value = sess.run([train_op, loss]) 132 | print(str(time.time()-tic)+' '+str(step)) 133 | 134 | assert not np.isnan(loss_value) 135 | 136 | if step % 10 == 0: 137 | summary_str = sess.run(summary_op) 138 | summary_writer.add_summary(summary_str, step) 139 | 140 | if step % num_batches_per_epoch == 0 or (step+1) == cfg.epoch*num_batches_per_epoch: 141 | ckpt_path = os.path.join(cfg.logdir, 'model.ckpt') 142 | saver.save(sess, ckpt_path, global_step=step) 143 | 144 | if __name__ == "__main__": 145 | tf.app.run() 146 | -------------------------------------------------------------------------------- /download_data.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import gzip 4 | import shutil 5 | from six.moves import urllib 6 | 7 | # mnist dataset 8 | HOMEPAGE = "http://yann.lecun.com/exdb/mnist/" 9 | MNIST_TRAIN_IMGS_URL = HOMEPAGE + "train-images-idx3-ubyte.gz" 10 | MNIST_TRAIN_LABELS_URL = HOMEPAGE + "train-labels-idx1-ubyte.gz" 11 | MNIST_TEST_IMGS_URL = HOMEPAGE + "t10k-images-idx3-ubyte.gz" 12 | MNIST_TEST_LABELS_URL = HOMEPAGE + "t10k-labels-idx1-ubyte.gz" 13 | 14 | # fashion-mnist dataset 15 | HOMEPAGE = "http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/" 16 | FASHION_MNIST_TRAIN_IMGS_URL = HOMEPAGE + "train-images-idx3-ubyte.gz" 17 | FASHION_MNIST_TRAIN_LABELS_URL = HOMEPAGE + "train-labels-idx1-ubyte.gz" 18 | FASHION_MNIST_TEST_IMGS_URL = HOMEPAGE + "t10k-images-idx3-ubyte.gz" 19 | FASHION_MNIST_TEST_LABELS_URL = HOMEPAGE + "t10k-labels-idx1-ubyte.gz" 20 | 21 | 22 | def download_and_uncompress_zip(URL, dataset_dir, force=False): 23 | ''' 24 | Args: 25 | URL: the download links for data 26 | dataset_dir: the path to save data 27 | force: re-download data 28 | ''' 29 | filename = URL.split('/')[-1] 30 | filepath = os.path.join(dataset_dir, filename) 31 | if not os.path.exists(dataset_dir): 32 | 
os.mkdir(dataset_dir) 33 | extract_to = os.path.splitext(filepath)[0] 34 | 35 | def download_progress(count, block_size, total_size): 36 | sys.stdout.write("\r>> Downloading %s %.1f%%" % (filename, float(count * block_size) / float(total_size) * 100.)) 37 | sys.stdout.flush() 38 | 39 | if not force and os.path.exists(filepath): 40 | print("file %s already exist" % (filename)) 41 | else: 42 | filepath, _ = urllib.request.urlretrieve(URL, filepath, download_progress) 43 | print() 44 | print('Successfully Downloaded', filename) 45 | 46 | # with zipfile.ZipFile(filepath) as fd: 47 | with gzip.open(filepath, 'rb') as f_in, open(extract_to, 'wb') as f_out: 48 | print('Extracting ', filename) 49 | shutil.copyfileobj(f_in, f_out) 50 | print('Successfully extracted') 51 | print() 52 | 53 | 54 | def start_download(dataset, save_to, force): 55 | if not os.path.exists(save_to): 56 | os.makedirs(save_to) 57 | if dataset == 'mnist': 58 | download_and_uncompress_zip(MNIST_TRAIN_IMGS_URL, save_to, force) 59 | download_and_uncompress_zip(MNIST_TRAIN_LABELS_URL, save_to, force) 60 | download_and_uncompress_zip(MNIST_TEST_IMGS_URL, save_to, force) 61 | download_and_uncompress_zip(MNIST_TEST_LABELS_URL, save_to, force) 62 | elif dataset == 'fashion-mnist': 63 | download_and_uncompress_zip(FASHION_MNIST_TRAIN_IMGS_URL, save_to, force) 64 | download_and_uncompress_zip(FASHION_MNIST_TRAIN_LABELS_URL, save_to, force) 65 | download_and_uncompress_zip(FASHION_MNIST_TEST_IMGS_URL, save_to, force) 66 | download_and_uncompress_zip(FASHION_MNIST_TEST_LABELS_URL, save_to, force) 67 | else: 68 | raise Exception("Invalid dataset name! 
please check it: ", dataset) 69 | 70 | if __name__ == '__main__': 71 | import argparse 72 | parser = argparse.ArgumentParser('Script for automatically downloading datasets') 73 | parser.add_argument("--dataset", default='mnist', choices=['mnist', 'fashion-mnist', 'smallNORB']) 74 | save_to = os.path.join('data', 'mnist') 75 | parser.add_argument("--save_to", default=save_to) 76 | parser.add_argument("--force", default=False, type=bool) 77 | args = parser.parse_args() 78 | start_download(args.dataset, args.save_to, args.force) 79 | -------------------------------------------------------------------------------- /imgs/capsuleVSneuron.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/naturomics/CapsNet-Tensorflow/5b464caab361ec402c1b17acb9bc2680e5fbb7de/imgs/capsuleVSneuron.png -------------------------------------------------------------------------------- /imgs/my_wechat_QR.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/naturomics/CapsNet-Tensorflow/5b464caab361ec402c1b17acb9bc2680e5fbb7de/imgs/my_wechat_QR.png -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import numpy as np 4 | import tensorflow as tf 5 | from tqdm import tqdm 6 | 7 | from config import cfg 8 | from utils import load_data 9 | from capsNet import CapsNet 10 | 11 | 12 | def save_to(): 13 | if not os.path.exists(cfg.results): 14 | os.mkdir(cfg.results) 15 | if cfg.is_training: 16 | loss = cfg.results + '/loss.csv' 17 | train_acc = cfg.results + '/train_acc.csv' 18 | val_acc = cfg.results + '/val_acc.csv' 19 | 20 | if os.path.exists(val_acc): 21 | os.remove(val_acc) 22 | if os.path.exists(loss): 23 | os.remove(loss) 24 | if os.path.exists(train_acc): 25 | os.remove(train_acc) 26 | 27 | fd_train_acc = 
open(train_acc, 'w') 28 | fd_train_acc.write('step,train_acc\n') 29 | fd_loss = open(loss, 'w') 30 | fd_loss.write('step,loss\n') 31 | fd_val_acc = open(val_acc, 'w') 32 | fd_val_acc.write('step,val_acc\n') 33 | return(fd_train_acc, fd_loss, fd_val_acc) 34 | else: 35 | test_acc = cfg.results + '/test_acc.csv' 36 | if os.path.exists(test_acc): 37 | os.remove(test_acc) 38 | fd_test_acc = open(test_acc, 'w') 39 | fd_test_acc.write('test_acc\n') 40 | return(fd_test_acc) 41 | 42 | 43 | def train(model, supervisor, num_label): 44 | trX, trY, num_tr_batch, valX, valY, num_val_batch = load_data(cfg.dataset, cfg.batch_size, is_training=True) 45 | Y = valY[:num_val_batch * cfg.batch_size].reshape((-1, 1)) 46 | 47 | fd_train_acc, fd_loss, fd_val_acc = save_to() 48 | config = tf.ConfigProto() 49 | config.gpu_options.allow_growth = True 50 | with supervisor.managed_session(config=config) as sess: 51 | print("\nNote: all of results will be saved to directory: " + cfg.results) 52 | for epoch in range(cfg.epoch): 53 | print("Training for epoch %d/%d:" % (epoch, cfg.epoch)) 54 | if supervisor.should_stop(): 55 | print('supervisor stoped!') 56 | break 57 | for step in tqdm(range(num_tr_batch), total=num_tr_batch, ncols=70, leave=False, unit='b'): 58 | start = step * cfg.batch_size 59 | end = start + cfg.batch_size 60 | global_step = epoch * num_tr_batch + step 61 | 62 | if global_step % cfg.train_sum_freq == 0: 63 | _, loss, train_acc, summary_str = sess.run([model.train_op, model.total_loss, model.accuracy, model.train_summary]) 64 | assert not np.isnan(loss), 'Something wrong! loss is nan...' 
65 | supervisor.summary_writer.add_summary(summary_str, global_step) 66 | 67 | fd_loss.write(str(global_step) + ',' + str(loss) + "\n") 68 | fd_loss.flush() 69 | fd_train_acc.write(str(global_step) + ',' + str(train_acc / cfg.batch_size) + "\n") 70 | fd_train_acc.flush() 71 | else: 72 | sess.run(model.train_op) 73 | 74 | if cfg.val_sum_freq != 0 and (global_step) % cfg.val_sum_freq == 0: 75 | val_acc = 0 76 | for i in range(num_val_batch): 77 | start = i * cfg.batch_size 78 | end = start + cfg.batch_size 79 | acc = sess.run(model.accuracy, {model.X: valX[start:end], model.labels: valY[start:end]}) 80 | val_acc += acc 81 | val_acc = val_acc / (cfg.batch_size * num_val_batch) 82 | fd_val_acc.write(str(global_step) + ',' + str(val_acc) + '\n') 83 | fd_val_acc.flush() 84 | 85 | if (epoch + 1) % cfg.save_freq == 0: 86 | supervisor.saver.save(sess, cfg.logdir + '/model_epoch_%04d_step_%02d' % (epoch, global_step)) 87 | 88 | fd_val_acc.close() 89 | fd_train_acc.close() 90 | fd_loss.close() 91 | 92 | 93 | def evaluation(model, supervisor, num_label): 94 | teX, teY, num_te_batch = load_data(cfg.dataset, cfg.batch_size, is_training=False) 95 | fd_test_acc = save_to() 96 | with supervisor.managed_session(config=tf.ConfigProto(allow_soft_placement=True)) as sess: 97 | supervisor.saver.restore(sess, tf.train.latest_checkpoint(cfg.logdir)) 98 | tf.logging.info('Model restored!') 99 | 100 | test_acc = 0 101 | for i in tqdm(range(num_te_batch), total=num_te_batch, ncols=70, leave=False, unit='b'): 102 | start = i * cfg.batch_size 103 | end = start + cfg.batch_size 104 | acc = sess.run(model.accuracy, {model.X: teX[start:end], model.labels: teY[start:end]}) 105 | test_acc += acc 106 | test_acc = test_acc / (cfg.batch_size * num_te_batch) 107 | fd_test_acc.write(str(test_acc)) 108 | fd_test_acc.close() 109 | print('Test accuracy has been saved to ' + cfg.results + '/test_acc.csv') 110 | 111 | 112 | def main(_): 113 | tf.logging.info(' Loading Graph...') 114 | num_label = 10 115 | 
model = CapsNet() 116 | tf.logging.info(' Graph loaded') 117 | 118 | sv = tf.train.Supervisor(graph=model.graph, logdir=cfg.logdir, save_model_secs=0) 119 | 120 | if cfg.is_training: 121 | tf.logging.info(' Start training...') 122 | train(model, sv, num_label) 123 | tf.logging.info('Training done') 124 | else: 125 | evaluation(model, sv, num_label) 126 | 127 | if __name__ == "__main__": 128 | tf.app.run() 129 | -------------------------------------------------------------------------------- /plot_acc.R: -------------------------------------------------------------------------------- 1 | #! /bin/env Rscript 2 | 3 | library(ggplot2) 4 | 5 | dat1<-read.csv('results/accuracy_1_iter.csv') 6 | dat2<-read.csv('results/accuracy_2_iter.csv') 7 | dat3<-read.csv('results/accuracy_3_iter.csv') 8 | data_dim<-dim(dat1) 9 | dat1<-data.frame(dat1, routing_iter=factor(rep(1, data_dim[1]))) 10 | dat2<-data.frame(dat2, routing_iter=factor(rep(2, data_dim[1]))) 11 | dat3<-data.frame(dat3, routing_iter=factor(rep(3, data_dim[1]))) 12 | data<-rbind(dat1,dat2,dat3) 13 | p<-ggplot(data,aes(x=step, y=test_acc, color=routing_iter)) 14 | p<-p + geom_line() + ylim(c(0.975, .997)) + xlim(c(0, 49000)) 15 | p<-p + labs(title='Test accuracy of different routing iterations') + theme(plot.title=element_text(hjust=0.5), legend.position=c(0.92, 0.6)) 16 | ggsave(p, filename='results/routing_trials.png', width=5, height=5) 17 | -------------------------------------------------------------------------------- /results/margin_loss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/naturomics/CapsNet-Tensorflow/5b464caab361ec402c1b17acb9bc2680e5fbb7de/results/margin_loss.png -------------------------------------------------------------------------------- /results/mnist/1_iter_train_acc.csv: -------------------------------------------------------------------------------- 1 | step,train_acc 2 | 0,0.109375 3 | 100,0.953125 4 | 200,0.953125 
5 | 300,0.953125 6 | 400,0.96875 7 | 500,0.9765625 8 | 600,0.984375 9 | 700,0.984375 10 | 800,0.9921875 11 | 900,0.9765625 12 | 1000,1.0 13 | 1100,0.9921875 14 | 1200,1.0 15 | 1300,0.9921875 16 | 1400,0.9921875 17 | 1500,1.0 18 | 1600,1.0 19 | 1700,0.9921875 20 | 1800,0.9921875 21 | 1900,0.9921875 22 | 2000,1.0 23 | 2100,0.9921875 24 | 2200,1.0 25 | 2300,1.0 26 | 2400,1.0 27 | 2500,1.0 28 | 2600,1.0 29 | 2700,1.0 30 | 2800,1.0 31 | 2900,1.0 32 | 3000,1.0 33 | 3100,1.0 34 | 3200,0.9921875 35 | 3300,1.0 36 | 3400,1.0 37 | 3500,0.9921875 38 | 3600,1.0 39 | 3700,1.0 40 | 3800,1.0 41 | 3900,1.0 42 | 4000,1.0 43 | 4100,1.0 44 | 4200,1.0 45 | 4300,1.0 46 | 4400,1.0 47 | 4500,1.0 48 | 4600,1.0 49 | 4700,1.0 50 | 4800,1.0 51 | 4900,1.0 52 | 5000,1.0 53 | 5100,1.0 54 | 5200,1.0 55 | 5300,1.0 56 | 5400,1.0 57 | 5500,1.0 58 | 5600,1.0 59 | 5700,1.0 60 | 5800,1.0 61 | 5900,1.0 62 | 6000,1.0 63 | 6100,1.0 64 | 6200,1.0 65 | 6300,1.0 66 | 6400,1.0 67 | 6500,1.0 68 | 6600,1.0 69 | 6700,1.0 70 | 6800,1.0 71 | 6900,1.0 72 | 7000,1.0 73 | 7100,1.0 74 | 7200,1.0 75 | 7300,1.0 76 | 7400,1.0 77 | 7500,1.0 78 | 7600,1.0 79 | 7700,1.0 80 | 7800,1.0 81 | 7900,1.0 82 | 8000,1.0 83 | 8100,1.0 84 | 8200,1.0 85 | 8300,1.0 86 | 8400,1.0 87 | 8500,1.0 88 | 8600,1.0 89 | 8700,1.0 90 | 8800,1.0 91 | 8900,1.0 92 | 9000,1.0 93 | 9100,1.0 94 | 9200,1.0 95 | 9300,1.0 96 | 9400,1.0 97 | 9500,1.0 98 | 9600,1.0 99 | 9700,1.0 100 | 9800,1.0 101 | 9900,1.0 102 | 10000,1.0 103 | 10100,1.0 104 | 10200,1.0 105 | 10300,1.0 106 | 10400,1.0 107 | 10500,1.0 108 | 10600,1.0 109 | 10700,1.0 110 | 10800,1.0 111 | 10900,1.0 112 | 11000,1.0 113 | 11100,1.0 114 | 11200,1.0 115 | 11300,1.0 116 | 11400,1.0 117 | 11500,1.0 118 | 11600,1.0 119 | 11700,1.0 120 | 11800,1.0 121 | 11900,1.0 122 | 12000,1.0 123 | 12100,1.0 124 | 12200,1.0 125 | 12300,1.0 126 | 12400,1.0 127 | 12500,1.0 128 | 12600,1.0 129 | 12700,1.0 130 | 12800,1.0 131 | 12900,1.0 132 | 13000,1.0 133 | 13100,1.0 134 | 13200,1.0 135 | 13300,1.0 136 | 13400,1.0 
137 | 13500,1.0 138 | 13600,1.0 139 | 13700,1.0 140 | 13800,1.0 141 | 13900,1.0 142 | 14000,1.0 143 | 14100,1.0 144 | 14200,1.0 145 | 14300,1.0 146 | 14400,1.0 147 | 14500,1.0 148 | 14600,1.0 149 | 14700,1.0 150 | 14800,1.0 151 | 14900,1.0 152 | 15000,1.0 153 | 15100,1.0 154 | 15200,1.0 155 | 15300,1.0 156 | 15400,1.0 157 | 15500,1.0 158 | 15600,1.0 159 | 15700,1.0 160 | 15800,1.0 161 | 15900,1.0 162 | 16000,1.0 163 | 16100,1.0 164 | 16200,1.0 165 | 16300,1.0 166 | 16400,1.0 167 | 16500,1.0 168 | 16600,1.0 169 | 16700,1.0 170 | 16800,1.0 171 | 16900,1.0 172 | 17000,1.0 173 | 17100,1.0 174 | 17200,1.0 175 | 17300,1.0 176 | 17400,1.0 177 | 17500,1.0 178 | 17600,1.0 179 | 17700,1.0 180 | 17800,1.0 181 | 17900,1.0 182 | 18000,1.0 183 | 18100,1.0 184 | 18200,1.0 185 | 18300,1.0 186 | 18400,1.0 187 | 18500,1.0 188 | 18600,1.0 189 | 18700,1.0 190 | 18800,1.0 191 | 18900,1.0 192 | 19000,1.0 193 | 19100,1.0 194 | 19200,1.0 195 | 19300,1.0 196 | 19400,1.0 197 | 19500,1.0 198 | 19600,1.0 199 | 19700,1.0 200 | 19800,1.0 201 | 19900,1.0 202 | 20000,1.0 203 | 20100,1.0 204 | 20200,1.0 205 | 20300,1.0 206 | 20400,1.0 207 | 20500,1.0 208 | 20600,1.0 209 | 20700,1.0 210 | 20800,1.0 211 | 20900,1.0 212 | 21000,1.0 213 | 21100,1.0 214 | 21200,1.0 215 | 21300,1.0 216 | 21400,1.0 217 | 21500,1.0 218 | 21600,1.0 219 | 21700,1.0 220 | 21800,1.0 221 | 21900,1.0 222 | 22000,1.0 223 | 22100,1.0 224 | 22200,1.0 225 | 22300,1.0 226 | 22400,1.0 227 | 22500,1.0 228 | 22600,1.0 229 | 22700,1.0 230 | 22800,1.0 231 | 22900,1.0 232 | 23000,1.0 233 | 23100,1.0 234 | 23200,1.0 235 | 23300,1.0 236 | 23400,1.0 237 | 23500,1.0 238 | 23600,1.0 239 | 23700,1.0 240 | 23800,1.0 241 | 23900,1.0 242 | 24000,1.0 243 | 24100,1.0 244 | 24200,1.0 245 | 24300,1.0 246 | 24400,1.0 247 | 24500,1.0 248 | 24600,1.0 249 | 24700,1.0 250 | 24800,1.0 251 | 24900,1.0 252 | 25000,1.0 253 | 25100,1.0 254 | 25200,1.0 255 | 25300,1.0 256 | 25400,1.0 257 | 25500,1.0 258 | 25600,1.0 259 | 25700,1.0 260 | 25800,1.0 261 | 25900,1.0 
262 | 26000,1.0 263 | 26100,1.0 264 | 26200,1.0 265 | 26300,1.0 266 | 26400,1.0 267 | 26500,1.0 268 | 26600,1.0 269 | 26700,1.0 270 | 26800,1.0 271 | 26900,1.0 272 | 27000,1.0 273 | 27100,1.0 274 | 27200,1.0 275 | 27300,1.0 276 | 27400,1.0 277 | 27500,1.0 278 | 27600,1.0 279 | 27700,1.0 280 | 27800,1.0 281 | 27900,1.0 282 | 28000,1.0 283 | 28100,1.0 284 | 28200,1.0 285 | 28300,1.0 286 | 28400,1.0 287 | 28500,1.0 288 | 28600,1.0 289 | 28700,1.0 290 | 28800,1.0 291 | 28900,1.0 292 | 29000,1.0 293 | 29100,1.0 294 | 29200,1.0 295 | 29300,1.0 296 | 29400,1.0 297 | 29500,1.0 298 | 29600,1.0 299 | 29700,1.0 300 | 29800,1.0 301 | 29900,1.0 302 | 30000,1.0 303 | 30100,1.0 304 | 30200,1.0 305 | 30300,1.0 306 | 30400,1.0 307 | 30500,1.0 308 | 30600,1.0 309 | 30700,1.0 310 | 30800,1.0 311 | 30900,1.0 312 | 31000,1.0 313 | 31100,1.0 314 | 31200,1.0 315 | 31300,1.0 316 | 31400,1.0 317 | 31500,1.0 318 | 31600,1.0 319 | 31700,1.0 320 | 31800,1.0 321 | 31900,1.0 322 | 32000,1.0 323 | 32100,1.0 324 | 32200,1.0 325 | 32300,1.0 326 | 32400,1.0 327 | 32500,1.0 328 | 32600,1.0 329 | 32700,1.0 330 | 32800,1.0 331 | 32900,1.0 332 | 33000,1.0 333 | 33100,1.0 334 | 33200,1.0 335 | 33300,1.0 336 | 33400,1.0 337 | 33500,1.0 338 | 33600,1.0 339 | 33700,1.0 340 | 33800,1.0 341 | 33900,1.0 342 | 34000,1.0 343 | 34100,1.0 344 | 34200,1.0 345 | 34300,1.0 346 | 34400,1.0 347 | 34500,1.0 348 | 34600,1.0 349 | 34700,1.0 350 | 34800,1.0 351 | 34900,1.0 352 | 35000,1.0 353 | 35100,1.0 354 | 35200,1.0 355 | 35300,1.0 356 | 35400,1.0 357 | 35500,1.0 358 | 35600,1.0 359 | 35700,1.0 360 | 35800,1.0 361 | 35900,1.0 362 | 36000,1.0 363 | 36100,1.0 364 | 36200,1.0 365 | 36300,1.0 366 | 36400,1.0 367 | 36500,1.0 368 | 36600,1.0 369 | 36700,1.0 370 | 36800,1.0 371 | 36900,1.0 372 | 37000,1.0 373 | 37100,1.0 374 | 37200,1.0 375 | 37300,1.0 376 | 37400,1.0 377 | 37500,1.0 378 | 37600,1.0 379 | 37700,1.0 380 | 37800,1.0 381 | 37900,1.0 382 | 38000,1.0 383 | 38100,1.0 384 | 38200,1.0 385 | 38300,1.0 386 | 38400,1.0 
387 | 38500,1.0 388 | 38600,1.0 389 | 38700,1.0 390 | 38800,1.0 391 | 38900,1.0 392 | 39000,1.0 393 | 39100,1.0 394 | 39200,1.0 395 | 39300,1.0 396 | 39400,1.0 397 | 39500,1.0 398 | 39600,1.0 399 | 39700,1.0 400 | 39800,1.0 401 | 39900,1.0 402 | 40000,1.0 403 | 40100,1.0 404 | 40200,1.0 405 | 40300,1.0 406 | 40400,1.0 407 | 40500,1.0 408 | 40600,1.0 409 | 40700,1.0 410 | 40800,1.0 411 | 40900,1.0 412 | 41000,1.0 413 | 41100,1.0 414 | 41200,1.0 415 | 41300,1.0 416 | 41400,1.0 417 | 41500,1.0 418 | 41600,1.0 419 | 41700,1.0 420 | 41800,1.0 421 | 41900,1.0 422 | 42000,1.0 423 | 42100,1.0 424 | 42200,1.0 425 | 42300,1.0 426 | 42400,1.0 427 | 42500,1.0 428 | 42600,1.0 429 | 42700,1.0 430 | 42800,1.0 431 | 42900,1.0 432 | 43000,1.0 433 | 43100,1.0 434 | 43200,1.0 435 | 43300,1.0 436 | 43400,1.0 437 | 43500,1.0 438 | 43600,1.0 439 | 43700,1.0 440 | 43800,1.0 441 | 43900,1.0 442 | 44000,1.0 443 | 44100,1.0 444 | 44200,1.0 445 | 44300,1.0 446 | 44400,1.0 447 | 44500,1.0 448 | 44600,1.0 449 | 44700,1.0 450 | 44800,1.0 451 | 44900,1.0 452 | 45000,1.0 453 | 45100,1.0 454 | 45200,1.0 455 | 45300,1.0 456 | 45400,1.0 457 | 45500,1.0 458 | 45600,1.0 459 | 45700,1.0 460 | 45800,1.0 461 | 45900,1.0 462 | 46000,1.0 463 | 46100,1.0 464 | 46200,1.0 465 | 46300,1.0 466 | 46400,1.0 467 | 46500,1.0 468 | 46600,1.0 469 | 46700,1.0 470 | 46800,1.0 471 | 46900,1.0 472 | 47000,1.0 473 | 47100,1.0 474 | 47200,1.0 475 | 47300,1.0 476 | 47400,1.0 477 | 47500,1.0 478 | 47600,1.0 479 | 47700,1.0 480 | 47800,1.0 481 | 47900,1.0 482 | 48000,1.0 483 | 48100,1.0 484 | 48200,1.0 485 | 48300,1.0 486 | 48400,1.0 487 | 48500,1.0 488 | 48600,1.0 489 | 48700,1.0 490 | 48800,1.0 491 | 48900,1.0 492 | 49000,1.0 493 | 49100,1.0 494 | 49200,1.0 495 | 49300,1.0 496 | 49400,1.0 497 | 49500,1.0 498 | 49600,1.0 499 | 49700,1.0 500 | 49800,1.0 501 | 49900,1.0 502 | 50000,1.0 503 | 50100,1.0 504 | 50200,1.0 505 | 50300,1.0 506 | 50400,1.0 507 | 50500,1.0 508 | 50600,1.0 509 | 50700,1.0 510 | 50800,1.0 511 | 50900,1.0 
512 | 51000,1.0 513 | 51100,1.0 514 | 51200,1.0 515 | 51300,1.0 516 | 51400,1.0 517 | 51500,1.0 518 | 51600,1.0 519 | 51700,1.0 520 | 51800,1.0 521 | 51900,1.0 522 | 52000,1.0 523 | 52100,1.0 524 | 52200,1.0 525 | 52300,1.0 526 | 52400,1.0 527 | 52500,1.0 528 | 52600,1.0 529 | 52700,1.0 530 | 52800,1.0 531 | 52900,1.0 532 | 53000,1.0 533 | 53100,1.0 534 | 53200,1.0 535 | 53300,1.0 536 | 53400,1.0 537 | 53500,1.0 538 | 53600,1.0 539 | 53700,1.0 540 | 53800,1.0 541 | 53900,1.0 542 | 54000,1.0 543 | 54100,1.0 544 | 54200,1.0 545 | 54300,1.0 546 | 54400,1.0 547 | 54500,1.0 548 | 54600,1.0 549 | 54700,1.0 550 | 54800,1.0 551 | 54900,1.0 552 | 55000,1.0 553 | 55100,1.0 554 | 55200,1.0 555 | 55300,1.0 556 | 55400,1.0 557 | 55500,1.0 558 | 55600,1.0 559 | 55700,1.0 560 | 55800,1.0 561 | 55900,1.0 562 | 56000,1.0 563 | 56100,1.0 564 | 56200,1.0 565 | 56300,1.0 566 | 56400,1.0 567 | 56500,1.0 568 | 56600,1.0 569 | 56700,1.0 570 | 56800,1.0 571 | 56900,1.0 572 | 57000,1.0 573 | 57100,1.0 574 | 57200,1.0 575 | 57300,1.0 576 | 57400,1.0 577 | 57500,1.0 578 | 57600,1.0 579 | 57700,1.0 580 | 57800,1.0 581 | 57900,1.0 582 | 58000,1.0 583 | 58100,1.0 584 | 58200,1.0 585 | 58300,1.0 586 | 58400,1.0 587 | 58500,1.0 588 | 58600,1.0 589 | 58700,1.0 590 | 58800,1.0 591 | 58900,1.0 592 | 59000,1.0 593 | 59100,1.0 594 | 59200,1.0 595 | 59300,1.0 596 | 59400,1.0 597 | 59500,1.0 598 | 59600,1.0 599 | 59700,1.0 600 | 59800,1.0 601 | 59900,1.0 602 | 60000,1.0 603 | 60100,1.0 604 | 60200,1.0 605 | 60300,1.0 606 | 60400,1.0 607 | 60500,1.0 608 | 60600,1.0 609 | 60700,1.0 610 | 60800,1.0 611 | 60900,1.0 612 | 61000,1.0 613 | 61100,1.0 614 | 61200,1.0 615 | 61300,1.0 616 | 61400,1.0 617 | 61500,1.0 618 | 61600,1.0 619 | 61700,1.0 620 | 61800,1.0 621 | 61900,1.0 622 | 62000,1.0 623 | 62100,1.0 624 | 62200,1.0 625 | 62300,1.0 626 | 62400,1.0 627 | 62500,1.0 628 | 62600,1.0 629 | 62700,1.0 630 | 62800,1.0 631 | 62900,1.0 632 | 63000,1.0 633 | 63100,1.0 634 | 63200,1.0 635 | 63300,1.0 636 | 63400,1.0 
637 | 63500,1.0 638 | 63600,1.0 639 | 63700,1.0 640 | 63800,1.0 641 | 63900,1.0 642 | 64000,1.0 643 | 64100,1.0 644 | 64200,1.0 645 | 64300,1.0 646 | 64400,1.0 647 | 64500,1.0 648 | 64600,1.0 649 | 64700,1.0 650 | 64800,1.0 651 | 64900,1.0 652 | 65000,1.0 653 | 65100,1.0 654 | 65200,1.0 655 | 65300,1.0 656 | 65400,1.0 657 | 65500,1.0 658 | 65600,1.0 659 | 65700,1.0 660 | 65800,1.0 661 | 65900,1.0 662 | 66000,1.0 663 | 66100,1.0 664 | 66200,1.0 665 | 66300,1.0 666 | 66400,1.0 667 | 66500,1.0 668 | 66600,1.0 669 | 66700,1.0 670 | 66800,1.0 671 | 66900,1.0 672 | 67000,1.0 673 | 67100,1.0 674 | 67200,1.0 675 | 67300,1.0 676 | 67400,1.0 677 | 67500,1.0 678 | 67600,1.0 679 | 67700,1.0 680 | 67800,1.0 681 | 67900,1.0 682 | 68000,1.0 683 | 68100,1.0 684 | 68200,1.0 685 | 68300,1.0 686 | 68400,1.0 687 | 68500,1.0 688 | 68600,1.0 689 | 68700,1.0 690 | 68800,1.0 691 | 68900,1.0 692 | 69000,1.0 693 | 69100,1.0 694 | 69200,1.0 695 | 69300,1.0 696 | 69400,1.0 697 | 69500,1.0 698 | 69600,1.0 699 | 69700,1.0 700 | 69800,1.0 701 | 69900,1.0 702 | 70000,1.0 703 | 70100,1.0 704 | 70200,1.0 705 | 70300,1.0 706 | 70400,1.0 707 | 70500,1.0 708 | 70600,1.0 709 | 70700,1.0 710 | 70800,1.0 711 | 70900,1.0 712 | 71000,1.0 713 | 71100,1.0 714 | 71200,1.0 715 | 71300,1.0 716 | 71400,1.0 717 | 71500,1.0 718 | 71600,1.0 719 | 71700,1.0 720 | 71800,1.0 721 | 71900,1.0 722 | 72000,1.0 723 | 72100,1.0 724 | 72200,1.0 725 | 72300,1.0 726 | 72400,1.0 727 | 72500,1.0 728 | 72600,1.0 729 | 72700,1.0 730 | 72800,1.0 731 | 72900,1.0 732 | 73000,1.0 733 | 73100,1.0 734 | 73200,1.0 735 | 73300,1.0 736 | 73400,1.0 737 | 73500,1.0 738 | 73600,1.0 739 | 73700,1.0 740 | 73800,1.0 741 | 73900,1.0 742 | 74000,1.0 743 | 74100,1.0 744 | 74200,1.0 745 | 74300,1.0 746 | 74400,1.0 747 | 74500,1.0 748 | 74600,1.0 749 | 74700,1.0 750 | 74800,1.0 751 | 74900,1.0 752 | 75000,1.0 753 | 75100,1.0 754 | 75200,1.0 755 | 75300,1.0 756 | 75400,1.0 757 | 75500,1.0 758 | 75600,1.0 759 | 75700,1.0 760 | 75800,1.0 761 | 75900,1.0 
762 | 76000,1.0 763 | 76100,1.0 764 | 76200,1.0 765 | 76300,1.0 766 | 76400,1.0 767 | 76500,1.0 768 | 76600,1.0 769 | 76700,1.0 770 | 76800,1.0 771 | 76900,1.0 772 | 77000,1.0 773 | 77100,1.0 774 | 77200,1.0 775 | 77300,1.0 776 | 77400,1.0 777 | 77500,1.0 778 | 77600,1.0 779 | 77700,1.0 780 | 77800,1.0 781 | 77900,1.0 782 | 78000,1.0 783 | 78100,1.0 784 | 78200,1.0 785 | 78300,1.0 786 | 78400,1.0 787 | 78500,1.0 788 | 78600,1.0 789 | 78700,1.0 790 | 78800,1.0 791 | 78900,1.0 792 | 79000,1.0 793 | 79100,1.0 794 | 79200,1.0 795 | 79300,1.0 796 | 79400,1.0 797 | 79500,1.0 798 | 79600,1.0 799 | 79700,1.0 800 | 79800,1.0 801 | 79900,1.0 802 | 80000,1.0 803 | 80100,1.0 804 | 80200,1.0 805 | 80300,1.0 806 | 80400,1.0 807 | 80500,1.0 808 | 80600,1.0 809 | 80700,1.0 810 | 80800,1.0 811 | 80900,1.0 812 | 81000,1.0 813 | 81100,1.0 814 | 81200,1.0 815 | 81300,1.0 816 | 81400,1.0 817 | 81500,1.0 818 | 81600,1.0 819 | 81700,1.0 820 | 81800,1.0 821 | 81900,1.0 822 | 82000,1.0 823 | 82100,1.0 824 | 82200,1.0 825 | 82300,1.0 826 | 82400,1.0 827 | 82500,1.0 828 | 82600,1.0 829 | 82700,1.0 830 | 82800,1.0 831 | 82900,1.0 832 | 83000,1.0 833 | 83100,1.0 834 | 83200,1.0 835 | 83300,1.0 836 | 83400,1.0 837 | 83500,1.0 838 | 83600,1.0 839 | 83700,1.0 840 | 83800,1.0 841 | 83900,1.0 842 | 84000,1.0 843 | 84100,1.0 844 | 84200,1.0 845 | 84300,1.0 846 | 84400,1.0 847 | 84500,1.0 848 | 84600,1.0 849 | 84700,1.0 850 | 84800,1.0 851 | 84900,1.0 852 | 85000,1.0 853 | 85100,1.0 854 | 85200,1.0 855 | 85300,1.0 856 | 85400,1.0 857 | 85500,1.0 858 | 85600,1.0 859 | 85700,1.0 860 | 85800,1.0 861 | 85900,1.0 862 | 86000,1.0 863 | 86100,1.0 864 | 86200,1.0 865 | 86300,1.0 866 | 86400,1.0 867 | 86500,1.0 868 | 86600,1.0 869 | 86700,1.0 870 | 86800,1.0 871 | 86900,1.0 872 | 87000,1.0 873 | 87100,1.0 874 | 87200,1.0 875 | 87300,1.0 876 | 87400,1.0 877 | 87500,1.0 878 | 87600,1.0 879 | 87700,1.0 880 | 87800,1.0 881 | 87900,1.0 882 | 88000,1.0 883 | 88100,1.0 884 | 88200,1.0 885 | 88300,1.0 886 | 88400,1.0 
887 | 88500,1.0 888 | 88600,1.0 889 | 88700,1.0 890 | 88800,1.0 891 | 88900,1.0 892 | 89000,1.0 893 | 89100,1.0 894 | 89200,1.0 895 | 89300,1.0 896 | 89400,1.0 897 | 89500,1.0 898 | 89600,1.0 899 | 89700,1.0 900 | 89800,1.0 901 | 89900,1.0 902 | 90000,1.0 903 | 90100,1.0 904 | 90200,1.0 905 | 90300,1.0 906 | 90400,1.0 907 | 90500,1.0 908 | 90600,1.0 909 | 90700,1.0 910 | 90800,1.0 911 | 90900,1.0 912 | 91000,1.0 913 | 91100,1.0 914 | 91200,1.0 915 | 91300,1.0 916 | 91400,1.0 917 | 91500,1.0 918 | 91600,1.0 919 | 91700,1.0 920 | 91800,1.0 921 | 91900,1.0 922 | 92000,1.0 923 | 92100,1.0 924 | 92200,1.0 925 | 92300,1.0 926 | 92400,1.0 927 | 92500,1.0 928 | 92600,1.0 929 | 92700,1.0 930 | 92800,1.0 931 | 92900,1.0 932 | 93000,1.0 933 | 93100,1.0 934 | 93200,1.0 935 | 93300,1.0 936 | 93400,1.0 937 | 93500,1.0 938 | 93600,1.0 939 | 93700,1.0 940 | 93800,1.0 941 | 93900,1.0 942 | 94000,1.0 943 | 94100,1.0 944 | 94200,1.0 945 | 94300,1.0 946 | 94400,1.0 947 | 94500,1.0 948 | 94600,1.0 949 | 94700,1.0 950 | 94800,1.0 951 | 94900,1.0 952 | 95000,1.0 953 | 95100,1.0 954 | 95200,1.0 955 | 95300,1.0 956 | 95400,1.0 957 | 95500,1.0 958 | 95600,1.0 959 | 95700,1.0 960 | 95800,1.0 961 | 95900,1.0 962 | 96000,1.0 963 | 96100,1.0 964 | 96200,1.0 965 | 96300,1.0 966 | 96400,1.0 967 | 96500,1.0 968 | 96600,1.0 969 | 96700,1.0 970 | 96800,1.0 971 | 96900,1.0 972 | 97000,1.0 973 | 97100,1.0 974 | 97200,1.0 975 | 97300,1.0 976 | 97400,1.0 977 | 97500,1.0 978 | 97600,1.0 979 | 97700,1.0 980 | 97800,1.0 981 | 97900,1.0 982 | 98000,1.0 983 | 98100,1.0 984 | 98200,1.0 985 | 98300,1.0 986 | 98400,1.0 987 | 98500,1.0 988 | 98600,1.0 989 | 98700,1.0 990 | 98800,1.0 991 | 98900,1.0 992 | 99000,1.0 993 | 99100,1.0 994 | 99200,1.0 995 | 99300,1.0 996 | 99400,1.0 997 | 99500,1.0 998 | 99600,1.0 999 | 99700,1.0 1000 | 99800,1.0 1001 | 99900,1.0 1002 | 100000,1.0 1003 | 100100,1.0 1004 | 100200,1.0 1005 | 100300,1.0 1006 | 100400,1.0 1007 | 100500,1.0 1008 | 100600,1.0 1009 | 100700,1.0 1010 | 
100800,1.0 1011 | 100900,1.0 1012 | 101000,1.0 1013 | 101100,1.0 1014 | 101200,1.0 1015 | 101300,1.0 1016 | 101400,1.0 1017 | 101500,1.0 1018 | 101600,1.0 1019 | 101700,1.0 1020 | 101800,1.0 1021 | 101900,1.0 1022 | 102000,1.0 1023 | 102100,1.0 1024 | 102200,1.0 1025 | 102300,1.0 1026 | 102400,1.0 1027 | 102500,1.0 1028 | 102600,1.0 1029 | 102700,1.0 1030 | 102800,1.0 1031 | 102900,1.0 1032 | 103000,1.0 1033 | 103100,1.0 1034 | 103200,1.0 1035 | 103300,1.0 1036 | 103400,1.0 1037 | 103500,1.0 1038 | 103600,1.0 1039 | 103700,1.0 1040 | 103800,1.0 1041 | 103900,1.0 1042 | 104000,1.0 1043 | 104100,1.0 1044 | 104200,1.0 1045 | 104300,1.0 1046 | 104400,1.0 1047 | 104500,1.0 1048 | 104600,1.0 1049 | 104700,1.0 1050 | 104800,1.0 1051 | 104900,1.0 1052 | 105000,1.0 1053 | 105100,1.0 1054 | 105200,1.0 1055 | 105300,1.0 1056 | 105400,1.0 1057 | 105500,1.0 1058 | 105600,1.0 1059 | 105700,1.0 1060 | 105800,1.0 1061 | 105900,1.0 1062 | 106000,1.0 1063 | 106100,1.0 1064 | 106200,1.0 1065 | 106300,1.0 1066 | 106400,1.0 1067 | 106500,1.0 1068 | 106600,1.0 1069 | 106700,1.0 1070 | 106800,1.0 1071 | 106900,1.0 1072 | 107000,1.0 1073 | 107100,1.0 1074 | 107200,1.0 1075 | 107300,1.0 1076 | 107400,1.0 1077 | 107500,1.0 1078 | 107600,1.0 1079 | 107700,1.0 1080 | 107800,1.0 1081 | 107900,1.0 1082 | 108000,1.0 1083 | 108100,1.0 1084 | 108200,1.0 1085 | 108300,1.0 1086 | 108400,1.0 1087 | 108500,1.0 1088 | 108600,1.0 1089 | 108700,1.0 1090 | 108800,1.0 1091 | 108900,1.0 1092 | 109000,1.0 1093 | 109100,1.0 1094 | 109200,1.0 1095 | 109300,1.0 1096 | 109400,1.0 1097 | 109500,1.0 1098 | 109600,1.0 1099 | 109700,1.0 1100 | 109800,1.0 1101 | 109900,1.0 1102 | 110000,1.0 1103 | 110100,1.0 1104 | 110200,1.0 1105 | 110300,1.0 1106 | 110400,1.0 1107 | 110500,1.0 1108 | 110600,1.0 1109 | 110700,1.0 1110 | 110800,1.0 1111 | 110900,1.0 1112 | 111000,1.0 1113 | 111100,1.0 1114 | 111200,1.0 1115 | 111300,1.0 1116 | 111400,1.0 1117 | 111500,1.0 1118 | 111600,1.0 1119 | 111700,1.0 1120 | 111800,1.0 1121 | 
111900,1.0 1122 | 112000,1.0 1123 | 112100,1.0 1124 | 112200,1.0 1125 | 112300,1.0 1126 | 112400,1.0 1127 | 112500,1.0 1128 | 112600,1.0 1129 | 112700,1.0 1130 | 112800,1.0 1131 | 112900,1.0 1132 | 113000,1.0 1133 | 113100,1.0 1134 | 113200,1.0 1135 | 113300,1.0 1136 | 113400,1.0 1137 | 113500,1.0 1138 | 113600,1.0 1139 | 113700,1.0 1140 | 113800,1.0 1141 | 113900,1.0 1142 | 114000,1.0 1143 | 114100,1.0 1144 | 114200,1.0 1145 | 114300,1.0 1146 | 114400,1.0 1147 | 114500,1.0 1148 | 114600,1.0 1149 | 114700,1.0 1150 | 114800,1.0 1151 | 114900,1.0 1152 | 115000,1.0 1153 | 115100,1.0 1154 | 115200,1.0 1155 | 115300,1.0 1156 | 115400,1.0 1157 | 115500,1.0 1158 | 115600,1.0 1159 | 115700,1.0 1160 | 115800,1.0 1161 | 115900,1.0 1162 | 116000,1.0 1163 | 116100,1.0 1164 | 116200,1.0 1165 | 116300,1.0 1166 | 116400,1.0 1167 | 116500,1.0 1168 | 116600,1.0 1169 | 116700,1.0 1170 | 116800,1.0 1171 | 116900,1.0 1172 | 117000,1.0 1173 | 117100,1.0 1174 | 117200,1.0 1175 | 117300,1.0 1176 | 117400,1.0 1177 | 117500,1.0 1178 | 117600,1.0 1179 | 117700,1.0 1180 | 117800,1.0 1181 | 117900,1.0 1182 | 118000,1.0 1183 | 118100,1.0 1184 | 118200,1.0 1185 | 118300,1.0 1186 | 118400,1.0 1187 | 118500,1.0 1188 | 118600,1.0 1189 | 118700,1.0 1190 | 118800,1.0 1191 | 118900,1.0 1192 | 119000,1.0 1193 | 119100,1.0 1194 | 119200,1.0 1195 | 119300,1.0 1196 | 119400,1.0 1197 | 119500,1.0 1198 | 119600,1.0 1199 | 119700,1.0 1200 | 119800,1.0 1201 | 119900,1.0 1202 | 120000,1.0 1203 | 120100,1.0 1204 | 120200,1.0 1205 | 120300,1.0 1206 | 120400,1.0 1207 | 120500,1.0 1208 | 120600,1.0 1209 | 120700,1.0 1210 | 120800,1.0 1211 | 120900,1.0 1212 | 121000,1.0 1213 | 121100,1.0 1214 | 121200,1.0 1215 | 121300,1.0 1216 | 121400,1.0 1217 | 121500,1.0 1218 | 121600,1.0 1219 | 121700,1.0 1220 | 121800,1.0 1221 | 121900,1.0 1222 | 122000,1.0 1223 | 122100,1.0 1224 | 122200,1.0 1225 | 122300,1.0 1226 | 122400,1.0 1227 | 122500,1.0 1228 | 122600,1.0 1229 | 122700,1.0 1230 | 122800,1.0 1231 | 122900,1.0 1232 | 
123000,1.0 1233 | 123100,1.0 1234 | 123200,1.0 1235 | 123300,1.0 1236 | 123400,1.0 1237 | 123500,1.0 1238 | 123600,1.0 1239 | 123700,1.0 1240 | 123800,1.0 1241 | 123900,1.0 1242 | 124000,1.0 1243 | 124100,1.0 1244 | 124200,1.0 1245 | 124300,1.0 1246 | 124400,1.0 1247 | 124500,1.0 1248 | 124600,1.0 1249 | 124700,1.0 1250 | 124800,1.0 1251 | 124900,1.0 1252 | 125000,1.0 1253 | 125100,1.0 1254 | 125200,1.0 1255 | 125300,1.0 1256 | 125400,1.0 1257 | 125500,1.0 1258 | 125600,1.0 1259 | 125700,1.0 1260 | 125800,1.0 1261 | 125900,1.0 1262 | 126000,1.0 1263 | 126100,1.0 1264 | 126200,1.0 1265 | 126300,1.0 1266 | 126400,1.0 1267 | 126500,1.0 1268 | 126600,1.0 1269 | 126700,1.0 1270 | 126800,1.0 1271 | 126900,1.0 1272 | 127000,1.0 1273 | 127100,1.0 1274 | 127200,1.0 1275 | 127300,1.0 1276 | 127400,1.0 1277 | 127500,1.0 1278 | 127600,1.0 1279 | 127700,1.0 1280 | 127800,1.0 1281 | 127900,1.0 1282 | 128000,1.0 1283 | 128100,1.0 1284 | 128200,1.0 1285 | 128300,1.0 1286 | 128400,1.0 1287 | 128500,1.0 1288 | 128600,1.0 1289 | 128700,1.0 1290 | 128800,1.0 1291 | 128900,1.0 1292 | 129000,1.0 1293 | 129100,1.0 1294 | 129200,1.0 1295 | 129300,1.0 1296 | 129400,1.0 1297 | 129500,1.0 1298 | 129600,1.0 1299 | 129700,1.0 1300 | 129800,1.0 1301 | 129900,1.0 1302 | 130000,1.0 1303 | 130100,1.0 1304 | 130200,1.0 1305 | 130300,1.0 1306 | 130400,1.0 1307 | 130500,1.0 1308 | 130600,1.0 1309 | 130700,1.0 1310 | 130800,1.0 1311 | 130900,1.0 1312 | 131000,1.0 1313 | 131100,1.0 1314 | 131200,1.0 1315 | 131300,1.0 1316 | 131400,1.0 1317 | 131500,1.0 1318 | 131600,1.0 1319 | 131700,1.0 1320 | 131800,1.0 1321 | 131900,1.0 1322 | 132000,1.0 1323 | 132100,1.0 1324 | 132200,1.0 1325 | 132300,1.0 1326 | 132400,1.0 1327 | 132500,1.0 1328 | 132600,1.0 1329 | 132700,1.0 1330 | 132800,1.0 1331 | 132900,1.0 1332 | 133000,1.0 1333 | 133100,1.0 1334 | 133200,1.0 1335 | 133300,1.0 1336 | 133400,1.0 1337 | 133500,1.0 1338 | 133600,1.0 1339 | 133700,1.0 1340 | 133800,1.0 1341 | 133900,1.0 1342 | 134000,1.0 1343 | 
134100,1.0 1344 | 134200,1.0 1345 | 134300,1.0 1346 | 134400,1.0 1347 | 134500,1.0 1348 | 134600,1.0 1349 | 134700,1.0 1350 | 134800,1.0 1351 | 134900,1.0 1352 | 135000,1.0 1353 | 135100,1.0 1354 | 135200,1.0 1355 | 135300,1.0 1356 | 135400,1.0 1357 | 135500,1.0 1358 | 135600,1.0 1359 | 135700,1.0 1360 | 135800,1.0 1361 | 135900,1.0 1362 | 136000,1.0 1363 | 136100,1.0 1364 | 136200,1.0 1365 | 136300,1.0 1366 | 136400,1.0 1367 | 136500,1.0 1368 | 136600,1.0 1369 | 136700,1.0 1370 | 136800,1.0 1371 | 136900,1.0 1372 | 137000,1.0 1373 | 137100,1.0 1374 | 137200,1.0 1375 | 137300,1.0 1376 | 137400,1.0 1377 | 137500,1.0 1378 | 137600,1.0 1379 | 137700,1.0 1380 | 137800,1.0 1381 | 137900,1.0 1382 | 138000,1.0 1383 | 138100,1.0 1384 | 138200,1.0 1385 | 138300,1.0 1386 | 138400,1.0 1387 | 138500,1.0 1388 | 138600,1.0 1389 | 138700,1.0 1390 | 138800,1.0 1391 | 138900,1.0 1392 | 139000,1.0 1393 | 139100,1.0 1394 | 139200,1.0 1395 | 139300,1.0 1396 | 139400,1.0 1397 | 139500,1.0 1398 | 139600,1.0 1399 | 139700,1.0 1400 | 139800,1.0 1401 | 139900,1.0 1402 | 140000,1.0 1403 | 140100,1.0 1404 | 140200,1.0 1405 | 140300,1.0 1406 | 140400,1.0 1407 | 140500,1.0 1408 | 140600,1.0 1409 | 140700,1.0 1410 | 140800,1.0 1411 | 140900,1.0 1412 | 141000,1.0 1413 | 141100,1.0 1414 | 141200,1.0 1415 | 141300,1.0 1416 | 141400,1.0 1417 | 141500,1.0 1418 | 141600,1.0 1419 | 141700,1.0 1420 | 141800,1.0 1421 | 141900,1.0 1422 | 142000,1.0 1423 | 142100,1.0 1424 | 142200,1.0 1425 | 142300,1.0 1426 | 142400,1.0 1427 | 142500,1.0 1428 | 142600,1.0 1429 | 142700,1.0 1430 | 142800,1.0 1431 | 142900,1.0 1432 | 143000,1.0 1433 | 143100,1.0 1434 | 143200,1.0 1435 | 143300,1.0 1436 | 143400,1.0 1437 | 143500,1.0 1438 | 143600,1.0 1439 | 143700,1.0 1440 | 143800,1.0 1441 | 143900,1.0 1442 | 144000,1.0 1443 | 144100,1.0 1444 | 144200,1.0 1445 | 144300,1.0 1446 | 144400,1.0 1447 | 144500,1.0 1448 | 144600,1.0 1449 | 144700,1.0 1450 | 144800,1.0 1451 | 144900,1.0 1452 | 145000,1.0 1453 | 145100,1.0 1454 | 
145200,1.0 1455 | 145300,1.0 1456 | 145400,1.0 1457 | 145500,1.0 1458 | 145600,1.0 1459 | 145700,1.0 1460 | 145800,1.0 1461 | 145900,1.0 1462 | 146000,1.0 1463 | 146100,1.0 1464 | 146200,1.0 1465 | 146300,1.0 1466 | 146400,1.0 1467 | 146500,1.0 1468 | 146600,1.0 1469 | 146700,1.0 1470 | 146800,1.0 1471 | 146900,1.0 1472 | 147000,1.0 1473 | 147100,1.0 1474 | 147200,1.0 1475 | 147300,1.0 1476 | 147400,1.0 1477 | 147500,1.0 1478 | 147600,1.0 1479 | 147700,1.0 1480 | 147800,1.0 1481 | 147900,1.0 1482 | 148000,1.0 1483 | 148100,1.0 1484 | 148200,1.0 1485 | 148300,1.0 1486 | 148400,1.0 1487 | 148500,1.0 1488 | 148600,1.0 1489 | 148700,1.0 1490 | 148800,1.0 1491 | 148900,1.0 1492 | 149000,1.0 1493 | 149100,1.0 1494 | 149200,1.0 1495 | 149300,1.0 1496 | 149400,1.0 1497 | 149500,1.0 1498 | 149600,1.0 1499 | 149700,1.0 1500 | 149800,1.0 1501 | 149900,1.0 1502 | 150000,1.0 1503 | 150100,1.0 1504 | 150200,1.0 1505 | 150300,1.0 1506 | 150400,1.0 1507 | 150500,1.0 1508 | 150600,1.0 1509 | 150700,1.0 1510 | 150800,1.0 1511 | 150900,1.0 1512 | 151000,1.0 1513 | 151100,1.0 1514 | 151200,1.0 1515 | 151300,1.0 1516 | 151400,1.0 1517 | 151500,1.0 1518 | 151600,1.0 1519 | 151700,1.0 1520 | 151800,1.0 1521 | 151900,1.0 1522 | 152000,1.0 1523 | 152100,1.0 1524 | 152200,1.0 1525 | 152300,1.0 1526 | 152400,1.0 1527 | 152500,1.0 1528 | 152600,1.0 1529 | 152700,1.0 1530 | 152800,1.0 1531 | 152900,1.0 1532 | 153000,1.0 1533 | 153100,1.0 1534 | 153200,1.0 1535 | 153300,1.0 1536 | 153400,1.0 1537 | 153500,1.0 1538 | 153600,1.0 1539 | 153700,1.0 1540 | 153800,1.0 1541 | 153900,1.0 1542 | 154000,1.0 1543 | 154100,1.0 1544 | 154200,1.0 1545 | 154300,1.0 1546 | 154400,1.0 1547 | 154500,1.0 1548 | 154600,1.0 1549 | 154700,1.0 1550 | 154800,1.0 1551 | 154900,1.0 1552 | 155000,1.0 1553 | 155100,1.0 1554 | 155200,1.0 1555 | 155300,1.0 1556 | 155400,1.0 1557 | 155500,1.0 1558 | 155600,1.0 1559 | 155700,1.0 1560 | 155800,1.0 1561 | 155900,1.0 1562 | 156000,1.0 1563 | 156100,1.0 1564 | 156200,1.0 1565 | 
156300,1.0 1566 | 156400,1.0 1567 | 156500,1.0 1568 | 156600,1.0 1569 | 156700,1.0 1570 | 156800,1.0 1571 | 156900,1.0 1572 | 157000,1.0 1573 | 157100,1.0 1574 | 157200,1.0 1575 | 157300,1.0 1576 | 157400,1.0 1577 | 157500,1.0 1578 | 157600,1.0 1579 | 157700,1.0 1580 | 157800,1.0 1581 | 157900,1.0 1582 | 158000,1.0 1583 | 158100,1.0 1584 | 158200,1.0 1585 | 158300,1.0 1586 | 158400,1.0 1587 | 158500,1.0 1588 | 158600,1.0 1589 | 158700,1.0 1590 | 158800,1.0 1591 | 158900,1.0 1592 | 159000,1.0 1593 | 159100,1.0 1594 | 159200,1.0 1595 | 159300,1.0 1596 | 159400,1.0 1597 | 159500,1.0 1598 | 159600,1.0 1599 | 159700,1.0 1600 | 159800,1.0 1601 | 159900,1.0 1602 | 160000,1.0 1603 | 160100,1.0 1604 | 160200,1.0 1605 | 160300,1.0 1606 | 160400,1.0 1607 | 160500,1.0 1608 | 160600,1.0 1609 | 160700,1.0 1610 | 160800,1.0 1611 | 160900,1.0 1612 | 161000,1.0 1613 | 161100,1.0 1614 | 161200,1.0 1615 | 161300,1.0 1616 | 161400,1.0 1617 | 161500,1.0 1618 | 161600,1.0 1619 | 161700,1.0 1620 | 161800,1.0 1621 | 161900,1.0 1622 | 162000,1.0 1623 | 162100,1.0 1624 | 162200,1.0 1625 | 162300,1.0 1626 | 162400,1.0 1627 | 162500,1.0 1628 | 162600,1.0 1629 | 162700,1.0 1630 | 162800,1.0 1631 | 162900,1.0 1632 | 163000,1.0 1633 | 163100,1.0 1634 | 163200,1.0 1635 | 163300,1.0 1636 | 163400,1.0 1637 | 163500,1.0 1638 | 163600,1.0 1639 | 163700,1.0 1640 | 163800,1.0 1641 | 163900,1.0 1642 | 164000,1.0 1643 | 164100,1.0 1644 | 164200,1.0 1645 | 164300,1.0 1646 | 164400,1.0 1647 | 164500,1.0 1648 | 164600,1.0 1649 | 164700,1.0 1650 | 164800,1.0 1651 | 164900,1.0 1652 | 165000,1.0 1653 | 165100,1.0 1654 | 165200,1.0 1655 | 165300,1.0 1656 | 165400,1.0 1657 | 165500,1.0 1658 | 165600,1.0 1659 | 165700,1.0 1660 | 165800,1.0 1661 | 165900,1.0 1662 | 166000,1.0 1663 | 166100,1.0 1664 | 166200,1.0 1665 | 166300,1.0 1666 | 166400,1.0 1667 | 166500,1.0 1668 | 166600,1.0 1669 | 166700,1.0 1670 | 166800,1.0 1671 | 166900,1.0 1672 | 167000,1.0 1673 | 167100,1.0 1674 | 167200,1.0 1675 | 167300,1.0 1676 | 
167400,1.0 1677 | 167500,1.0 1678 | 167600,1.0 1679 | 167700,1.0 1680 | 167800,1.0 1681 | 167900,1.0 1682 | 168000,1.0 1683 | 168100,1.0 1684 | 168200,1.0 1685 | 168300,1.0 1686 | 168400,1.0 1687 | 168500,1.0 1688 | 168600,1.0 1689 | 168700,1.0 1690 | 168800,1.0 1691 | 168900,1.0 1692 | 169000,1.0 1693 | 169100,1.0 1694 | 169200,1.0 1695 | 169300,1.0 1696 | 169400,1.0 1697 | 169500,1.0 1698 | 169600,1.0 1699 | 169700,1.0 1700 | 169800,1.0 1701 | 169900,1.0 1702 | 170000,1.0 1703 | 170100,1.0 1704 | 170200,1.0 1705 | 170300,1.0 1706 | 170400,1.0 1707 | 170500,1.0 1708 | 170600,1.0 1709 | 170700,1.0 1710 | 170800,1.0 1711 | 170900,1.0 1712 | 171000,1.0 1713 | 171100,1.0 1714 | 171200,1.0 1715 | 171300,1.0 1716 | 171400,1.0 1717 | 171500,1.0 1718 | 171600,1.0 1719 | 171700,1.0 1720 | 171800,1.0 1721 | 171900,1.0 1722 | 172000,1.0 1723 | 172100,1.0 1724 | 172200,1.0 1725 | 172300,1.0 1726 | 172400,1.0 1727 | 172500,1.0 1728 | 172600,1.0 1729 | 172700,1.0 1730 | 172800,1.0 1731 | 172900,1.0 1732 | 173000,1.0 1733 | 173100,1.0 1734 | 173200,1.0 1735 | 173300,1.0 1736 | 173400,1.0 1737 | 173500,1.0 1738 | 173600,1.0 1739 | 173700,1.0 1740 | 173800,1.0 1741 | 173900,1.0 1742 | 174000,1.0 1743 | 174100,1.0 1744 | 174200,1.0 1745 | 174300,1.0 1746 | 174400,1.0 1747 | 174500,1.0 1748 | 174600,1.0 1749 | 174700,1.0 1750 | 174800,1.0 1751 | 174900,1.0 1752 | 175000,1.0 1753 | 175100,1.0 1754 | 175200,1.0 1755 | 175300,1.0 1756 | 175400,1.0 1757 | 175500,1.0 1758 | 175600,1.0 1759 | 175700,1.0 1760 | 175800,1.0 1761 | 175900,1.0 1762 | 176000,1.0 1763 | 176100,1.0 1764 | 176200,1.0 1765 | 176300,1.0 1766 | 176400,1.0 1767 | 176500,1.0 1768 | 176600,1.0 1769 | 176700,1.0 1770 | 176800,1.0 1771 | 176900,1.0 1772 | 177000,1.0 1773 | 177100,1.0 1774 | 177200,1.0 1775 | 177300,1.0 1776 | 177400,1.0 1777 | 177500,1.0 1778 | 177600,1.0 1779 | 177700,1.0 1780 | 177800,1.0 1781 | 177900,1.0 1782 | 178000,1.0 1783 | 178100,1.0 1784 | 178200,1.0 1785 | 178300,1.0 1786 | 178400,1.0 1787 | 
178500,1.0 1788 | 178600,1.0 1789 | 178700,1.0 1790 | 178800,1.0 1791 | 178900,1.0 1792 | 179000,1.0 1793 | 179100,1.0 1794 | 179200,1.0 1795 | 179300,1.0 1796 | 179400,1.0 1797 | 179500,1.0 1798 | 179600,1.0 1799 | 179700,1.0 1800 | 179800,1.0 1801 | 179900,1.0 1802 | 180000,1.0 1803 | 180100,1.0 1804 | 180200,1.0 1805 | 180300,1.0 1806 | 180400,1.0 1807 | 180500,1.0 1808 | 180600,1.0 1809 | 180700,1.0 1810 | 180800,1.0 1811 | 180900,1.0 1812 | 181000,1.0 1813 | 181100,1.0 1814 | 181200,1.0 1815 | 181300,1.0 1816 | 181400,1.0 1817 | 181500,1.0 1818 | 181600,1.0 1819 | 181700,1.0 1820 | 181800,1.0 1821 | 181900,1.0 1822 | 182000,1.0 1823 | 182100,1.0 1824 | 182200,1.0 1825 | 182300,1.0 1826 | 182400,1.0 1827 | 182500,1.0 1828 | 182600,1.0 1829 | 182700,1.0 1830 | 182800,1.0 1831 | 182900,1.0 1832 | 183000,1.0 1833 | 183100,1.0 1834 | 183200,1.0 1835 | 183300,1.0 1836 | 183400,1.0 1837 | 183500,1.0 1838 | 183600,1.0 1839 | 183700,1.0 1840 | 183800,1.0 1841 | 183900,1.0 1842 | 184000,1.0 1843 | 184100,1.0 1844 | 184200,1.0 1845 | 184300,1.0 1846 | 184400,1.0 1847 | 184500,1.0 1848 | 184600,1.0 1849 | 184700,1.0 1850 | 184800,1.0 1851 | 184900,1.0 1852 | 185000,1.0 1853 | 185100,1.0 1854 | 185200,1.0 1855 | 185300,1.0 1856 | 185400,1.0 1857 | 185500,1.0 1858 | 185600,1.0 1859 | 185700,1.0 1860 | 185800,1.0 1861 | 185900,1.0 1862 | 186000,1.0 1863 | 186100,1.0 1864 | 186200,1.0 1865 | 186300,1.0 1866 | 186400,1.0 1867 | 186500,1.0 1868 | 186600,1.0 1869 | 186700,1.0 1870 | 186800,1.0 1871 | 186900,1.0 1872 | 187000,1.0 1873 | 187100,1.0 1874 | 187200,1.0 1875 | 187300,1.0 1876 | 187400,1.0 1877 | 187500,1.0 1878 | 187600,1.0 1879 | 187700,1.0 1880 | 187800,1.0 1881 | 187900,1.0 1882 | 188000,1.0 1883 | 188100,1.0 1884 | 188200,1.0 1885 | 188300,1.0 1886 | 188400,1.0 1887 | 188500,1.0 1888 | 188600,1.0 1889 | 188700,1.0 1890 | 188800,1.0 1891 | 188900,1.0 1892 | 189000,1.0 1893 | 189100,1.0 1894 | 189200,1.0 1895 | 189300,1.0 1896 | 189400,1.0 1897 | 189500,1.0 1898 | 
189600,1.0 1899 | 189700,1.0 1900 | 189800,1.0 1901 | 189900,1.0 1902 | 190000,1.0 1903 | 190100,1.0 1904 | 190200,1.0 1905 | 190300,1.0 1906 | 190400,1.0 1907 | 190500,1.0 1908 | 190600,1.0 1909 | 190700,1.0 1910 | 190800,1.0 1911 | 190900,1.0 1912 | 191000,1.0 1913 | 191100,1.0 1914 | 191200,1.0 1915 | 191300,1.0 1916 | 191400,1.0 1917 | 191500,1.0 1918 | 191600,1.0 1919 | 191700,1.0 1920 | 191800,1.0 1921 | 191900,1.0 1922 | 192000,1.0 1923 | 192100,1.0 1924 | 192200,1.0 1925 | 192300,1.0 1926 | 192400,1.0 1927 | 192500,1.0 1928 | 192600,1.0 1929 | 192700,1.0 1930 | 192800,1.0 1931 | 192900,1.0 1932 | 193000,1.0 1933 | 193100,1.0 1934 | 193200,1.0 1935 | 193300,1.0 1936 | 193400,1.0 1937 | 193500,1.0 1938 | 193600,1.0 1939 | 193700,1.0 1940 | 193800,1.0 1941 | 193900,1.0 1942 | 194000,1.0 1943 | 194100,1.0 1944 | 194200,1.0 1945 | 194300,1.0 1946 | 194400,1.0 1947 | 194500,1.0 1948 | 194600,1.0 1949 | 194700,1.0 1950 | 194800,1.0 1951 | 194900,1.0 1952 | 195000,1.0 1953 | 195100,1.0 1954 | 195200,1.0 1955 | 195300,1.0 1956 | 195400,1.0 1957 | 195500,1.0 1958 | 195600,1.0 1959 | 195700,1.0 1960 | 195800,1.0 1961 | 195900,1.0 1962 | 196000,1.0 1963 | 196100,1.0 1964 | 196200,1.0 1965 | 196300,1.0 1966 | 196400,1.0 1967 | 196500,1.0 1968 | 196600,1.0 1969 | 196700,1.0 1970 | 196800,1.0 1971 | 196900,1.0 1972 | 197000,1.0 1973 | 197100,1.0 1974 | 197200,1.0 1975 | 197300,1.0 1976 | 197400,1.0 1977 | 197500,1.0 1978 | 197600,1.0 1979 | 197700,1.0 1980 | 197800,1.0 1981 | 197900,1.0 1982 | 198000,1.0 1983 | 198100,1.0 1984 | 198200,1.0 1985 | 198300,1.0 1986 | 198400,1.0 1987 | 198500,1.0 1988 | 198600,1.0 1989 | 198700,1.0 1990 | 198800,1.0 1991 | 198900,1.0 1992 | 199000,1.0 1993 | 199100,1.0 1994 | 199200,1.0 1995 | 199300,1.0 1996 | 199400,1.0 1997 | 199500,1.0 1998 | 199600,1.0 1999 | 199700,1.0 2000 | 199800,1.0 2001 | 199900,1.0 2002 | 200000,1.0 2003 | 200100,1.0 2004 | 200200,1.0 2005 | 200300,1.0 2006 | 200400,1.0 2007 | 200500,1.0 2008 | 200600,1.0 2009 | 
200700,1.0 2010 | 200800,1.0 2011 | 200900,1.0 2012 | 201000,1.0 2013 | 201100,1.0 2014 | 201200,1.0 2015 | 201300,1.0 2016 | 201400,1.0 2017 | 201500,1.0 2018 | 201600,1.0 2019 | 201700,1.0 2020 | 201800,1.0 2021 | 201900,1.0 2022 | 202000,1.0 2023 | 202100,1.0 2024 | 202200,1.0 2025 | 202300,1.0 2026 | 202400,1.0 2027 | 202500,1.0 2028 | 202600,1.0 2029 | 202700,1.0 2030 | 202800,1.0 2031 | 202900,1.0 2032 | 203000,1.0 2033 | 203100,1.0 2034 | 203200,1.0 2035 | 203300,1.0 2036 | 203400,1.0 2037 | 203500,1.0 2038 | 203600,1.0 2039 | 203700,1.0 2040 | 203800,1.0 2041 | 203900,1.0 2042 | 204000,1.0 2043 | 204100,1.0 2044 | 204200,1.0 2045 | 204300,1.0 2046 | 204400,1.0 2047 | 204500,1.0 2048 | 204600,1.0 2049 | 204700,1.0 2050 | 204800,1.0 2051 | 204900,1.0 2052 | 205000,1.0 2053 | 205100,1.0 2054 | 205200,1.0 2055 | 205300,1.0 2056 | 205400,1.0 2057 | 205500,1.0 2058 | 205600,1.0 2059 | 205700,1.0 2060 | 205800,1.0 2061 | 205900,1.0 2062 | 206000,1.0 2063 | 206100,1.0 2064 | 206200,1.0 2065 | 206300,1.0 2066 | 206400,1.0 2067 | 206500,1.0 2068 | 206600,1.0 2069 | 206700,1.0 2070 | 206800,1.0 2071 | 206900,1.0 2072 | 207000,1.0 2073 | 207100,1.0 2074 | 207200,1.0 2075 | 207300,1.0 2076 | 207400,1.0 2077 | 207500,1.0 2078 | 207600,1.0 2079 | 207700,1.0 2080 | 207800,1.0 2081 | 207900,1.0 2082 | 208000,1.0 2083 | 208100,1.0 2084 | 208200,1.0 2085 | 208300,1.0 2086 | 208400,1.0 2087 | 208500,1.0 2088 | 208600,1.0 2089 | 208700,1.0 2090 | 208800,1.0 2091 | 208900,1.0 2092 | 209000,1.0 2093 | 209100,1.0 2094 | 209200,1.0 2095 | 209300,1.0 2096 | 209400,1.0 2097 | 209500,1.0 2098 | 209600,1.0 2099 | 209700,1.0 2100 | 209800,1.0 2101 | 209900,1.0 2102 | 210000,1.0 2103 | 210100,1.0 2104 | 210200,1.0 2105 | 210300,1.0 2106 | 210400,1.0 2107 | 210500,1.0 2108 | 210600,1.0 2109 | 210700,1.0 2110 | 210800,1.0 2111 | 210900,1.0 2112 | 211000,1.0 2113 | 211100,1.0 2114 | 211200,1.0 2115 | 211300,1.0 2116 | 211400,1.0 2117 | 211500,1.0 2118 | 211600,1.0 2119 | 211700,1.0 2120 | 
211800,1.0 2121 | 211900,1.0 2122 | 212000,1.0 2123 | 212100,1.0 2124 | 212200,1.0 2125 | 212300,1.0 2126 | 212400,1.0 2127 | 212500,1.0 2128 | 212600,1.0 2129 | 212700,1.0 2130 | 212800,1.0 2131 | 212900,1.0 2132 | 213000,1.0 2133 | 213100,1.0 2134 | 213200,1.0 2135 | 213300,1.0 2136 | 213400,1.0 2137 | 213500,1.0 2138 | 213600,1.0 2139 | 213700,1.0 2140 | 213800,1.0 2141 | 213900,1.0 2142 | 214000,1.0 2143 | 214100,1.0 2144 | 214200,1.0 2145 | 214300,1.0 2146 | 214400,1.0 2147 | -------------------------------------------------------------------------------- /results/mnist/1_iter_val_acc.csv: -------------------------------------------------------------------------------- 1 | step,val_acc 2 | 0,0.0961538461538 3 | 500,0.989583333333 4 | 1000,0.994190705128 5 | 1500,0.995392628205 6 | 2000,0.994391025641 7 | 2500,0.995192307692 8 | 3000,0.995192307692 9 | 3500,0.994791666667 10 | 4000,0.995793269231 11 | 4500,0.995392628205 12 | 5000,0.995993589744 13 | 5500,0.994991987179 14 | 6000,0.995592948718 15 | 6500,0.994991987179 16 | 7000,0.995192307692 17 | 7500,0.995592948718 18 | 8000,0.995592948718 19 | 8500,0.995392628205 20 | 9000,0.995793269231 21 | 9500,0.994991987179 22 | 10000,0.996193910256 23 | 10500,0.995793269231 24 | 11000,0.995592948718 25 | 11500,0.996394230769 26 | 12000,0.995592948718 27 | 12500,0.995793269231 28 | 13000,0.994991987179 29 | 13500,0.995793269231 30 | 14000,0.995793269231 31 | 14500,0.995592948718 32 | 15000,0.994791666667 33 | 15500,0.995192307692 34 | 16000,0.995993589744 35 | 16500,0.995793269231 36 | 17000,0.995392628205 37 | 17500,0.995592948718 38 | 18000,0.995993589744 39 | 18500,0.995392628205 40 | 19000,0.995793269231 41 | 19500,0.995392628205 42 | 20000,0.996193910256 43 | 20500,0.995392628205 44 | 21000,0.995392628205 45 | 21500,0.995793269231 46 | 22000,0.995793269231 47 | 22500,0.995592948718 48 | 23000,0.995392628205 49 | 23500,0.995793269231 50 | 24000,0.995392628205 51 | 24500,0.995592948718 52 | 25000,0.994991987179 53 | 
25500,0.995192307692 54 | 26000,0.994591346154 55 | 26500,0.995592948718 56 | 27000,0.994991987179 57 | 27500,0.995592948718 58 | 28000,0.994991987179 59 | 28500,0.994791666667 60 | 29000,0.995392628205 61 | 29500,0.995793269231 62 | 30000,0.994591346154 63 | 30500,0.995592948718 64 | 31000,0.995192307692 65 | 31500,0.995793269231 66 | 32000,0.994791666667 67 | 32500,0.994991987179 68 | 33000,0.995592948718 69 | 33500,0.994791666667 70 | 34000,0.995192307692 71 | 34500,0.995392628205 72 | 35000,0.994591346154 73 | 35500,0.995192307692 74 | 36000,0.995192307692 75 | 36500,0.995192307692 76 | 37000,0.995192307692 77 | 37500,0.995392628205 78 | 38000,0.995192307692 79 | 38500,0.994391025641 80 | 39000,0.995392628205 81 | 39500,0.994791666667 82 | 40000,0.994791666667 83 | 40500,0.994791666667 84 | 41000,0.994991987179 85 | 41500,0.995392628205 86 | 42000,0.994190705128 87 | 42500,0.995192307692 88 | 43000,0.994991987179 89 | 43500,0.995392628205 90 | 44000,0.994791666667 91 | 44500,0.995392628205 92 | 45000,0.995592948718 93 | 45500,0.995392628205 94 | 46000,0.995392628205 95 | 46500,0.995192307692 96 | 47000,0.995592948718 97 | 47500,0.995392628205 98 | 48000,0.994190705128 99 | 48500,0.995392628205 100 | 49000,0.995392628205 101 | 49500,0.995192307692 102 | 50000,0.994991987179 103 | 50500,0.994791666667 104 | 51000,0.994591346154 105 | 51500,0.994791666667 106 | 52000,0.994791666667 107 | 52500,0.994791666667 108 | 53000,0.994591346154 109 | 53500,0.994791666667 110 | 54000,0.995392628205 111 | 54500,0.995793269231 112 | 55000,0.995592948718 113 | 55500,0.995192307692 114 | 56000,0.994991987179 115 | 56500,0.994991987179 116 | 57000,0.994591346154 117 | 57500,0.994991987179 118 | 58000,0.995192307692 119 | 58500,0.995392628205 120 | 59000,0.994791666667 121 | 59500,0.995192307692 122 | 60000,0.995392628205 123 | 60500,0.994391025641 124 | 61000,0.995592948718 125 | 61500,0.994791666667 126 | 62000,0.994991987179 127 | 62500,0.995392628205 128 | 63000,0.995192307692 
129 | 63500,0.994791666667 130 | 64000,0.994791666667 131 | 64500,0.995392628205 132 | 65000,0.994791666667 133 | 65500,0.995192307692 134 | 66000,0.994791666667 135 | 66500,0.994791666667 136 | 67000,0.994991987179 137 | 67500,0.994991987179 138 | 68000,0.994591346154 139 | 68500,0.995192307692 140 | 69000,0.994791666667 141 | 69500,0.994991987179 142 | 70000,0.995793269231 143 | 70500,0.994991987179 144 | 71000,0.995192307692 145 | 71500,0.994591346154 146 | 72000,0.994791666667 147 | 72500,0.994991987179 148 | 73000,0.995192307692 149 | 73500,0.995192307692 150 | 74000,0.994791666667 151 | 74500,0.994791666667 152 | 75000,0.995392628205 153 | 75500,0.995592948718 154 | 76000,0.995592948718 155 | 76500,0.995192307692 156 | 77000,0.994791666667 157 | 77500,0.995192307692 158 | 78000,0.994991987179 159 | 78500,0.994991987179 160 | 79000,0.995192307692 161 | 79500,0.995592948718 162 | 80000,0.995392628205 163 | 80500,0.995592948718 164 | 81000,0.994991987179 165 | 81500,0.994391025641 166 | 82000,0.994991987179 167 | 82500,0.995392628205 168 | 83000,0.994591346154 169 | 83500,0.995192307692 170 | 84000,0.994991987179 171 | 84500,0.994991987179 172 | 85000,0.994791666667 173 | 85500,0.994991987179 174 | 86000,0.995392628205 175 | 86500,0.995192307692 176 | 87000,0.995192307692 177 | 87500,0.995392628205 178 | 88000,0.995192307692 179 | 88500,0.994991987179 180 | 89000,0.994791666667 181 | 89500,0.995192307692 182 | 90000,0.995192307692 183 | 90500,0.994791666667 184 | 91000,0.995392628205 185 | 91500,0.994391025641 186 | 92000,0.994791666667 187 | 92500,0.994991987179 188 | 93000,0.994991987179 189 | 93500,0.994991987179 190 | 94000,0.995392628205 191 | 94500,0.994991987179 192 | 95000,0.995192307692 193 | 95500,0.995392628205 194 | 96000,0.995392628205 195 | 96500,0.994591346154 196 | 97000,0.994791666667 197 | 97500,0.995192307692 198 | 98000,0.995192307692 199 | 98500,0.994991987179 200 | 99000,0.995192307692 201 | 99500,0.994791666667 202 | 100000,0.994190705128 
203 | 100500,0.994791666667 204 | 101000,0.994991987179 205 | 101500,0.994991987179 206 | 102000,0.994791666667 207 | 102500,0.994991987179 208 | 103000,0.994791666667 209 | 103500,0.995192307692 210 | 104000,0.995392628205 211 | 104500,0.994791666667 212 | 105000,0.994791666667 213 | 105500,0.995192307692 214 | 106000,0.995392628205 215 | 106500,0.994991987179 216 | 107000,0.994791666667 217 | 107500,0.994791666667 218 | 108000,0.995192307692 219 | 108500,0.995592948718 220 | 109000,0.994991987179 221 | 109500,0.995192307692 222 | 110000,0.995392628205 223 | 110500,0.995392628205 224 | 111000,0.995192307692 225 | 111500,0.995392628205 226 | 112000,0.995592948718 227 | 112500,0.995192307692 228 | 113000,0.994991987179 229 | 113500,0.994991987179 230 | 114000,0.994991987179 231 | 114500,0.995392628205 232 | 115000,0.994591346154 233 | 115500,0.994991987179 234 | 116000,0.995192307692 235 | 116500,0.995392628205 236 | 117000,0.995392628205 237 | 117500,0.995192307692 238 | 118000,0.995192307692 239 | 118500,0.994791666667 240 | 119000,0.994991987179 241 | 119500,0.994991987179 242 | 120000,0.994991987179 243 | 120500,0.993990384615 244 | 121000,0.994991987179 245 | 121500,0.995392628205 246 | 122000,0.994791666667 247 | 122500,0.995192307692 248 | 123000,0.994591346154 249 | 123500,0.993990384615 250 | 124000,0.995192307692 251 | 124500,0.994591346154 252 | 125000,0.995192307692 253 | 125500,0.994591346154 254 | 126000,0.994991987179 255 | 126500,0.994991987179 256 | 127000,0.994791666667 257 | 127500,0.994991987179 258 | 128000,0.995192307692 259 | 128500,0.995592948718 260 | 129000,0.995392628205 261 | 129500,0.994391025641 262 | 130000,0.994791666667 263 | 130500,0.995392628205 264 | 131000,0.994591346154 265 | 131500,0.994791666667 266 | 132000,0.994591346154 267 | 132500,0.994991987179 268 | 133000,0.994791666667 269 | 133500,0.994190705128 270 | 134000,0.994391025641 271 | 134500,0.994791666667 272 | 135000,0.994391025641 273 | 135500,0.994591346154 274 | 
136000,0.994391025641 275 | 136500,0.994791666667 276 | 137000,0.994391025641 277 | 137500,0.994391025641 278 | 138000,0.994591346154 279 | 138500,0.993790064103 280 | 139000,0.994991987179 281 | 139500,0.994791666667 282 | 140000,0.994591346154 283 | 140500,0.994591346154 284 | 141000,0.994591346154 285 | 141500,0.994991987179 286 | 142000,0.994791666667 287 | 142500,0.994591346154 288 | 143000,0.994991987179 289 | 143500,0.994391025641 290 | 144000,0.994391025641 291 | 144500,0.994591346154 292 | 145000,0.994190705128 293 | 145500,0.994190705128 294 | 146000,0.994591346154 295 | 146500,0.994190705128 296 | 147000,0.994190705128 297 | 147500,0.994591346154 298 | 148000,0.994591346154 299 | 148500,0.994190705128 300 | 149000,0.993990384615 301 | 149500,0.994391025641 302 | 150000,0.994591346154 303 | 150500,0.994791666667 304 | 151000,0.994391025641 305 | 151500,0.993990384615 306 | 152000,0.994591346154 307 | 152500,0.993990384615 308 | 153000,0.994591346154 309 | 153500,0.994591346154 310 | 154000,0.994591346154 311 | 154500,0.993990384615 312 | 155000,0.994791666667 313 | 155500,0.993790064103 314 | 156000,0.994391025641 315 | 156500,0.994391025641 316 | 157000,0.994791666667 317 | 157500,0.993990384615 318 | 158000,0.993990384615 319 | 158500,0.993790064103 320 | 159000,0.994190705128 321 | 159500,0.994791666667 322 | 160000,0.993990384615 323 | 160500,0.994591346154 324 | 161000,0.99358974359 325 | 161500,0.994391025641 326 | 162000,0.994991987179 327 | 162500,0.993990384615 328 | 163000,0.994190705128 329 | 163500,0.994190705128 330 | 164000,0.994190705128 331 | 164500,0.994190705128 332 | 165000,0.99358974359 333 | 165500,0.99358974359 334 | 166000,0.993990384615 335 | 166500,0.994190705128 336 | 167000,0.993990384615 337 | 167500,0.993790064103 338 | 168000,0.994190705128 339 | 168500,0.993389423077 340 | 169000,0.993790064103 341 | 169500,0.993790064103 342 | 170000,0.993990384615 343 | 170500,0.994190705128 344 | 171000,0.99358974359 345 | 
171500,0.994190705128 346 | 172000,0.993990384615 347 | 172500,0.993790064103 348 | 173000,0.994190705128 349 | 173500,0.99358974359 350 | 174000,0.99358974359 351 | 174500,0.99358974359 352 | 175000,0.994190705128 353 | 175500,0.993790064103 354 | 176000,0.99358974359 355 | 176500,0.993389423077 356 | 177000,0.99358974359 357 | 177500,0.99358974359 358 | 178000,0.993790064103 359 | 178500,0.994190705128 360 | 179000,0.994190705128 361 | 179500,0.993990384615 362 | 180000,0.993990384615 363 | 180500,0.993790064103 364 | 181000,0.993990384615 365 | 181500,0.993790064103 366 | 182000,0.99358974359 367 | 182500,0.992988782051 368 | 183000,0.993790064103 369 | 183500,0.992988782051 370 | 184000,0.993389423077 371 | 184500,0.993790064103 372 | 185000,0.993189102564 373 | 185500,0.99358974359 374 | 186000,0.993389423077 375 | 186500,0.993790064103 376 | 187000,0.993389423077 377 | 187500,0.993790064103 378 | 188000,0.993389423077 379 | 188500,0.99358974359 380 | 189000,0.99358974359 381 | 189500,0.993389423077 382 | 190000,0.993990384615 383 | 190500,0.993790064103 384 | 191000,0.992988782051 385 | 191500,0.993389423077 386 | 192000,0.993189102564 387 | 192500,0.993790064103 388 | 193000,0.99358974359 389 | 193500,0.99358974359 390 | 194000,0.99358974359 391 | 194500,0.993389423077 392 | 195000,0.99358974359 393 | 195500,0.993790064103 394 | 196000,0.994190705128 395 | 196500,0.993389423077 396 | 197000,0.99358974359 397 | 197500,0.992988782051 398 | 198000,0.993389423077 399 | 198500,0.993389423077 400 | 199000,0.993990384615 401 | 199500,0.992988782051 402 | 200000,0.993389423077 403 | 200500,0.993389423077 404 | 201000,0.993389423077 405 | 201500,0.993790064103 406 | 202000,0.993389423077 407 | 202500,0.993389423077 408 | 203000,0.992788461538 409 | 203500,0.993389423077 410 | 204000,0.99358974359 411 | 204500,0.993389423077 412 | 205000,0.992988782051 413 | 205500,0.993189102564 414 | 206000,0.993189102564 415 | 206500,0.992988782051 416 | 207000,0.992988782051 417 | 
207500,0.992788461538 418 | 208000,0.993389423077 419 | 208500,0.993189102564 420 | 209000,0.993189102564 421 | 209500,0.993189102564 422 | 210000,0.993389423077 423 | 210500,0.993990384615 424 | 211000,0.99358974359 425 | 211500,0.993790064103 426 | 212000,0.993790064103 427 | 212500,0.993389423077 428 | 213000,0.99358974359 429 | 213500,0.993189102564 430 | 214000,0.993189102564 431 | -------------------------------------------------------------------------------- /results/mnist/3_iter_loss.csv: -------------------------------------------------------------------------------- 1 | step,loss 2 | 0,0.900833 3 | 100,0.090049 4 | 200,0.0456072 5 | 300,0.0433924 6 | 400,0.0469553 7 | 500,0.0333918 8 | 600,0.0289794 9 | 700,0.0439903 10 | 800,0.0289287 11 | 900,0.0232712 12 | 1000,0.032533 13 | 1100,0.0265907 14 | 1200,0.0256485 15 | 1300,0.0181835 16 | 1400,0.0162144 17 | 1500,0.0152603 18 | 1600,0.0266051 19 | 1700,0.0174083 20 | 1800,0.0178013 21 | 1900,0.0185957 22 | 2000,0.0177851 23 | 2100,0.0148149 24 | 2200,0.01531 25 | 2300,0.0135139 26 | 2400,0.0111426 27 | 2500,0.0129276 28 | 2600,0.00976547 29 | 2700,0.0107548 30 | 2800,0.00880792 31 | 2900,0.0145088 32 | 3000,0.00971534 33 | 3100,0.0107468 34 | 3200,0.0106746 35 | 3300,0.00945119 36 | 3400,0.00944777 37 | 3500,0.00858944 38 | 3600,0.00824325 39 | 3700,0.00740903 40 | 3800,0.00958429 41 | 3900,0.00769402 42 | 4000,0.00807425 43 | 4100,0.00750071 44 | 4200,0.00876012 45 | 4300,0.00785926 46 | 4400,0.00723187 47 | 4500,0.00677234 48 | 4600,0.00648039 49 | 4700,0.00650377 50 | 4800,0.00643385 51 | 4900,0.00616357 52 | 5000,0.0068284 53 | 5100,0.00827757 54 | 5200,0.00627104 55 | 5300,0.0053733 56 | 5400,0.00616352 57 | 5500,0.00891144 58 | 5600,0.00799707 59 | 5700,0.00538839 60 | 5800,0.00590578 61 | 5900,0.00509941 62 | 6000,0.00569755 63 | 6100,0.00589637 64 | 6200,0.00616893 65 | 6300,0.00582675 66 | 6400,0.00612702 67 | 6500,0.00535281 68 | 6600,0.00567703 69 | 6700,0.00484108 70 | 6800,0.00631951 71 | 
6900,0.00522322 72 | 7000,0.00511473 73 | 7100,0.00512285 74 | 7200,0.00518261 75 | 7300,0.00477827 76 | 7400,0.00474654 77 | 7500,0.0046223 78 | 7600,0.00468915 79 | 7700,0.00420629 80 | 7800,0.00405702 81 | 7900,0.00439265 82 | 8000,0.00425009 83 | 8100,0.00492229 84 | 8200,0.00499246 85 | 8300,0.0056118 86 | 8400,0.00676982 87 | 8500,0.00531541 88 | 8600,0.00484039 89 | 8700,0.00428301 90 | 8800,0.00446961 91 | 8900,0.00486134 92 | 9000,0.00432225 93 | 9100,0.00394429 94 | 9200,0.00404601 95 | 9300,0.00370894 96 | 9400,0.00368742 97 | 9500,0.00361809 98 | 9600,0.00369749 99 | 9700,0.00356381 100 | 9800,0.00372467 101 | 9900,0.00384705 102 | 10000,0.00368881 103 | 10100,0.00350731 104 | 10200,0.00338315 105 | 10300,0.00346778 106 | 10400,0.00333952 107 | 10500,0.00340508 108 | 10600,0.00390452 109 | 10700,0.00481714 110 | 10800,0.00669386 111 | 10900,0.00551518 112 | 11000,0.0056989 113 | 11100,0.00459923 114 | 11200,0.00375691 115 | 11300,0.00339064 116 | 11400,0.00321299 117 | 11500,0.00371797 118 | 11600,0.00322147 119 | 11700,0.00320404 120 | 11800,0.00317102 121 | 11900,0.00311539 122 | 12000,0.00324745 123 | 12100,0.00315079 124 | 12200,0.00319437 125 | 12300,0.00338814 126 | 12400,0.00315472 127 | 12500,0.0030839 128 | 12600,0.00306725 129 | 12700,0.00366961 130 | 12800,0.00321388 131 | 12900,0.00314318 132 | 13000,0.00310186 133 | 13100,0.00304331 134 | 13200,0.00325233 135 | 13300,0.00303824 136 | 13400,0.00306376 137 | 13500,0.00355828 138 | 13600,0.00293091 139 | 13700,0.00299379 140 | 13800,0.00305762 141 | 13900,0.00346516 142 | 14000,0.00301496 143 | 14100,0.00325917 144 | 14200,0.00310913 145 | 14300,0.00333836 146 | 14400,0.00295847 147 | 14500,0.00300367 148 | 14600,0.00310865 149 | 14700,0.00294769 150 | 14800,0.00273551 151 | 14900,0.00328743 152 | 15000,0.00320555 153 | 15100,0.00287706 154 | 15200,0.00279883 155 | 15300,0.00292214 156 | 15400,0.00290337 157 | 15500,0.00270235 158 | 15600,0.00310209 159 | 15700,0.00319934 160 | 
15800,0.00327756 161 | 15900,0.00296055 162 | 16000,0.00315679 163 | 16100,0.00260754 164 | 16200,0.00316666 165 | 16300,0.0028022 166 | 16400,0.00302344 167 | 16500,0.00272723 168 | 16600,0.00279132 169 | 16700,0.00270168 170 | 16800,0.0028701 171 | 16900,0.00297831 172 | 17000,0.00270393 173 | 17100,0.00300571 174 | 17200,0.00292959 175 | 17300,0.00264694 176 | 17400,0.00260631 177 | 17500,0.00306622 178 | 17600,0.00260016 179 | 17700,0.0025589 180 | 17800,0.00255913 181 | 17900,0.00324114 182 | 18000,0.00249647 183 | 18100,0.0025801 184 | 18200,0.00278119 185 | 18300,0.00261893 186 | 18400,0.00284812 187 | 18500,0.00276466 188 | 18600,0.00223182 189 | 18700,0.00244611 190 | 18800,0.0028264 191 | 18900,0.00257817 192 | 19000,0.00317459 193 | 19100,0.00266065 194 | 19200,0.00247539 195 | 19300,0.00247858 196 | 19400,0.00245904 197 | 19500,0.00254874 198 | 19600,0.00408047 199 | 19700,0.00246944 200 | 19800,0.0023399 201 | 19900,0.00254492 202 | 20000,0.00254393 203 | 20100,0.00292258 204 | 20200,0.00249952 205 | 20300,0.00226502 206 | 20400,0.0024502 207 | 20500,0.00247732 208 | 20600,0.00261909 209 | 20700,0.00254909 210 | 20800,0.00273904 211 | 20900,0.00239386 212 | 21000,0.00277127 213 | 21100,0.00244675 214 | 21200,0.00234495 215 | 21300,0.0022947 216 | 21400,0.00292373 217 | 21500,0.00211332 218 | 21600,0.00234801 219 | 21700,0.00251334 220 | 21800,0.00225049 221 | 21900,0.00242015 222 | 22000,0.00288636 223 | 22100,0.0023498 224 | 22200,0.00225634 225 | 22300,0.00231614 226 | 22400,0.00224486 227 | 22500,0.00248407 228 | 22600,0.00268974 229 | 22700,0.00239089 230 | 22800,0.00236791 231 | 22900,0.00228765 232 | 23000,0.00233162 233 | 23100,0.00220628 234 | 23200,0.00256471 235 | 23300,0.00224013 236 | 23400,0.00228511 237 | 23500,0.00256355 238 | 23600,0.00208824 239 | 23700,0.00210067 240 | 23800,0.00220947 241 | 23900,0.00232505 242 | 24000,0.0020243 243 | 24100,0.00228526 244 | 24200,0.00210286 245 | 24300,0.00214645 246 | 24400,0.00237176 247 | 
24500,0.00211844 248 | 24600,0.00222796 249 | 24700,0.00240455 250 | 24800,0.00206747 251 | 24900,0.002177 252 | 25000,0.00245886 253 | 25100,0.0021835 254 | 25200,0.00217164 255 | 25300,0.00223197 256 | 25400,0.0023277 257 | 25500,0.0020254 258 | 25600,0.00211439 259 | 25700,0.0021943 260 | 25800,0.00240695 261 | 25900,0.00220309 262 | 26000,0.00232469 263 | 26100,0.0021908 264 | 26200,0.00227319 265 | 26300,0.00222392 266 | 26400,0.00213271 267 | 26500,0.00192101 268 | 26600,0.00201666 269 | 26700,0.0023934 270 | 26800,0.00228135 271 | 26900,0.00251577 272 | 27000,0.00215545 273 | 27100,0.00194697 274 | 27200,0.00187257 275 | 27300,0.00230567 276 | 27400,0.00222048 277 | 27500,0.00203152 278 | 27600,0.00224689 279 | 27700,0.00212012 280 | 27800,0.00196163 281 | 27900,0.00200061 282 | 28000,0.00218542 283 | 28100,0.00371396 284 | 28200,0.00219511 285 | 28300,0.00211851 286 | 28400,0.00180313 287 | 28500,0.00209845 288 | 28600,0.00200499 289 | 28700,0.00196658 290 | 28800,0.00199698 291 | 28900,0.00196232 292 | 29000,0.00221769 293 | 29100,0.00195577 294 | 29200,0.00183482 295 | 29300,0.00214836 296 | 29400,0.00200001 297 | 29500,0.00209811 298 | 29600,0.00182426 299 | 29700,0.00183021 300 | 29800,0.00207064 301 | 29900,0.00182582 302 | 30000,0.00185238 303 | 30100,0.00190638 304 | 30200,0.00208079 305 | 30300,0.0019743 306 | 30400,0.00193444 307 | 30500,0.002169 308 | 30600,0.00209239 309 | 30700,0.0022304 310 | 30800,0.00192586 311 | 30900,0.00226734 312 | 31000,0.00211762 313 | 31100,0.00204078 314 | 31200,0.00212872 315 | 31300,0.00202593 316 | 31400,0.00207427 317 | 31500,0.0020698 318 | 31600,0.0019368 319 | 31700,0.00181583 320 | 31800,0.00188507 321 | 31900,0.00224807 322 | 32000,0.00186713 323 | 32100,0.00215887 324 | 32200,0.00192199 325 | 32300,0.00187536 326 | 32400,0.0019266 327 | 32500,0.00206171 328 | 32600,0.00195802 329 | 32700,0.00180224 330 | 32800,0.00204784 331 | 32900,0.00185295 332 | 33000,0.00179772 333 | 33100,0.00200588 334 | 
33200,0.00180501 335 | 33300,0.00202211 336 | 33400,0.00176389 337 | 33500,0.00217264 338 | 33600,0.00197118 339 | 33700,0.00214035 340 | 33800,0.00188156 341 | 33900,0.00207594 342 | 34000,0.00189409 343 | 34100,0.00192462 344 | 34200,0.00193362 345 | 34300,0.00209507 346 | 34400,0.00171714 347 | 34500,0.00173336 348 | 34600,0.00208428 349 | 34700,0.00182703 350 | 34800,0.00184225 351 | 34900,0.00181863 352 | 35000,0.00197266 353 | 35100,0.00188226 354 | 35200,0.00189063 355 | 35300,0.00198731 356 | 35400,0.00197258 357 | 35500,0.00183092 358 | 35600,0.00173243 359 | 35700,0.00189613 360 | 35800,0.00172438 361 | 35900,0.00150016 362 | 36000,0.00195629 363 | 36100,0.00191146 364 | 36200,0.00191728 365 | 36300,0.00186127 366 | 36400,0.00173149 367 | 36500,0.00168166 368 | 36600,0.00180734 369 | 36700,0.00177885 370 | 36800,0.00196867 371 | 36900,0.00187292 372 | 37000,0.0017113 373 | 37100,0.00172722 374 | 37200,0.00197744 375 | 37300,0.00200758 376 | 37400,0.00177182 377 | 37500,0.00190977 378 | 37600,0.0018513 379 | 37700,0.00178822 380 | 37800,0.00186463 381 | 37900,0.0016818 382 | 38000,0.00190153 383 | 38100,0.0018338 384 | 38200,0.00179735 385 | 38300,0.00174119 386 | 38400,0.00180245 387 | 38500,0.00176394 388 | 38600,0.00202209 389 | 38700,0.00172909 390 | 38800,0.00183722 391 | 38900,0.00159391 392 | 39000,0.00192084 393 | 39100,0.00178063 394 | 39200,0.00172627 395 | 39300,0.00184457 396 | 39400,0.00184194 397 | 39500,0.0018778 398 | 39600,0.00169506 399 | 39700,0.00170375 400 | 39800,0.0017816 401 | 39900,0.00238205 402 | 40000,0.00176586 403 | 40100,0.00180392 404 | 40200,0.0018095 405 | 40300,0.00184128 406 | 40400,0.00179664 407 | 40500,0.00189823 408 | 40600,0.0017715 409 | 40700,0.00170485 410 | 40800,0.00180898 411 | 40900,0.00175026 412 | 41000,0.00167463 413 | 41100,0.00158784 414 | 41200,0.00171204 415 | 41300,0.00172673 416 | 41400,0.00174535 417 | 41500,0.00179059 418 | 41600,0.00182522 419 | 41700,0.00168595 420 | 41800,0.00178318 421 | 
41900,0.00176431 422 | 42000,0.00164353 423 | 42100,0.00167165 424 | 42200,0.00174652 425 | 42300,0.00160913 426 | 42400,0.00176866 427 | 42500,0.00161901 428 | 42600,0.00174982 429 | 42700,0.00170475 430 | 42800,0.00166311 431 | 42900,0.00173859 432 | 43000,0.00174393 433 | 43100,0.00181551 434 | 43200,0.00173508 435 | 43300,0.00168174 436 | 43400,0.00166727 437 | 43500,0.00148956 438 | 43600,0.00159691 439 | 43700,0.00176613 440 | 43800,0.00162447 441 | 43900,0.00176274 442 | 44000,0.00167484 443 | 44100,0.0015209 444 | 44200,0.00160382 445 | 44300,0.00155581 446 | 44400,0.00167568 447 | 44500,0.0016941 448 | 44600,0.00167878 449 | 44700,0.00168765 450 | 44800,0.00159593 451 | 44900,0.00166672 452 | 45000,0.00179241 453 | 45100,0.00169567 454 | 45200,0.00152547 455 | 45300,0.00177548 456 | 45400,0.00185054 457 | 45500,0.00165705 458 | 45600,0.00172556 459 | 45700,0.00162836 460 | 45800,0.00157868 461 | 45900,0.0016476 462 | 46000,0.00173613 463 | 46100,0.00157029 464 | 46200,0.00155759 465 | 46300,0.00160391 466 | 46400,0.00199438 467 | 46500,0.00183637 468 | 46600,0.00151888 469 | 46700,0.00153408 470 | 46800,0.00163449 471 | 46900,0.00168919 472 | 47000,0.00180749 473 | 47100,0.0017824 474 | 47200,0.00176956 475 | 47300,0.00159044 476 | 47400,0.0015821 477 | 47500,0.00169547 478 | 47600,0.00171688 479 | 47700,0.00163962 480 | 47800,0.00178536 481 | 47900,0.0015887 482 | 48000,0.00161103 483 | 48100,0.00163812 484 | 48200,0.00205861 485 | 48300,0.00167303 486 | 48400,0.00146919 487 | 48500,0.00162586 488 | 48600,0.00157993 489 | 48700,0.00160425 490 | 48800,0.00164177 491 | 48900,0.00165546 492 | 49000,0.00168004 493 | 49100,0.00156178 494 | 49200,0.00150551 495 | 49300,0.00149106 496 | 49400,0.00169375 497 | 49500,0.00171521 498 | 49600,0.00149004 499 | 49700,0.00150316 500 | 49800,0.00154647 501 | 49900,0.00158611 502 | 50000,0.00156218 503 | 50100,0.00150733 504 | 50200,0.00158121 505 | 50300,0.00156822 506 | 50400,0.0015676 507 | 50500,0.00153929 508 | 
50600,0.00163143 509 | 50700,0.00144949 510 | 50800,0.00161047 511 | 50900,0.00170176 512 | 51000,0.00159796 513 | 51100,0.00163418 514 | 51200,0.00145749 515 | 51300,0.00147456 516 | 51400,0.0015956 517 | 51500,0.00160603 518 | 51600,0.00167819 519 | 51700,0.00181636 520 | 51800,0.00159846 521 | 51900,0.00163757 522 | 52000,0.00164705 523 | 52100,0.00162335 524 | 52200,0.00155506 525 | 52300,0.0016304 526 | 52400,0.00154745 527 | 52500,0.00158139 528 | 52600,0.00160185 529 | 52700,0.00161607 530 | 52800,0.00159952 531 | 52900,0.00150746 532 | 53000,0.0014947 533 | 53100,0.0015949 534 | 53200,0.00139536 535 | 53300,0.00145117 536 | 53400,0.00172343 537 | 53500,0.0016913 538 | 53600,0.00167895 539 | 53700,0.00178227 540 | 53800,0.00152541 541 | 53900,0.00149071 542 | 54000,0.00159026 543 | 54100,0.00151483 544 | 54200,0.00154832 545 | 54300,0.00138391 546 | 54400,0.00147757 547 | 54500,0.00190618 548 | 54600,0.0014913 549 | 54700,0.00152622 550 | 54800,0.00163492 551 | 54900,0.00145927 552 | 55000,0.00157121 553 | 55100,0.00159609 554 | 55200,0.00137153 555 | 55300,0.00150461 556 | 55400,0.0013968 557 | 55500,0.00148695 558 | 55600,0.00143524 559 | 55700,0.00150126 560 | 55800,0.00147046 561 | 55900,0.00160495 562 | 56000,0.00196999 563 | 56100,0.00304764 564 | 56200,0.00159189 565 | 56300,0.00138341 566 | 56400,0.00156124 567 | 56500,0.00153725 568 | 56600,0.00165391 569 | 56700,0.00164813 570 | 56800,0.00149202 571 | 56900,0.00145211 572 | 57000,0.00150528 573 | 57100,0.00149947 574 | 57200,0.00163291 575 | 57300,0.00131073 576 | 57400,0.00151201 577 | 57500,0.00155615 578 | 57600,0.00154206 579 | 57700,0.00157621 580 | 57800,0.00185777 581 | 57900,0.00157008 582 | 58000,0.00154295 583 | 58100,0.00146213 584 | 58200,0.00137366 585 | 58300,0.00153759 586 | 58400,0.00148941 587 | 58500,0.00155203 588 | 58600,0.00193765 589 | 58700,0.00144579 590 | 58800,0.00136471 591 | 58900,0.00144163 592 | 59000,0.00160365 593 | 59100,0.0015007 594 | 59200,0.0014567 595 | 
59300,0.00151242 596 | 59400,0.00149628 597 | 59500,0.00137654 598 | 59600,0.00152345 599 | 59700,0.00148859 600 | 59800,0.00157845 601 | 59900,0.00152738 602 | 60000,0.00147941 603 | 60100,0.00137721 604 | 60200,0.00146734 605 | 60300,0.00152747 606 | 60400,0.00137987 607 | 60500,0.00164816 608 | 60600,0.00154213 609 | 60700,0.00148975 610 | 60800,0.00140002 611 | 60900,0.00152661 612 | 61000,0.00162336 613 | 61100,0.00133718 614 | 61200,0.00144432 615 | 61300,0.00149388 616 | 61400,0.00145157 617 | 61500,0.0014264 618 | 61600,0.00153112 619 | 61700,0.00142392 620 | 61800,0.00142744 621 | 61900,0.00141188 622 | 62000,0.00149004 623 | 62100,0.00138784 624 | 62200,0.00146353 625 | 62300,0.00134787 626 | 62400,0.00139726 627 | 62500,0.00137262 628 | 62600,0.00140157 629 | 62700,0.001462 630 | 62800,0.00154382 631 | 62900,0.00152581 632 | 63000,0.00151619 633 | 63100,0.0015427 634 | 63200,0.00139838 635 | 63300,0.00152401 636 | 63400,0.00136274 637 | 63500,0.00162527 638 | 63600,0.00148838 639 | 63700,0.00142942 640 | 63800,0.00136381 641 | 63900,0.00159447 642 | 64000,0.00144415 643 | 64100,0.00133687 644 | 64200,0.00137048 645 | 64300,0.00138904 646 | 64400,0.00137677 647 | 64500,0.0014257 648 | 64600,0.0014502 649 | 64700,0.00136273 650 | 64800,0.00130196 651 | 64900,0.00156803 652 | 65000,0.00143668 653 | 65100,0.00150542 654 | 65200,0.00149212 655 | 65300,0.00139766 656 | 65400,0.00150766 657 | 65500,0.00144801 658 | 65600,0.00139758 659 | 65700,0.00144217 660 | 65800,0.00152615 661 | 65900,0.00149441 662 | 66000,0.0013718 663 | 66100,0.00144559 664 | 66200,0.00131964 665 | 66300,0.00147866 666 | 66400,0.00159749 667 | 66500,0.00139705 668 | 66600,0.00137028 669 | 66700,0.00154561 670 | 66800,0.00152349 671 | 66900,0.00157843 672 | 67000,0.00142124 673 | 67100,0.00145153 674 | 67200,0.00155815 675 | 67300,0.00144988 676 | 67400,0.00135865 677 | 67500,0.00139512 678 | 67600,0.00131993 679 | 67700,0.00135754 680 | 67800,0.00139975 681 | 67900,0.0014123 682 | 
68000,0.00138907 683 | 68100,0.00130575 684 | 68200,0.00145657 685 | 68300,0.00149118 686 | 68400,0.00161435 687 | 68500,0.00142317 688 | 68600,0.00131698 689 | 68700,0.00154529 690 | 68800,0.00137152 691 | 68900,0.00142123 692 | 69000,0.00137951 693 | 69100,0.00133451 694 | 69200,0.00131662 695 | 69300,0.00158527 696 | 69400,0.00142577 697 | 69500,0.0013472 698 | 69600,0.00133072 699 | 69700,0.00161246 700 | 69800,0.00139592 701 | 69900,0.00135995 702 | 70000,0.00145963 703 | 70100,0.00148033 704 | 70200,0.00139188 705 | 70300,0.0013649 706 | 70400,0.00134128 707 | 70500,0.00139059 708 | 70600,0.00145942 709 | 70700,0.00142759 710 | 70800,0.00164491 711 | 70900,0.00152516 712 | 71000,0.0014739 713 | 71100,0.00137724 714 | 71200,0.00139597 715 | 71300,0.00125149 716 | 71400,0.00126325 717 | 71500,0.00142619 718 | 71600,0.00137449 719 | 71700,0.00150057 720 | 71800,0.0014423 721 | 71900,0.00130491 722 | 72000,0.00141756 723 | 72100,0.00133292 724 | 72200,0.00148131 725 | 72300,0.00134394 726 | 72400,0.00129534 727 | 72500,0.00143005 728 | 72600,0.00143358 729 | 72700,0.00133754 730 | 72800,0.00138418 731 | 72900,0.00132506 732 | 73000,0.00139663 733 | 73100,0.0013727 734 | 73200,0.00118413 735 | 73300,0.00127908 736 | 73400,0.00141799 737 | 73500,0.00128347 738 | 73600,0.00137708 739 | 73700,0.00138752 740 | 73800,0.00133654 741 | 73900,0.00279419 742 | 74000,0.00137061 743 | 74100,0.00125471 744 | 74200,0.00142629 745 | 74300,0.00142207 746 | 74400,0.00131955 747 | 74500,0.00124669 748 | 74600,0.0013792 749 | 74700,0.00133298 750 | 74800,0.00144157 751 | 74900,0.00124884 752 | 75000,0.00132273 753 | 75100,0.00113553 754 | 75200,0.00124119 755 | 75300,0.00139534 756 | 75400,0.00133496 757 | 75500,0.00142934 758 | 75600,0.00133627 759 | 75700,0.00138589 760 | 75800,0.00137543 761 | 75900,0.00132993 762 | 76000,0.00124789 763 | 76100,0.0014265 764 | 76200,0.00131879 765 | 76300,0.00128409 766 | 76400,0.00141414 767 | 76500,0.00127343 768 | 76600,0.00132109 769 | 
76700,0.00140846 770 | 76800,0.00162823 771 | 76900,0.00147701 772 | 77000,0.00134931 773 | 77100,0.00136694 774 | 77200,0.0013685 775 | 77300,0.00138813 776 | 77400,0.00135676 777 | 77500,0.00151116 778 | 77600,0.00137177 779 | 77700,0.00132962 780 | 77800,0.00142245 781 | 77900,0.00122647 782 | 78000,0.00137827 783 | 78100,0.00144978 784 | 78200,0.00140002 785 | 78300,0.00129025 786 | 78400,0.00141204 787 | 78500,0.00140464 788 | 78600,0.00143375 789 | 78700,0.0014177 790 | 78800,0.00126789 791 | 78900,0.0012981 792 | 79000,0.00139515 793 | 79100,0.00130515 794 | 79200,0.00121193 795 | 79300,0.0014183 796 | 79400,0.0013811 797 | 79500,0.00133399 798 | 79600,0.00133762 799 | 79700,0.00158031 800 | 79800,0.00135261 801 | 79900,0.00141784 802 | 80000,0.0013372 803 | 80100,0.00125766 804 | 80200,0.00140399 805 | 80300,0.00124649 806 | 80400,0.00203144 807 | 80500,0.00135901 808 | 80600,0.00125144 809 | 80700,0.00127175 810 | 80800,0.0013115 811 | 80900,0.0012823 812 | 81000,0.00146114 813 | 81100,0.00136339 814 | 81200,0.00135369 815 | 81300,0.00124319 816 | 81400,0.00124992 817 | 81500,0.00135145 818 | 81600,0.00137865 819 | 81700,0.00129895 820 | 81800,0.00130391 821 | 81900,0.0014373 822 | 82000,0.00131469 823 | 82100,0.00129949 824 | 82200,0.00129608 825 | 82300,0.00129109 826 | 82400,0.00126815 827 | 82500,0.00128627 828 | 82600,0.00125721 829 | 82700,0.00138771 830 | 82800,0.00129911 831 | 82900,0.0012674 832 | 83000,0.00136064 833 | 83100,0.00127796 834 | 83200,0.00149017 835 | 83300,0.00122327 836 | 83400,0.00135122 837 | 83500,0.00128261 838 | 83600,0.00155517 839 | 83700,0.00136832 840 | 83800,0.00142062 841 | 83900,0.00135351 842 | 84000,0.00139445 843 | 84100,0.00246676 844 | 84200,0.00131507 845 | 84300,0.0013507 846 | 84400,0.00123545 847 | 84500,0.00133689 848 | 84600,0.00148375 849 | 84700,0.00129445 850 | 84800,0.00122719 851 | 84900,0.00127347 852 | 85000,0.00128761 853 | 85100,0.00128517 854 | 85200,0.00132625 855 | 85300,0.00126016 856 | 
85400,0.00141513 857 | 85500,0.00138298 858 | 85600,0.00135385 859 | 85700,0.00129845 860 | 85800,0.00141728 861 | 85900,0.0014491 862 | 86000,0.0014161 863 | 86100,0.00122004 864 | 86200,0.00119704 865 | 86300,0.00122997 866 | 86400,0.00128501 867 | 86500,0.00137561 868 | 86600,0.00131333 869 | 86700,0.00136097 870 | 86800,0.00125163 871 | 86900,0.00135439 872 | 87000,0.00122684 873 | 87100,0.00133056 874 | 87200,0.00130097 875 | 87300,0.00121684 876 | 87400,0.00124445 877 | 87500,0.00119637 878 | 87600,0.00127092 879 | 87700,0.00127606 880 | 87800,0.00131699 881 | 87900,0.00139977 882 | 88000,0.00132631 883 | 88100,0.00121087 884 | 88200,0.00130249 885 | 88300,0.00128047 886 | 88400,0.00125405 887 | 88500,0.0012627 888 | 88600,0.0011767 889 | 88700,0.00125381 890 | 88800,0.00124107 891 | 88900,0.00132803 892 | 89000,0.00119101 893 | 89100,0.00121053 894 | 89200,0.00130717 895 | 89300,0.0013357 896 | 89400,0.00135215 897 | 89500,0.00126081 898 | 89600,0.00117898 899 | 89700,0.0014398 900 | 89800,0.00125104 901 | 89900,0.00126792 902 | 90000,0.00130155 903 | 90100,0.00125235 904 | 90200,0.00142799 905 | 90300,0.0012254 906 | 90400,0.00123656 907 | 90500,0.00124785 908 | 90600,0.00126777 909 | 90700,0.0014421 910 | 90800,0.00146673 911 | 90900,0.00122206 912 | 91000,0.00130475 913 | 91100,0.00136276 914 | 91200,0.00148573 915 | 91300,0.00142934 916 | 91400,0.001398 917 | 91500,0.00142842 918 | 91600,0.00145168 919 | 91700,0.00127878 920 | 91800,0.00118701 921 | 91900,0.00117128 922 | 92000,0.00133858 923 | 92100,0.00129506 924 | 92200,0.00137313 925 | 92300,0.00124526 926 | 92400,0.00123978 927 | 92500,0.0014432 928 | 92600,0.00129967 929 | 92700,0.00132536 930 | 92800,0.00123084 931 | 92900,0.00127 932 | 93000,0.00130325 933 | 93100,0.00123581 934 | 93200,0.00137206 935 | 93300,0.00146386 936 | 93400,0.00128733 937 | 93500,0.00126897 938 | 93600,0.00119005 939 | 93700,0.00130315 940 | 93800,0.0011803 941 | 93900,0.0011858 942 | 94000,0.00128704 943 | 
94100,0.00134156 944 | 94200,0.00123203 945 | 94300,0.0012119 946 | 94400,0.00143629 947 | 94500,0.00144717 948 | 94600,0.00138955 949 | 94700,0.00213993 950 | 94800,0.00126155 951 | 94900,0.00143978 952 | 95000,0.00141736 953 | 95100,0.00125487 954 | 95200,0.00128202 955 | 95300,0.00118192 956 | 95400,0.00114864 957 | 95500,0.00119345 958 | 95600,0.00128356 959 | 95700,0.00129284 960 | 95800,0.00128242 961 | 95900,0.00127102 962 | 96000,0.00115594 963 | 96100,0.00128296 964 | 96200,0.00116988 965 | 96300,0.00122622 966 | 96400,0.00118322 967 | 96500,0.00112986 968 | 96600,0.00126637 969 | 96700,0.00124889 970 | 96800,0.00122706 971 | 96900,0.00123374 972 | 97000,0.0013491 973 | 97100,0.00112714 974 | 97200,0.00122698 975 | 97300,0.00118573 976 | 97400,0.00117441 977 | 97500,0.00142913 978 | 97600,0.00122653 979 | 97700,0.00117581 980 | 97800,0.00120579 981 | 97900,0.0014752 982 | 98000,0.00137299 983 | 98100,0.00130875 984 | 98200,0.00148519 985 | 98300,0.00124466 986 | 98400,0.00126573 987 | 98500,0.00139465 988 | 98600,0.00138198 989 | 98700,0.00138049 990 | 98800,0.00124884 991 | 98900,0.00118487 992 | 99000,0.00129928 993 | 99100,0.00130024 994 | 99200,0.00123633 995 | 99300,0.00134584 996 | 99400,0.00117333 997 | 99500,0.0012871 998 | 99600,0.00126785 999 | 99700,0.00118933 1000 | 99800,0.00119916 1001 | 99900,0.00132369 1002 | 100000,0.00127545 1003 | 100100,0.00123916 1004 | 100200,0.0013244 1005 | 100300,0.00128811 1006 | 100400,0.0013432 1007 | 100500,0.00138082 1008 | 100600,0.00119419 1009 | 100700,0.00116542 1010 | 100800,0.00119881 1011 | 100900,0.00138233 1012 | 101000,0.00117144 1013 | 101100,0.00121192 1014 | 101200,0.00122738 1015 | 101300,0.00124558 1016 | 101400,0.00122856 1017 | 101500,0.00115295 1018 | 101600,0.00121646 1019 | 101700,0.00124708 1020 | 101800,0.00118655 1021 | 101900,0.00129474 1022 | 102000,0.00128724 1023 | 102100,0.00137261 1024 | 102200,0.00111382 1025 | 102300,0.00118499 1026 | 102400,0.00110762 1027 | 102500,0.00121572 
1028 | 102600,0.00128229 1029 | 102700,0.00130797 1030 | 102800,0.00109266 1031 | 102900,0.00123657 1032 | 103000,0.0012295 1033 | 103100,0.00118036 1034 | 103200,0.00115597 1035 | 103300,0.0011866 1036 | 103400,0.00128172 1037 | 103500,0.00127518 1038 | 103600,0.00115075 1039 | 103700,0.00130977 1040 | 103800,0.00121985 1041 | 103900,0.00128034 1042 | 104000,0.00132405 1043 | 104100,0.00125346 1044 | 104200,0.0011969 1045 | 104300,0.00123043 1046 | 104400,0.00117399 1047 | 104500,0.0012513 1048 | 104600,0.00124458 1049 | 104700,0.00115547 1050 | 104800,0.001359 1051 | 104900,0.0011086 1052 | 105000,0.00125339 1053 | 105100,0.00126583 1054 | 105200,0.00126462 1055 | 105300,0.00118451 1056 | 105400,0.00119827 1057 | 105500,0.0011788 1058 | 105600,0.00128711 1059 | 105700,0.00112877 1060 | 105800,0.00128098 1061 | 105900,0.00124152 1062 | 106000,0.00128624 1063 | 106100,0.00128459 1064 | 106200,0.0011824 1065 | 106300,0.00117168 1066 | 106400,0.00118152 1067 | 106500,0.00113544 1068 | 106600,0.00124105 1069 | 106700,0.00113198 1070 | 106800,0.00126643 1071 | 106900,0.00128701 1072 | 107000,0.00120756 1073 | 107100,0.00124714 1074 | 107200,0.00122725 1075 | -------------------------------------------------------------------------------- /results/mnist/3_iter_train_acc.csv: -------------------------------------------------------------------------------- 1 | step,train_acc 2 | 0,0.078125 3 | 100,0.9453125 4 | 200,0.9765625 5 | 300,0.984375 6 | 400,0.9765625 7 | 500,0.9921875 8 | 600,0.9921875 9 | 700,0.96875 10 | 800,0.9921875 11 | 900,0.9921875 12 | 1000,0.984375 13 | 1100,0.984375 14 | 1200,0.984375 15 | 1300,1.0 16 | 1400,1.0 17 | 1500,1.0 18 | 1600,0.9765625 19 | 1700,1.0 20 | 1800,0.9921875 21 | 1900,0.9921875 22 | 2000,1.0 23 | 2100,0.9921875 24 | 2200,1.0 25 | 2300,1.0 26 | 2400,1.0 27 | 2500,0.9921875 28 | 2600,1.0 29 | 2700,1.0 30 | 2800,1.0 31 | 2900,0.9921875 32 | 3000,1.0 33 | 3100,0.9921875 34 | 3200,0.9921875 35 | 3300,1.0 36 | 3400,1.0 37 | 3500,1.0 38 | 
3600,1.0 39 | 3700,1.0 40 | 3800,1.0 41 | 3900,1.0 42 | 4000,1.0 43 | 4100,1.0 44 | 4200,1.0 45 | 4300,1.0 46 | 4400,1.0 47 | 4500,1.0 48 | 4600,1.0 49 | 4700,1.0 50 | 4800,1.0 51 | 4900,1.0 52 | 5000,1.0 53 | 5100,0.9921875 54 | 5200,1.0 55 | 5300,1.0 56 | 5400,1.0 57 | 5500,0.9921875 58 | 5600,0.9921875 59 | 5700,1.0 60 | 5800,1.0 61 | 5900,1.0 62 | 6000,1.0 63 | 6100,1.0 64 | 6200,1.0 65 | 6300,1.0 66 | 6400,1.0 67 | 6500,1.0 68 | 6600,1.0 69 | 6700,1.0 70 | 6800,1.0 71 | 6900,1.0 72 | 7000,1.0 73 | 7100,1.0 74 | 7200,1.0 75 | 7300,1.0 76 | 7400,1.0 77 | 7500,1.0 78 | 7600,1.0 79 | 7700,1.0 80 | 7800,1.0 81 | 7900,1.0 82 | 8000,1.0 83 | 8100,1.0 84 | 8200,1.0 85 | 8300,1.0 86 | 8400,1.0 87 | 8500,1.0 88 | 8600,1.0 89 | 8700,1.0 90 | 8800,1.0 91 | 8900,1.0 92 | 9000,1.0 93 | 9100,1.0 94 | 9200,1.0 95 | 9300,1.0 96 | 9400,1.0 97 | 9500,1.0 98 | 9600,1.0 99 | 9700,1.0 100 | 9800,1.0 101 | 9900,1.0 102 | 10000,1.0 103 | 10100,1.0 104 | 10200,1.0 105 | 10300,1.0 106 | 10400,1.0 107 | 10500,1.0 108 | 10600,1.0 109 | 10700,1.0 110 | 10800,1.0 111 | 10900,1.0 112 | 11000,1.0 113 | 11100,1.0 114 | 11200,1.0 115 | 11300,1.0 116 | 11400,1.0 117 | 11500,1.0 118 | 11600,1.0 119 | 11700,1.0 120 | 11800,1.0 121 | 11900,1.0 122 | 12000,1.0 123 | 12100,1.0 124 | 12200,1.0 125 | 12300,1.0 126 | 12400,1.0 127 | 12500,1.0 128 | 12600,1.0 129 | 12700,1.0 130 | 12800,1.0 131 | 12900,1.0 132 | 13000,1.0 133 | 13100,1.0 134 | 13200,1.0 135 | 13300,1.0 136 | 13400,1.0 137 | 13500,1.0 138 | 13600,1.0 139 | 13700,1.0 140 | 13800,1.0 141 | 13900,1.0 142 | 14000,1.0 143 | 14100,1.0 144 | 14200,1.0 145 | 14300,1.0 146 | 14400,1.0 147 | 14500,1.0 148 | 14600,1.0 149 | 14700,1.0 150 | 14800,1.0 151 | 14900,1.0 152 | 15000,1.0 153 | 15100,1.0 154 | 15200,1.0 155 | 15300,1.0 156 | 15400,1.0 157 | 15500,1.0 158 | 15600,1.0 159 | 15700,1.0 160 | 15800,1.0 161 | 15900,1.0 162 | 16000,1.0 163 | 16100,1.0 164 | 16200,1.0 165 | 16300,1.0 166 | 16400,1.0 167 | 16500,1.0 168 | 16600,1.0 169 | 16700,1.0 
170 | 16800,1.0 171 | 16900,1.0 172 | 17000,1.0 173 | 17100,1.0 174 | 17200,1.0 175 | 17300,1.0 176 | 17400,1.0 177 | 17500,1.0 178 | 17600,1.0 179 | 17700,1.0 180 | 17800,1.0 181 | 17900,1.0 182 | 18000,1.0 183 | 18100,1.0 184 | 18200,1.0 185 | 18300,1.0 186 | 18400,1.0 187 | 18500,1.0 188 | 18600,1.0 189 | 18700,1.0 190 | 18800,1.0 191 | 18900,1.0 192 | 19000,1.0 193 | 19100,1.0 194 | 19200,1.0 195 | 19300,1.0 196 | 19400,1.0 197 | 19500,1.0 198 | 19600,1.0 199 | 19700,1.0 200 | 19800,1.0 201 | 19900,1.0 202 | 20000,1.0 203 | 20100,1.0 204 | 20200,1.0 205 | 20300,1.0 206 | 20400,1.0 207 | 20500,1.0 208 | 20600,1.0 209 | 20700,1.0 210 | 20800,1.0 211 | 20900,1.0 212 | 21000,1.0 213 | 21100,1.0 214 | 21200,1.0 215 | 21300,1.0 216 | 21400,1.0 217 | 21500,1.0 218 | 21600,1.0 219 | 21700,1.0 220 | 21800,1.0 221 | 21900,1.0 222 | 22000,1.0 223 | 22100,1.0 224 | 22200,1.0 225 | 22300,1.0 226 | 22400,1.0 227 | 22500,1.0 228 | 22600,1.0 229 | 22700,1.0 230 | 22800,1.0 231 | 22900,1.0 232 | 23000,1.0 233 | 23100,1.0 234 | 23200,1.0 235 | 23300,1.0 236 | 23400,1.0 237 | 23500,1.0 238 | 23600,1.0 239 | 23700,1.0 240 | 23800,1.0 241 | 23900,1.0 242 | 24000,1.0 243 | 24100,1.0 244 | 24200,1.0 245 | 24300,1.0 246 | 24400,1.0 247 | 24500,1.0 248 | 24600,1.0 249 | 24700,1.0 250 | 24800,1.0 251 | 24900,1.0 252 | 25000,1.0 253 | 25100,1.0 254 | 25200,1.0 255 | 25300,1.0 256 | 25400,1.0 257 | 25500,1.0 258 | 25600,1.0 259 | 25700,1.0 260 | 25800,1.0 261 | 25900,1.0 262 | 26000,1.0 263 | 26100,1.0 264 | 26200,1.0 265 | 26300,1.0 266 | 26400,1.0 267 | 26500,1.0 268 | 26600,1.0 269 | 26700,1.0 270 | 26800,1.0 271 | 26900,1.0 272 | 27000,1.0 273 | 27100,1.0 274 | 27200,1.0 275 | 27300,1.0 276 | 27400,1.0 277 | 27500,1.0 278 | 27600,1.0 279 | 27700,1.0 280 | 27800,1.0 281 | 27900,1.0 282 | 28000,1.0 283 | 28100,1.0 284 | 28200,1.0 285 | 28300,1.0 286 | 28400,1.0 287 | 28500,1.0 288 | 28600,1.0 289 | 28700,1.0 290 | 28800,1.0 291 | 28900,1.0 292 | 29000,1.0 293 | 29100,1.0 294 | 29200,1.0 
295 | 29300,1.0 296 | 29400,1.0 297 | 29500,1.0 298 | 29600,1.0 299 | 29700,1.0 300 | 29800,1.0 301 | 29900,1.0 302 | 30000,1.0 303 | 30100,1.0 304 | 30200,1.0 305 | 30300,1.0 306 | 30400,1.0 307 | 30500,1.0 308 | 30600,1.0 309 | 30700,1.0 310 | 30800,1.0 311 | 30900,1.0 312 | 31000,1.0 313 | 31100,1.0 314 | 31200,1.0 315 | 31300,1.0 316 | 31400,1.0 317 | 31500,1.0 318 | 31600,1.0 319 | 31700,1.0 320 | 31800,1.0 321 | 31900,1.0 322 | 32000,1.0 323 | 32100,1.0 324 | 32200,1.0 325 | 32300,1.0 326 | 32400,1.0 327 | 32500,1.0 328 | 32600,1.0 329 | 32700,1.0 330 | 32800,1.0 331 | 32900,1.0 332 | 33000,1.0 333 | 33100,1.0 334 | 33200,1.0 335 | 33300,1.0 336 | 33400,1.0 337 | 33500,1.0 338 | 33600,1.0 339 | 33700,1.0 340 | 33800,1.0 341 | 33900,1.0 342 | 34000,1.0 343 | 34100,1.0 344 | 34200,1.0 345 | 34300,1.0 346 | 34400,1.0 347 | 34500,1.0 348 | 34600,1.0 349 | 34700,1.0 350 | 34800,1.0 351 | 34900,1.0 352 | 35000,1.0 353 | 35100,1.0 354 | 35200,1.0 355 | 35300,1.0 356 | 35400,1.0 357 | 35500,1.0 358 | 35600,1.0 359 | 35700,1.0 360 | 35800,1.0 361 | 35900,1.0 362 | 36000,1.0 363 | 36100,1.0 364 | 36200,1.0 365 | 36300,1.0 366 | 36400,1.0 367 | 36500,1.0 368 | 36600,1.0 369 | 36700,1.0 370 | 36800,1.0 371 | 36900,1.0 372 | 37000,1.0 373 | 37100,1.0 374 | 37200,1.0 375 | 37300,1.0 376 | 37400,1.0 377 | 37500,1.0 378 | 37600,1.0 379 | 37700,1.0 380 | 37800,1.0 381 | 37900,1.0 382 | 38000,1.0 383 | 38100,1.0 384 | 38200,1.0 385 | 38300,1.0 386 | 38400,1.0 387 | 38500,1.0 388 | 38600,1.0 389 | 38700,1.0 390 | 38800,1.0 391 | 38900,1.0 392 | 39000,1.0 393 | 39100,1.0 394 | 39200,1.0 395 | 39300,1.0 396 | 39400,1.0 397 | 39500,1.0 398 | 39600,1.0 399 | 39700,1.0 400 | 39800,1.0 401 | 39900,1.0 402 | 40000,1.0 403 | 40100,1.0 404 | 40200,1.0 405 | 40300,1.0 406 | 40400,1.0 407 | 40500,1.0 408 | 40600,1.0 409 | 40700,1.0 410 | 40800,1.0 411 | 40900,1.0 412 | 41000,1.0 413 | 41100,1.0 414 | 41200,1.0 415 | 41300,1.0 416 | 41400,1.0 417 | 41500,1.0 418 | 41600,1.0 419 | 41700,1.0 
420 | 41800,1.0 421 | 41900,1.0 422 | 42000,1.0 423 | 42100,1.0 424 | 42200,1.0 425 | 42300,1.0 426 | 42400,1.0 427 | 42500,1.0 428 | 42600,1.0 429 | 42700,1.0 430 | 42800,1.0 431 | 42900,1.0 432 | 43000,1.0 433 | 43100,1.0 434 | 43200,1.0 435 | 43300,1.0 436 | 43400,1.0 437 | 43500,1.0 438 | 43600,1.0 439 | 43700,1.0 440 | 43800,1.0 441 | 43900,1.0 442 | 44000,1.0 443 | 44100,1.0 444 | 44200,1.0 445 | 44300,1.0 446 | 44400,1.0 447 | 44500,1.0 448 | 44600,1.0 449 | 44700,1.0 450 | 44800,1.0 451 | 44900,1.0 452 | 45000,1.0 453 | 45100,1.0 454 | 45200,1.0 455 | 45300,1.0 456 | 45400,1.0 457 | 45500,1.0 458 | 45600,1.0 459 | 45700,1.0 460 | 45800,1.0 461 | 45900,1.0 462 | 46000,1.0 463 | 46100,1.0 464 | 46200,1.0 465 | 46300,1.0 466 | 46400,1.0 467 | 46500,1.0 468 | 46600,1.0 469 | 46700,1.0 470 | 46800,1.0 471 | 46900,1.0 472 | 47000,1.0 473 | 47100,1.0 474 | 47200,1.0 475 | 47300,1.0 476 | 47400,1.0 477 | 47500,1.0 478 | 47600,1.0 479 | 47700,1.0 480 | 47800,1.0 481 | 47900,1.0 482 | 48000,1.0 483 | 48100,1.0 484 | 48200,1.0 485 | 48300,1.0 486 | 48400,1.0 487 | 48500,1.0 488 | 48600,1.0 489 | 48700,1.0 490 | 48800,1.0 491 | 48900,1.0 492 | 49000,1.0 493 | 49100,1.0 494 | 49200,1.0 495 | 49300,1.0 496 | 49400,1.0 497 | 49500,1.0 498 | 49600,1.0 499 | 49700,1.0 500 | 49800,1.0 501 | 49900,1.0 502 | 50000,1.0 503 | 50100,1.0 504 | 50200,1.0 505 | 50300,1.0 506 | 50400,1.0 507 | 50500,1.0 508 | 50600,1.0 509 | 50700,1.0 510 | 50800,1.0 511 | 50900,1.0 512 | 51000,1.0 513 | 51100,1.0 514 | 51200,1.0 515 | 51300,1.0 516 | 51400,1.0 517 | 51500,1.0 518 | 51600,1.0 519 | 51700,1.0 520 | 51800,1.0 521 | 51900,1.0 522 | 52000,1.0 523 | 52100,1.0 524 | 52200,1.0 525 | 52300,1.0 526 | 52400,1.0 527 | 52500,1.0 528 | 52600,1.0 529 | 52700,1.0 530 | 52800,1.0 531 | 52900,1.0 532 | 53000,1.0 533 | 53100,1.0 534 | 53200,1.0 535 | 53300,1.0 536 | 53400,1.0 537 | 53500,1.0 538 | 53600,1.0 539 | 53700,1.0 540 | 53800,1.0 541 | 53900,1.0 542 | 54000,1.0 543 | 54100,1.0 544 | 54200,1.0 
545 | 54300,1.0 546 | 54400,1.0 547 | 54500,1.0 548 | 54600,1.0 549 | 54700,1.0 550 | 54800,1.0 551 | 54900,1.0 552 | 55000,1.0 553 | 55100,1.0 554 | 55200,1.0 555 | 55300,1.0 556 | 55400,1.0 557 | 55500,1.0 558 | 55600,1.0 559 | 55700,1.0 560 | 55800,1.0 561 | 55900,1.0 562 | 56000,1.0 563 | 56100,1.0 564 | 56200,1.0 565 | 56300,1.0 566 | 56400,1.0 567 | 56500,1.0 568 | 56600,1.0 569 | 56700,1.0 570 | 56800,1.0 571 | 56900,1.0 572 | 57000,1.0 573 | 57100,1.0 574 | 57200,1.0 575 | 57300,1.0 576 | 57400,1.0 577 | 57500,1.0 578 | 57600,1.0 579 | 57700,1.0 580 | 57800,1.0 581 | 57900,1.0 582 | 58000,1.0 583 | 58100,1.0 584 | 58200,1.0 585 | 58300,1.0 586 | 58400,1.0 587 | 58500,1.0 588 | 58600,1.0 589 | 58700,1.0 590 | 58800,1.0 591 | 58900,1.0 592 | 59000,1.0 593 | 59100,1.0 594 | 59200,1.0 595 | 59300,1.0 596 | 59400,1.0 597 | 59500,1.0 598 | 59600,1.0 599 | 59700,1.0 600 | 59800,1.0 601 | 59900,1.0 602 | 60000,1.0 603 | 60100,1.0 604 | 60200,1.0 605 | 60300,1.0 606 | 60400,1.0 607 | 60500,1.0 608 | 60600,1.0 609 | 60700,1.0 610 | 60800,1.0 611 | 60900,1.0 612 | 61000,1.0 613 | 61100,1.0 614 | 61200,1.0 615 | 61300,1.0 616 | 61400,1.0 617 | 61500,1.0 618 | 61600,1.0 619 | 61700,1.0 620 | 61800,1.0 621 | 61900,1.0 622 | 62000,1.0 623 | 62100,1.0 624 | 62200,1.0 625 | 62300,1.0 626 | 62400,1.0 627 | 62500,1.0 628 | 62600,1.0 629 | 62700,1.0 630 | 62800,1.0 631 | 62900,1.0 632 | 63000,1.0 633 | 63100,1.0 634 | 63200,1.0 635 | 63300,1.0 636 | 63400,1.0 637 | 63500,1.0 638 | 63600,1.0 639 | 63700,1.0 640 | 63800,1.0 641 | 63900,1.0 642 | 64000,1.0 643 | 64100,1.0 644 | 64200,1.0 645 | 64300,1.0 646 | 64400,1.0 647 | 64500,1.0 648 | 64600,1.0 649 | 64700,1.0 650 | 64800,1.0 651 | 64900,1.0 652 | 65000,1.0 653 | 65100,1.0 654 | 65200,1.0 655 | 65300,1.0 656 | 65400,1.0 657 | 65500,1.0 658 | 65600,1.0 659 | 65700,1.0 660 | 65800,1.0 661 | 65900,1.0 662 | 66000,1.0 663 | 66100,1.0 664 | 66200,1.0 665 | 66300,1.0 666 | 66400,1.0 667 | 66500,1.0 668 | 66600,1.0 669 | 66700,1.0 
670 | 66800,1.0 671 | 66900,1.0 672 | 67000,1.0 673 | 67100,1.0 674 | 67200,1.0 675 | 67300,1.0 676 | 67400,1.0 677 | 67500,1.0 678 | 67600,1.0 679 | 67700,1.0 680 | 67800,1.0 681 | 67900,1.0 682 | 68000,1.0 683 | 68100,1.0 684 | 68200,1.0 685 | 68300,1.0 686 | 68400,1.0 687 | 68500,1.0 688 | 68600,1.0 689 | 68700,1.0 690 | 68800,1.0 691 | 68900,1.0 692 | 69000,1.0 693 | 69100,1.0 694 | 69200,1.0 695 | 69300,1.0 696 | 69400,1.0 697 | 69500,1.0 698 | 69600,1.0 699 | 69700,1.0 700 | 69800,1.0 701 | 69900,1.0 702 | 70000,1.0 703 | 70100,1.0 704 | 70200,1.0 705 | 70300,1.0 706 | 70400,1.0 707 | 70500,1.0 708 | 70600,1.0 709 | 70700,1.0 710 | 70800,1.0 711 | 70900,1.0 712 | 71000,1.0 713 | 71100,1.0 714 | 71200,1.0 715 | 71300,1.0 716 | 71400,1.0 717 | 71500,1.0 718 | 71600,1.0 719 | 71700,1.0 720 | 71800,1.0 721 | 71900,1.0 722 | 72000,1.0 723 | 72100,1.0 724 | 72200,1.0 725 | 72300,1.0 726 | 72400,1.0 727 | 72500,1.0 728 | 72600,1.0 729 | 72700,1.0 730 | 72800,1.0 731 | 72900,1.0 732 | 73000,1.0 733 | 73100,1.0 734 | 73200,1.0 735 | 73300,1.0 736 | 73400,1.0 737 | 73500,1.0 738 | 73600,1.0 739 | 73700,1.0 740 | 73800,1.0 741 | 73900,1.0 742 | 74000,1.0 743 | 74100,1.0 744 | 74200,1.0 745 | 74300,1.0 746 | 74400,1.0 747 | 74500,1.0 748 | 74600,1.0 749 | 74700,1.0 750 | 74800,1.0 751 | 74900,1.0 752 | 75000,1.0 753 | 75100,1.0 754 | 75200,1.0 755 | 75300,1.0 756 | 75400,1.0 757 | 75500,1.0 758 | 75600,1.0 759 | 75700,1.0 760 | 75800,1.0 761 | 75900,1.0 762 | 76000,1.0 763 | 76100,1.0 764 | 76200,1.0 765 | 76300,1.0 766 | 76400,1.0 767 | 76500,1.0 768 | 76600,1.0 769 | 76700,1.0 770 | 76800,1.0 771 | 76900,1.0 772 | 77000,1.0 773 | 77100,1.0 774 | 77200,1.0 775 | 77300,1.0 776 | 77400,1.0 777 | 77500,1.0 778 | 77600,1.0 779 | 77700,1.0 780 | 77800,1.0 781 | 77900,1.0 782 | 78000,1.0 783 | 78100,1.0 784 | 78200,1.0 785 | 78300,1.0 786 | 78400,1.0 787 | 78500,1.0 788 | 78600,1.0 789 | 78700,1.0 790 | 78800,1.0 791 | 78900,1.0 792 | 79000,1.0 793 | 79100,1.0 794 | 79200,1.0 
795 | 79300,1.0 796 | 79400,1.0 797 | 79500,1.0 798 | 79600,1.0 799 | 79700,1.0 800 | 79800,1.0 801 | 79900,1.0 802 | 80000,1.0 803 | 80100,1.0 804 | 80200,1.0 805 | 80300,1.0 806 | 80400,1.0 807 | 80500,1.0 808 | 80600,1.0 809 | 80700,1.0 810 | 80800,1.0 811 | 80900,1.0 812 | 81000,1.0 813 | 81100,1.0 814 | 81200,1.0 815 | 81300,1.0 816 | 81400,1.0 817 | 81500,1.0 818 | 81600,1.0 819 | 81700,1.0 820 | 81800,1.0 821 | 81900,1.0 822 | 82000,1.0 823 | 82100,1.0 824 | 82200,1.0 825 | 82300,1.0 826 | 82400,1.0 827 | 82500,1.0 828 | 82600,1.0 829 | 82700,1.0 830 | 82800,1.0 831 | 82900,1.0 832 | 83000,1.0 833 | 83100,1.0 834 | 83200,1.0 835 | 83300,1.0 836 | 83400,1.0 837 | 83500,1.0 838 | 83600,1.0 839 | 83700,1.0 840 | 83800,1.0 841 | 83900,1.0 842 | 84000,1.0 843 | 84100,1.0 844 | 84200,1.0 845 | 84300,1.0 846 | 84400,1.0 847 | 84500,1.0 848 | 84600,1.0 849 | 84700,1.0 850 | 84800,1.0 851 | 84900,1.0 852 | 85000,1.0 853 | 85100,1.0 854 | 85200,1.0 855 | 85300,1.0 856 | 85400,1.0 857 | 85500,1.0 858 | 85600,1.0 859 | 85700,1.0 860 | 85800,1.0 861 | 85900,1.0 862 | 86000,1.0 863 | 86100,1.0 864 | 86200,1.0 865 | 86300,1.0 866 | 86400,1.0 867 | 86500,1.0 868 | 86600,1.0 869 | 86700,1.0 870 | 86800,1.0 871 | 86900,1.0 872 | 87000,1.0 873 | 87100,1.0 874 | 87200,1.0 875 | 87300,1.0 876 | 87400,1.0 877 | 87500,1.0 878 | 87600,1.0 879 | 87700,1.0 880 | 87800,1.0 881 | 87900,1.0 882 | 88000,1.0 883 | 88100,1.0 884 | 88200,1.0 885 | 88300,1.0 886 | 88400,1.0 887 | 88500,1.0 888 | 88600,1.0 889 | 88700,1.0 890 | 88800,1.0 891 | 88900,1.0 892 | 89000,1.0 893 | 89100,1.0 894 | 89200,1.0 895 | 89300,1.0 896 | 89400,1.0 897 | 89500,1.0 898 | 89600,1.0 899 | 89700,1.0 900 | 89800,1.0 901 | 89900,1.0 902 | 90000,1.0 903 | 90100,1.0 904 | 90200,1.0 905 | 90300,1.0 906 | 90400,1.0 907 | 90500,1.0 908 | 90600,1.0 909 | 90700,1.0 910 | 90800,1.0 911 | 90900,1.0 912 | 91000,1.0 913 | 91100,1.0 914 | 91200,1.0 915 | 91300,1.0 916 | 91400,1.0 917 | 91500,1.0 918 | 91600,1.0 919 | 91700,1.0 
920 | 91800,1.0 921 | 91900,1.0 922 | 92000,1.0 923 | 92100,1.0 924 | 92200,1.0 925 | 92300,1.0 926 | 92400,1.0 927 | 92500,1.0 928 | 92600,1.0 929 | 92700,1.0 930 | 92800,1.0 931 | 92900,1.0 932 | 93000,1.0 933 | 93100,1.0 934 | 93200,1.0 935 | 93300,1.0 936 | 93400,1.0 937 | 93500,1.0 938 | 93600,1.0 939 | 93700,1.0 940 | 93800,1.0 941 | 93900,1.0 942 | 94000,1.0 943 | 94100,1.0 944 | 94200,1.0 945 | 94300,1.0 946 | 94400,1.0 947 | 94500,1.0 948 | 94600,1.0 949 | 94700,1.0 950 | 94800,1.0 951 | 94900,1.0 952 | 95000,1.0 953 | 95100,1.0 954 | 95200,1.0 955 | 95300,1.0 956 | 95400,1.0 957 | 95500,1.0 958 | 95600,1.0 959 | 95700,1.0 960 | 95800,1.0 961 | 95900,1.0 962 | 96000,1.0 963 | 96100,1.0 964 | 96200,1.0 965 | 96300,1.0 966 | 96400,1.0 967 | 96500,1.0 968 | 96600,1.0 969 | 96700,1.0 970 | 96800,1.0 971 | 96900,1.0 972 | 97000,1.0 973 | 97100,1.0 974 | 97200,1.0 975 | 97300,1.0 976 | 97400,1.0 977 | 97500,1.0 978 | 97600,1.0 979 | 97700,1.0 980 | 97800,1.0 981 | 97900,1.0 982 | 98000,1.0 983 | 98100,1.0 984 | 98200,1.0 985 | 98300,1.0 986 | 98400,1.0 987 | 98500,1.0 988 | 98600,1.0 989 | 98700,1.0 990 | 98800,1.0 991 | 98900,1.0 992 | 99000,1.0 993 | 99100,1.0 994 | 99200,1.0 995 | 99300,1.0 996 | 99400,1.0 997 | 99500,1.0 998 | 99600,1.0 999 | 99700,1.0 1000 | 99800,1.0 1001 | 99900,1.0 1002 | 100000,1.0 1003 | 100100,1.0 1004 | 100200,1.0 1005 | 100300,1.0 1006 | 100400,1.0 1007 | 100500,1.0 1008 | 100600,1.0 1009 | 100700,1.0 1010 | 100800,1.0 1011 | 100900,1.0 1012 | 101000,1.0 1013 | 101100,1.0 1014 | 101200,1.0 1015 | 101300,1.0 1016 | 101400,1.0 1017 | 101500,1.0 1018 | 101600,1.0 1019 | 101700,1.0 1020 | 101800,1.0 1021 | 101900,1.0 1022 | 102000,1.0 1023 | 102100,1.0 1024 | 102200,1.0 1025 | 102300,1.0 1026 | 102400,1.0 1027 | 102500,1.0 1028 | 102600,1.0 1029 | 102700,1.0 1030 | 102800,1.0 1031 | 102900,1.0 1032 | 103000,1.0 1033 | 103100,1.0 1034 | 103200,1.0 1035 | 103300,1.0 1036 | 103400,1.0 1037 | 103500,1.0 1038 | 103600,1.0 1039 | 103700,1.0 
1040 | 103800,1.0 1041 | 103900,1.0 1042 | 104000,1.0 1043 | 104100,1.0 1044 | 104200,1.0 1045 | 104300,1.0 1046 | 104400,1.0 1047 | 104500,1.0 1048 | 104600,1.0 1049 | 104700,1.0 1050 | 104800,1.0 1051 | 104900,1.0 1052 | 105000,1.0 1053 | 105100,1.0 1054 | 105200,1.0 1055 | 105300,1.0 1056 | 105400,1.0 1057 | 105500,1.0 1058 | 105600,1.0 1059 | 105700,1.0 1060 | 105800,1.0 1061 | 105900,1.0 1062 | 106000,1.0 1063 | 106100,1.0 1064 | 106200,1.0 1065 | 106300,1.0 1066 | 106400,1.0 1067 | 106500,1.0 1068 | 106600,1.0 1069 | 106700,1.0 1070 | 106800,1.0 1071 | 106900,1.0 1072 | 107000,1.0 1073 | 107100,1.0 1074 | 107200,1.0 1075 | -------------------------------------------------------------------------------- /results/mnist/3_iter_val_acc.csv: -------------------------------------------------------------------------------- 1 | step,val_acc 2 | 0,0.0961538461538 3 | 500,0.987980769231 4 | 1000,0.991586538462 5 | 1500,0.99358974359 6 | 2000,0.993790064103 7 | 2500,0.994991987179 8 | 3000,0.994991987179 9 | 3500,0.994391025641 10 | 4000,0.994991987179 11 | 4500,0.994791666667 12 | 5000,0.995192307692 13 | 5500,0.996394230769 14 | 6000,0.994190705128 15 | 6500,0.994991987179 16 | 7000,0.995592948718 17 | 7500,0.994991987179 18 | 8000,0.994991987179 19 | 8500,0.994991987179 20 | 9000,0.994391025641 21 | 9500,0.994791666667 22 | 10000,0.994591346154 23 | 10500,0.994991987179 24 | 11000,0.994190705128 25 | 11500,0.994991987179 26 | 12000,0.995592948718 27 | 12500,0.995192307692 28 | 13000,0.995192307692 29 | 13500,0.995192307692 30 | 14000,0.995392628205 31 | 14500,0.995592948718 32 | 15000,0.994591346154 33 | 15500,0.994591346154 34 | 16000,0.994991987179 35 | 16500,0.995192307692 36 | 17000,0.995392628205 37 | 17500,0.995192307692 38 | 18000,0.995592948718 39 | 18500,0.995192307692 40 | 19000,0.994791666667 41 | 19500,0.994991987179 42 | 20000,0.994991987179 43 | 20500,0.994591346154 44 | 21000,0.995192307692 45 | 21500,0.994190705128 46 | 22000,0.994791666667 47 | 
22500,0.995192307692 48 | 23000,0.994991987179 49 | 23500,0.995192307692 50 | 24000,0.994791666667 51 | 24500,0.995192307692 52 | 25000,0.994791666667 53 | 25500,0.993990384615 54 | 26000,0.994591346154 55 | 26500,0.994991987179 56 | 27000,0.994391025641 57 | 27500,0.994591346154 58 | 28000,0.994391025641 59 | 28500,0.994591346154 60 | 29000,0.994791666667 61 | 29500,0.995192307692 62 | 30000,0.994991987179 63 | 30500,0.994591346154 64 | 31000,0.994991987179 65 | 31500,0.995993589744 66 | 32000,0.994791666667 67 | 32500,0.995192307692 68 | 33000,0.995392628205 69 | 33500,0.994791666667 70 | 34000,0.994591346154 71 | 34500,0.994791666667 72 | 35000,0.995592948718 73 | 35500,0.994991987179 74 | 36000,0.994391025641 75 | 36500,0.995192307692 76 | 37000,0.994991987179 77 | 37500,0.994991987179 78 | 38000,0.995592948718 79 | 38500,0.994791666667 80 | 39000,0.994591346154 81 | 39500,0.994791666667 82 | 40000,0.994591346154 83 | 40500,0.994791666667 84 | 41000,0.994791666667 85 | 41500,0.994591346154 86 | 42000,0.994190705128 87 | 42500,0.994591346154 88 | 43000,0.995192307692 89 | 43500,0.995192307692 90 | 44000,0.994591346154 91 | 44500,0.995192307692 92 | 45000,0.994591346154 93 | 45500,0.994991987179 94 | 46000,0.994791666667 95 | 46500,0.994190705128 96 | 47000,0.994791666667 97 | 47500,0.994591346154 98 | 48000,0.994591346154 99 | 48500,0.994991987179 100 | 49000,0.994991987179 101 | 49500,0.994991987179 102 | 50000,0.994791666667 103 | 50500,0.994791666667 104 | 51000,0.994391025641 105 | 51500,0.994391025641 106 | 52000,0.994791666667 107 | 52500,0.994791666667 108 | 53000,0.994591346154 109 | 53500,0.994791666667 110 | 54000,0.995592948718 111 | 54500,0.994791666667 112 | 55000,0.995392628205 113 | 55500,0.994991987179 114 | 56000,0.995392628205 115 | 56500,0.994991987179 116 | 57000,0.995192307692 117 | 57500,0.994991987179 118 | 58000,0.994991987179 119 | 58500,0.994591346154 120 | 59000,0.995392628205 121 | 59500,0.994391025641 122 | 60000,0.995392628205 123 | 
60500,0.995192307692 124 | 61000,0.995192307692 125 | 61500,0.994991987179 126 | 62000,0.994791666667 127 | 62500,0.994791666667 128 | 63000,0.994591346154 129 | 63500,0.994591346154 130 | 64000,0.994791666667 131 | 64500,0.994791666667 132 | 65000,0.994991987179 133 | 65500,0.994991987179 134 | 66000,0.995192307692 135 | 66500,0.993990384615 136 | 67000,0.994991987179 137 | 67500,0.994391025641 138 | 68000,0.994991987179 139 | 68500,0.994991987179 140 | 69000,0.995592948718 141 | 69500,0.995793269231 142 | 70000,0.994791666667 143 | 70500,0.994991987179 144 | 71000,0.995392628205 145 | 71500,0.995192307692 146 | 72000,0.994391025641 147 | 72500,0.994591346154 148 | 73000,0.994991987179 149 | 73500,0.994391025641 150 | 74000,0.994991987179 151 | 74500,0.994991987179 152 | 75000,0.994791666667 153 | 75500,0.994791666667 154 | 76000,0.994991987179 155 | 76500,0.994591346154 156 | 77000,0.994791666667 157 | 77500,0.994991987179 158 | 78000,0.994991987179 159 | 78500,0.994391025641 160 | 79000,0.994190705128 161 | 79500,0.994791666667 162 | 80000,0.994791666667 163 | 80500,0.994791666667 164 | 81000,0.994391025641 165 | 81500,0.994791666667 166 | 82000,0.994791666667 167 | 82500,0.995192307692 168 | 83000,0.994391025641 169 | 83500,0.994791666667 170 | 84000,0.994190705128 171 | 84500,0.995392628205 172 | 85000,0.994791666667 173 | 85500,0.995192307692 174 | 86000,0.995192307692 175 | 86500,0.995192307692 176 | 87000,0.994991987179 177 | 87500,0.994791666667 178 | 88000,0.995192307692 179 | 88500,0.994591346154 180 | 89000,0.994591346154 181 | 89500,0.994190705128 182 | 90000,0.994591346154 183 | 90500,0.994391025641 184 | 91000,0.994391025641 185 | 91500,0.993990384615 186 | 92000,0.993990384615 187 | 92500,0.994391025641 188 | 93000,0.993790064103 189 | 93500,0.994391025641 190 | 94000,0.995392628205 191 | 94500,0.994591346154 192 | 95000,0.994791666667 193 | 95500,0.994991987179 194 | 96000,0.994791666667 195 | 96500,0.994391025641 196 | 97000,0.994391025641 197 | 
97500,0.994391025641 198 | 98000,0.994391025641 199 | 98500,0.994391025641 200 | 99000,0.994791666667 201 | 99500,0.993990384615 202 | 100000,0.994791666667 203 | 100500,0.994190705128 204 | 101000,0.994591346154 205 | 101500,0.994391025641 206 | 102000,0.994991987179 207 | 102500,0.994791666667 208 | 103000,0.994591346154 209 | 103500,0.994391025641 210 | 104000,0.994391025641 211 | 104500,0.994991987179 212 | 105000,0.994991987179 213 | 105500,0.994591346154 214 | 106000,0.994391025641 215 | 106500,0.994190705128 216 | 107000,0.994190705128 217 | -------------------------------------------------------------------------------- /results/mnist/4_iter_train_acc.csv: -------------------------------------------------------------------------------- 1 | step,train_acc 2 | 0,0.109375 3 | 100,0.9453125 4 | 200,0.9609375 5 | 300,0.9765625 6 | 400,0.984375 7 | 500,0.96875 8 | 600,1.0 9 | 700,0.9765625 10 | 800,0.9921875 11 | 900,0.9765625 12 | 1000,0.9921875 13 | 1100,1.0 14 | 1200,1.0 15 | 1300,0.9921875 16 | 1400,1.0 17 | 1500,1.0 18 | 1600,0.9921875 19 | 1700,1.0 20 | 1800,0.9921875 21 | 1900,1.0 22 | 2000,0.984375 23 | 2100,1.0 24 | 2200,0.9921875 25 | 2300,0.9921875 26 | 2400,1.0 27 | 2500,1.0 28 | 2600,1.0 29 | 2700,1.0 30 | 2800,0.9921875 31 | 2900,1.0 32 | 3000,1.0 33 | 3100,1.0 34 | 3200,1.0 35 | 3300,1.0 36 | 3400,1.0 37 | 3500,1.0 38 | 3600,1.0 39 | 3700,0.9921875 40 | 3800,1.0 41 | 3900,1.0 42 | 4000,1.0 43 | 4100,1.0 44 | 4200,1.0 45 | 4300,1.0 46 | 4400,1.0 47 | 4500,1.0 48 | 4600,1.0 49 | 4700,1.0 50 | 4800,1.0 51 | 4900,1.0 52 | 5000,1.0 53 | 5100,1.0 54 | 5200,1.0 55 | 5300,1.0 56 | 5400,1.0 57 | 5500,1.0 58 | 5600,1.0 59 | 5700,1.0 60 | 5800,1.0 61 | 5900,1.0 62 | 6000,1.0 63 | 6100,1.0 64 | 6200,1.0 65 | 6300,1.0 66 | 6400,1.0 67 | 6500,1.0 68 | 6600,1.0 69 | 6700,1.0 70 | 6800,1.0 71 | 6900,1.0 72 | 7000,1.0 73 | 7100,1.0 74 | 7200,1.0 75 | 7300,0.9921875 76 | 7400,1.0 77 | 7500,1.0 78 | 7600,1.0 79 | 7700,0.9921875 80 | 7800,1.0 81 | 7900,1.0 82 | 
8000,1.0 83 | 8100,1.0 84 | 8200,1.0 85 | 8300,1.0 86 | 8400,1.0 87 | 8500,1.0 88 | 8600,1.0 89 | 8700,1.0 90 | 8800,1.0 91 | 8900,1.0 92 | 9000,1.0 93 | 9100,1.0 94 | 9200,1.0 95 | 9300,1.0 96 | 9400,1.0 97 | 9500,1.0 98 | 9600,1.0 99 | 9700,1.0 100 | 9800,1.0 101 | 9900,1.0 102 | 10000,1.0 103 | 10100,1.0 104 | 10200,1.0 105 | 10300,1.0 106 | 10400,1.0 107 | 10500,1.0 108 | 10600,1.0 109 | 10700,1.0 110 | 10800,1.0 111 | 10900,1.0 112 | 11000,1.0 113 | 11100,1.0 114 | 11200,1.0 115 | 11300,1.0 116 | 11400,1.0 117 | 11500,1.0 118 | 11600,1.0 119 | 11700,1.0 120 | 11800,1.0 121 | 11900,1.0 122 | 12000,1.0 123 | 12100,1.0 124 | 12200,1.0 125 | 12300,1.0 126 | 12400,1.0 127 | 12500,1.0 128 | 12600,1.0 129 | 12700,1.0 130 | 12800,1.0 131 | 12900,1.0 132 | 13000,1.0 133 | 13100,1.0 134 | 13200,1.0 135 | 13300,1.0 136 | 13400,1.0 137 | 13500,1.0 138 | 13600,1.0 139 | 13700,1.0 140 | 13800,1.0 141 | 13900,1.0 142 | 14000,1.0 143 | 14100,1.0 144 | 14200,1.0 145 | 14300,1.0 146 | 14400,1.0 147 | 14500,1.0 148 | 14600,1.0 149 | 14700,1.0 150 | 14800,1.0 151 | 14900,1.0 152 | 15000,1.0 153 | 15100,1.0 154 | 15200,1.0 155 | 15300,1.0 156 | 15400,1.0 157 | 15500,1.0 158 | 15600,1.0 159 | 15700,1.0 160 | 15800,1.0 161 | 15900,1.0 162 | 16000,1.0 163 | 16100,1.0 164 | 16200,1.0 165 | 16300,1.0 166 | 16400,1.0 167 | 16500,1.0 168 | 16600,1.0 169 | 16700,1.0 170 | 16800,1.0 171 | 16900,1.0 172 | 17000,1.0 173 | 17100,1.0 174 | 17200,1.0 175 | 17300,1.0 176 | 17400,1.0 177 | 17500,1.0 178 | 17600,1.0 179 | 17700,1.0 180 | 17800,1.0 181 | 17900,1.0 182 | 18000,1.0 183 | 18100,1.0 184 | 18200,1.0 185 | 18300,0.9921875 186 | 18400,1.0 187 | 18500,1.0 188 | 18600,1.0 189 | 18700,1.0 190 | 18800,1.0 191 | 18900,1.0 192 | 19000,1.0 193 | 19100,1.0 194 | 19200,1.0 195 | 19300,1.0 196 | 19400,1.0 197 | 19500,1.0 198 | 19600,1.0 199 | 19700,1.0 200 | 19800,1.0 201 | 19900,1.0 202 | 20000,1.0 203 | 20100,1.0 204 | 20200,1.0 205 | 20300,1.0 206 | 20400,1.0 207 | 20500,1.0 208 | 20600,1.0 209 
| 20700,1.0 210 | 20800,1.0 211 | 20900,1.0 212 | 21000,1.0 213 | 21100,1.0 214 | 21200,1.0 215 | 21300,1.0 216 | 21400,1.0 217 | 21500,1.0 218 | 21600,1.0 219 | 21700,1.0 220 | 21800,1.0 221 | 21900,1.0 222 | 22000,1.0 223 | 22100,1.0 224 | 22200,1.0 225 | 22300,1.0 226 | 22400,1.0 227 | 22500,1.0 228 | 22600,1.0 229 | 22700,1.0 230 | 22800,1.0 231 | 22900,1.0 232 | 23000,1.0 233 | 23100,1.0 234 | 23200,1.0 235 | 23300,1.0 236 | 23400,1.0 237 | 23500,1.0 238 | 23600,1.0 239 | 23700,1.0 240 | 23800,1.0 241 | 23900,1.0 242 | 24000,1.0 243 | 24100,1.0 244 | 24200,1.0 245 | 24300,1.0 246 | 24400,1.0 247 | 24500,1.0 248 | 24600,1.0 249 | 24700,1.0 250 | 24800,1.0 251 | 24900,1.0 252 | 25000,1.0 253 | 25100,1.0 254 | 25200,1.0 255 | 25300,1.0 256 | 25400,1.0 257 | 25500,1.0 258 | 25600,1.0 259 | 25700,1.0 260 | 25800,1.0 261 | 25900,1.0 262 | 26000,1.0 263 | 26100,1.0 264 | 26200,1.0 265 | 26300,1.0 266 | 26400,1.0 267 | 26500,1.0 268 | 26600,1.0 269 | 26700,1.0 270 | 26800,1.0 271 | 26900,1.0 272 | 27000,1.0 273 | 27100,1.0 274 | 27200,1.0 275 | 27300,1.0 276 | 27400,1.0 277 | 27500,1.0 278 | 27600,1.0 279 | 27700,1.0 280 | 27800,1.0 281 | 27900,1.0 282 | 28000,1.0 283 | 28100,1.0 284 | 28200,1.0 285 | 28300,1.0 286 | 28400,1.0 287 | 28500,1.0 288 | 28600,1.0 289 | 28700,1.0 290 | 28800,1.0 291 | 28900,1.0 292 | 29000,1.0 293 | 29100,1.0 294 | 29200,1.0 295 | 29300,1.0 296 | 29400,1.0 297 | 29500,1.0 298 | 29600,1.0 299 | 29700,1.0 300 | 29800,1.0 301 | 29900,1.0 302 | 30000,1.0 303 | 30100,1.0 304 | 30200,1.0 305 | 30300,1.0 306 | 30400,1.0 307 | 30500,1.0 308 | 30600,1.0 309 | 30700,1.0 310 | 30800,1.0 311 | 30900,1.0 312 | 31000,1.0 313 | 31100,1.0 314 | 31200,1.0 315 | 31300,1.0 316 | 31400,1.0 317 | 31500,1.0 318 | 31600,1.0 319 | 31700,1.0 320 | 31800,1.0 321 | 31900,1.0 322 | 32000,1.0 323 | 32100,1.0 324 | 32200,1.0 325 | 32300,1.0 326 | 32400,1.0 327 | 32500,1.0 328 | 32600,1.0 329 | 32700,1.0 330 | 32800,1.0 331 | 32900,1.0 332 | 33000,1.0 333 | 33100,1.0 334 
| 33200,1.0 335 | 33300,1.0 336 | 33400,1.0 337 | 33500,1.0 338 | 33600,1.0 339 | 33700,1.0 340 | 33800,1.0 341 | 33900,1.0 342 | 34000,1.0 343 | 34100,1.0 344 | 34200,1.0 345 | 34300,1.0 346 | 34400,1.0 347 | 34500,1.0 348 | 34600,1.0 349 | 34700,1.0 350 | 34800,1.0 351 | 34900,1.0 352 | 35000,1.0 353 | 35100,1.0 354 | 35200,1.0 355 | 35300,1.0 356 | 35400,1.0 357 | 35500,1.0 358 | 35600,1.0 359 | 35700,1.0 360 | 35800,1.0 361 | 35900,1.0 362 | 36000,1.0 363 | 36100,1.0 364 | 36200,1.0 365 | 36300,1.0 366 | 36400,1.0 367 | 36500,1.0 368 | 36600,1.0 369 | 36700,1.0 370 | 36800,1.0 371 | 36900,1.0 372 | 37000,1.0 373 | 37100,1.0 374 | 37200,1.0 375 | 37300,1.0 376 | 37400,1.0 377 | 37500,1.0 378 | 37600,1.0 379 | 37700,1.0 380 | 37800,1.0 381 | 37900,1.0 382 | 38000,1.0 383 | 38100,1.0 384 | 38200,1.0 385 | 38300,1.0 386 | 38400,1.0 387 | 38500,1.0 388 | 38600,1.0 389 | 38700,1.0 390 | 38800,1.0 391 | 38900,1.0 392 | 39000,1.0 393 | 39100,1.0 394 | 39200,1.0 395 | 39300,1.0 396 | 39400,1.0 397 | 39500,1.0 398 | 39600,1.0 399 | 39700,1.0 400 | 39800,1.0 401 | 39900,1.0 402 | 40000,1.0 403 | 40100,1.0 404 | 40200,1.0 405 | 40300,1.0 406 | 40400,1.0 407 | 40500,1.0 408 | 40600,1.0 409 | 40700,1.0 410 | 40800,1.0 411 | 40900,1.0 412 | 41000,1.0 413 | 41100,1.0 414 | 41200,1.0 415 | 41300,1.0 416 | 41400,1.0 417 | 41500,1.0 418 | 41600,1.0 419 | 41700,1.0 420 | 41800,1.0 421 | 41900,1.0 422 | 42000,1.0 423 | 42100,1.0 424 | 42200,1.0 425 | 42300,1.0 426 | 42400,1.0 427 | 42500,1.0 428 | 42600,1.0 429 | 42700,1.0 430 | 42800,1.0 431 | 42900,1.0 432 | 43000,1.0 433 | 43100,1.0 434 | 43200,1.0 435 | 43300,1.0 436 | 43400,1.0 437 | 43500,1.0 438 | 43600,1.0 439 | 43700,1.0 440 | 43800,1.0 441 | 43900,1.0 442 | 44000,1.0 443 | 44100,1.0 444 | 44200,1.0 445 | 44300,1.0 446 | 44400,1.0 447 | 44500,1.0 448 | 44600,1.0 449 | 44700,1.0 450 | 44800,1.0 451 | 44900,1.0 452 | 45000,1.0 453 | 45100,1.0 454 | 45200,1.0 455 | 45300,1.0 456 | 45400,1.0 457 | 45500,1.0 458 | 45600,1.0 459 
| 45700,1.0 460 | 45800,1.0 461 | 45900,1.0 462 | 46000,1.0 463 | 46100,1.0 464 | 46200,1.0 465 | 46300,1.0 466 | 46400,1.0 467 | 46500,1.0 468 | 46600,1.0 469 | 46700,1.0 470 | 46800,1.0 471 | 46900,1.0 472 | 47000,1.0 473 | 47100,1.0 474 | 47200,1.0 475 | 47300,1.0 476 | 47400,1.0 477 | 47500,1.0 478 | 47600,1.0 479 | 47700,1.0 480 | 47800,1.0 481 | 47900,1.0 482 | 48000,1.0 483 | 48100,1.0 484 | 48200,1.0 485 | 48300,1.0 486 | 48400,1.0 487 | 48500,1.0 488 | 48600,1.0 489 | 48700,1.0 490 | 48800,1.0 491 | 48900,1.0 492 | 49000,1.0 493 | 49100,1.0 494 | 49200,1.0 495 | 49300,1.0 496 | 49400,1.0 497 | 49500,1.0 498 | 49600,1.0 499 | 49700,1.0 500 | 49800,1.0 501 | 49900,1.0 502 | 50000,1.0 503 | 50100,1.0 504 | 50200,1.0 505 | 50300,1.0 506 | 50400,1.0 507 | 50500,1.0 508 | 50600,1.0 509 | 50700,1.0 510 | 50800,1.0 511 | 50900,1.0 512 | 51000,1.0 513 | 51100,1.0 514 | 51200,1.0 515 | 51300,1.0 516 | 51400,1.0 517 | 51500,1.0 518 | 51600,1.0 519 | 51700,1.0 520 | 51800,1.0 521 | 51900,1.0 522 | 52000,1.0 523 | 52100,1.0 524 | 52200,1.0 525 | 52300,1.0 526 | 52400,1.0 527 | 52500,1.0 528 | 52600,1.0 529 | 52700,1.0 530 | 52800,1.0 531 | 52900,1.0 532 | 53000,1.0 533 | 53100,1.0 534 | 53200,1.0 535 | 53300,1.0 536 | 53400,1.0 537 | 53500,1.0 538 | 53600,1.0 539 | 53700,1.0 540 | 53800,1.0 541 | 53900,1.0 542 | 54000,1.0 543 | 54100,1.0 544 | 54200,1.0 545 | 54300,1.0 546 | 54400,1.0 547 | 54500,1.0 548 | 54600,1.0 549 | 54700,1.0 550 | 54800,1.0 551 | 54900,1.0 552 | 55000,1.0 553 | 55100,1.0 554 | 55200,1.0 555 | 55300,1.0 556 | 55400,1.0 557 | 55500,1.0 558 | 55600,1.0 559 | 55700,1.0 560 | 55800,1.0 561 | 55900,1.0 562 | 56000,1.0 563 | 56100,1.0 564 | 56200,1.0 565 | 56300,1.0 566 | 56400,1.0 567 | 56500,1.0 568 | 56600,1.0 569 | 56700,1.0 570 | 56800,1.0 571 | 56900,1.0 572 | 57000,1.0 573 | 57100,1.0 574 | 57200,1.0 575 | 57300,1.0 576 | 57400,1.0 577 | 57500,1.0 578 | 57600,1.0 579 | 57700,1.0 580 | 57800,1.0 581 | 57900,1.0 582 | 58000,1.0 583 | 58100,1.0 584 
| 58200,1.0 585 | 58300,1.0 586 | 58400,1.0 587 | 58500,1.0 588 | 58600,1.0 589 | 58700,1.0 590 | 58800,1.0 591 | 58900,1.0 592 | 59000,1.0 593 | 59100,1.0 594 | 59200,1.0 595 | 59300,1.0 596 | 59400,1.0 597 | 59500,1.0 598 | 59600,1.0 599 | 59700,1.0 600 | 59800,1.0 601 | 59900,1.0 602 | 60000,1.0 603 | 60100,1.0 604 | 60200,1.0 605 | 60300,1.0 606 | 60400,1.0 607 | 60500,1.0 608 | 60600,1.0 609 | 60700,1.0 610 | 60800,1.0 611 | 60900,1.0 612 | 61000,1.0 613 | 61100,1.0 614 | 61200,1.0 615 | 61300,1.0 616 | 61400,1.0 617 | 61500,1.0 618 | 61600,1.0 619 | 61700,1.0 620 | 61800,1.0 621 | 61900,1.0 622 | 62000,1.0 623 | 62100,1.0 624 | 62200,1.0 625 | 62300,1.0 626 | 62400,1.0 627 | 62500,1.0 628 | 62600,1.0 629 | 62700,1.0 630 | 62800,1.0 631 | 62900,1.0 632 | 63000,1.0 633 | 63100,1.0 634 | 63200,1.0 635 | 63300,1.0 636 | 63400,1.0 637 | 63500,1.0 638 | 63600,1.0 639 | 63700,1.0 640 | 63800,1.0 641 | 63900,1.0 642 | 64000,1.0 643 | 64100,1.0 644 | 64200,1.0 645 | 64300,1.0 646 | 64400,1.0 647 | 64500,1.0 648 | 64600,1.0 649 | 64700,1.0 650 | 64800,1.0 651 | 64900,1.0 652 | 65000,1.0 653 | 65100,1.0 654 | 65200,1.0 655 | 65300,1.0 656 | 65400,1.0 657 | 65500,1.0 658 | 65600,1.0 659 | 65700,1.0 660 | 65800,1.0 661 | 65900,1.0 662 | 66000,1.0 663 | 66100,1.0 664 | 66200,1.0 665 | 66300,1.0 666 | 66400,1.0 667 | 66500,1.0 668 | 66600,1.0 669 | 66700,1.0 670 | 66800,1.0 671 | 66900,1.0 672 | 67000,1.0 673 | 67100,1.0 674 | 67200,1.0 675 | 67300,1.0 676 | 67400,1.0 677 | 67500,1.0 678 | 67600,1.0 679 | 67700,1.0 680 | 67800,1.0 681 | 67900,1.0 682 | 68000,1.0 683 | 68100,1.0 684 | 68200,1.0 685 | 68300,1.0 686 | 68400,1.0 687 | 68500,1.0 688 | 68600,1.0 689 | 68700,1.0 690 | 68800,1.0 691 | 68900,1.0 692 | 69000,1.0 693 | 69100,1.0 694 | 69200,1.0 695 | 69300,1.0 696 | 69400,1.0 697 | 69500,1.0 698 | 69600,1.0 699 | 69700,1.0 700 | 69800,1.0 701 | 69900,1.0 702 | 70000,1.0 703 | 70100,1.0 704 | 70200,1.0 705 | 70300,1.0 706 | 70400,1.0 707 | 70500,1.0 708 | 70600,1.0 709 
| 70700,1.0 710 | 70800,1.0 711 | 70900,1.0 712 | 71000,1.0 713 | 71100,1.0 714 | 71200,1.0 715 | 71300,1.0 716 | 71400,1.0 717 | 71500,1.0 718 | 71600,1.0 719 | 71700,1.0 720 | 71800,1.0 721 | 71900,1.0 722 | 72000,1.0 723 | 72100,1.0 724 | 72200,1.0 725 | 72300,1.0 726 | 72400,1.0 727 | 72500,1.0 728 | 72600,1.0 729 | 72700,1.0 730 | 72800,1.0 731 | 72900,1.0 732 | 73000,1.0 733 | 73100,1.0 734 | 73200,1.0 735 | 73300,1.0 736 | 73400,1.0 737 | 73500,1.0 738 | 73600,1.0 739 | 73700,1.0 740 | 73800,1.0 741 | 73900,1.0 742 | 74000,1.0 743 | 74100,1.0 744 | 74200,1.0 745 | 74300,1.0 746 | 74400,1.0 747 | 74500,1.0 748 | 74600,1.0 749 | 74700,1.0 750 | 74800,1.0 751 | 74900,1.0 752 | 75000,1.0 753 | 75100,1.0 754 | 75200,1.0 755 | 75300,1.0 756 | 75400,1.0 757 | 75500,1.0 758 | 75600,1.0 759 | 75700,1.0 760 | 75800,1.0 761 | 75900,1.0 762 | 76000,1.0 763 | 76100,1.0 764 | 76200,1.0 765 | 76300,1.0 766 | 76400,1.0 767 | 76500,1.0 768 | 76600,1.0 769 | 76700,1.0 770 | 76800,1.0 771 | 76900,1.0 772 | 77000,1.0 773 | 77100,1.0 774 | 77200,1.0 775 | 77300,1.0 776 | 77400,1.0 777 | 77500,1.0 778 | 77600,1.0 779 | 77700,1.0 780 | 77800,1.0 781 | 77900,1.0 782 | 78000,1.0 783 | 78100,1.0 784 | 78200,1.0 785 | 78300,1.0 786 | 78400,1.0 787 | 78500,1.0 788 | 78600,1.0 789 | 78700,1.0 790 | 78800,1.0 791 | 78900,1.0 792 | 79000,1.0 793 | 79100,1.0 794 | 79200,1.0 795 | 79300,1.0 796 | 79400,1.0 797 | 79500,1.0 798 | 79600,1.0 799 | 79700,1.0 800 | 79800,1.0 801 | 79900,1.0 802 | 80000,1.0 803 | 80100,1.0 804 | 80200,1.0 805 | 80300,1.0 806 | 80400,1.0 807 | 80500,1.0 808 | 80600,1.0 809 | 80700,1.0 810 | 80800,1.0 811 | 80900,1.0 812 | 81000,1.0 813 | 81100,1.0 814 | 81200,1.0 815 | 81300,1.0 816 | 81400,1.0 817 | 81500,1.0 818 | 81600,1.0 819 | 81700,1.0 820 | 81800,1.0 821 | 81900,1.0 822 | 82000,1.0 823 | 82100,1.0 824 | 82200,1.0 825 | 82300,1.0 826 | 82400,1.0 827 | 82500,1.0 828 | 82600,1.0 829 | 82700,1.0 830 | 82800,1.0 831 | 82900,1.0 832 | 83000,1.0 833 | 83100,1.0 834 
| 83200,1.0 835 | 83300,1.0 836 | 83400,1.0 837 | 83500,1.0 838 | 83600,1.0 839 | 83700,1.0 840 | 83800,1.0 841 | 83900,1.0 842 | 84000,1.0 843 | 84100,1.0 844 | 84200,1.0 845 | 84300,1.0 846 | 84400,1.0 847 | 84500,1.0 848 | 84600,1.0 849 | 84700,1.0 850 | 84800,1.0 851 | 84900,1.0 852 | 85000,1.0 853 | 85100,1.0 854 | 85200,1.0 855 | 85300,1.0 856 | 85400,1.0 857 | 85500,1.0 858 | 85600,1.0 859 | 85700,1.0 860 | 85800,1.0 861 | 85900,1.0 862 | 86000,1.0 863 | 86100,1.0 864 | 86200,1.0 865 | 86300,1.0 866 | 86400,1.0 867 | 86500,1.0 868 | 86600,1.0 869 | 86700,1.0 870 | 86800,1.0 871 | 86900,1.0 872 | 87000,1.0 873 | 87100,1.0 874 | 87200,1.0 875 | 87300,1.0 876 | 87400,1.0 877 | 87500,1.0 878 | 87600,1.0 879 | 87700,1.0 880 | 87800,1.0 881 | 87900,1.0 882 | 88000,1.0 883 | 88100,1.0 884 | 88200,1.0 885 | 88300,1.0 886 | 88400,1.0 887 | 88500,1.0 888 | 88600,1.0 889 | 88700,1.0 890 | 88800,1.0 891 | 88900,1.0 892 | 89000,1.0 893 | 89100,1.0 894 | 89200,1.0 895 | 89300,1.0 896 | 89400,1.0 897 | 89500,1.0 898 | 89600,1.0 899 | 89700,1.0 900 | 89800,1.0 901 | 89900,1.0 902 | 90000,1.0 903 | 90100,1.0 904 | 90200,1.0 905 | 90300,1.0 906 | 90400,1.0 907 | 90500,1.0 908 | 90600,1.0 909 | 90700,1.0 910 | 90800,1.0 911 | 90900,1.0 912 | 91000,1.0 913 | 91100,1.0 914 | 91200,1.0 915 | 91300,1.0 916 | 91400,1.0 917 | 91500,1.0 918 | 91600,1.0 919 | 91700,1.0 920 | 91800,1.0 921 | 91900,1.0 922 | 92000,1.0 923 | 92100,1.0 924 | 92200,1.0 925 | 92300,1.0 926 | 92400,1.0 927 | 92500,1.0 928 | 92600,1.0 929 | 92700,1.0 930 | 92800,1.0 931 | 92900,1.0 932 | 93000,1.0 933 | 93100,1.0 934 | 93200,1.0 935 | 93300,1.0 936 | 93400,1.0 937 | 93500,1.0 938 | 93600,1.0 939 | 93700,1.0 940 | 93800,1.0 941 | 93900,1.0 942 | 94000,1.0 943 | 94100,1.0 944 | 94200,1.0 945 | 94300,1.0 946 | 94400,1.0 947 | 94500,1.0 948 | 94600,1.0 949 | 94700,1.0 950 | 94800,1.0 951 | 94900,1.0 952 | 95000,1.0 953 | 95100,1.0 954 | 95200,1.0 955 | 95300,1.0 956 | 95400,1.0 957 | 95500,1.0 958 | 95600,1.0 959 
| 95700,1.0 960 | 95800,1.0 961 | 95900,1.0 962 | 96000,1.0 963 | 96100,1.0 964 | 96200,1.0 965 | 96300,1.0 966 | 96400,1.0 967 | 96500,1.0 968 | 96600,1.0 969 | 96700,1.0 970 | 96800,1.0 971 | 96900,1.0 972 | 97000,1.0 973 | 97100,1.0 974 | 97200,1.0 975 | 97300,1.0 976 | 97400,1.0 977 | 97500,1.0 978 | 97600,1.0 979 | 97700,1.0 980 | 97800,1.0 981 | 97900,1.0 982 | 98000,1.0 983 | 98100,1.0 984 | 98200,1.0 985 | 98300,1.0 986 | 98400,1.0 987 | 98500,1.0 988 | 98600,1.0 989 | 98700,1.0 990 | 98800,1.0 991 | 98900,1.0 992 | 99000,1.0 993 | 99100,1.0 994 | 99200,1.0 995 | 99300,1.0 996 | 99400,1.0 997 | 99500,1.0 998 | 99600,1.0 999 | 99700,1.0 1000 | 99800,1.0 1001 | 99900,1.0 1002 | 100000,1.0 1003 | 100100,1.0 1004 | 100200,1.0 1005 | 100300,1.0 1006 | 100400,1.0 1007 | 100500,1.0 1008 | 100600,1.0 1009 | 100700,1.0 1010 | 100800,1.0 1011 | 100900,1.0 1012 | 101000,1.0 1013 | 101100,1.0 1014 | 101200,1.0 1015 | 101300,1.0 1016 | 101400,1.0 1017 | 101500,1.0 1018 | 101600,1.0 1019 | 101700,1.0 1020 | 101800,1.0 1021 | 101900,1.0 1022 | 102000,1.0 1023 | 102100,1.0 1024 | 102200,1.0 1025 | 102300,1.0 1026 | 102400,1.0 1027 | 102500,1.0 1028 | 102600,1.0 1029 | 102700,1.0 1030 | 102800,1.0 1031 | 102900,1.0 1032 | 103000,1.0 1033 | 103100,1.0 1034 | 103200,1.0 1035 | 103300,1.0 1036 | 103400,1.0 1037 | 103500,1.0 1038 | 103600,1.0 1039 | 103700,1.0 1040 | 103800,1.0 1041 | 103900,1.0 1042 | 104000,1.0 1043 | 104100,1.0 1044 | 104200,1.0 1045 | 104300,1.0 1046 | 104400,1.0 1047 | 104500,1.0 1048 | 104600,1.0 1049 | 104700,1.0 1050 | 104800,1.0 1051 | 104900,1.0 1052 | 105000,1.0 1053 | 105100,1.0 1054 | 105200,1.0 1055 | 105300,1.0 1056 | 105400,1.0 1057 | 105500,1.0 1058 | 105600,1.0 1059 | 105700,1.0 1060 | 105800,1.0 1061 | 105900,1.0 1062 | 106000,1.0 1063 | 106100,1.0 1064 | 106200,1.0 1065 | 106300,1.0 1066 | 106400,1.0 1067 | 106500,1.0 1068 | 106600,1.0 1069 | 106700,1.0 1070 | 106800,1.0 1071 | 106900,1.0 1072 | 107000,1.0 1073 | 107100,1.0 1074 | 107200,1.0 
1075 | 107300,1.0 1076 | 107400,1.0 1077 | 107500,1.0 1078 | 107600,1.0 1079 | 107700,1.0 1080 | 107800,1.0 1081 | 107900,1.0 1082 | 108000,1.0 1083 | 108100,1.0 1084 | 108200,1.0 1085 | 108300,1.0 1086 | 108400,1.0 1087 | 108500,1.0 1088 | 108600,1.0 1089 | 108700,1.0 1090 | 108800,1.0 1091 | 108900,1.0 1092 | 109000,1.0 1093 | 109100,1.0 1094 | 109200,1.0 1095 | 109300,1.0 1096 | 109400,1.0 1097 | 109500,1.0 1098 | 109600,1.0 1099 | 109700,1.0 1100 | 109800,1.0 1101 | 109900,1.0 1102 | 110000,1.0 1103 | 110100,1.0 1104 | 110200,1.0 1105 | 110300,1.0 1106 | 110400,1.0 1107 | 110500,1.0 1108 | 110600,1.0 1109 | 110700,1.0 1110 | 110800,1.0 1111 | 110900,1.0 1112 | 111000,1.0 1113 | 111100,1.0 1114 | 111200,1.0 1115 | 111300,1.0 1116 | 111400,1.0 1117 | 111500,1.0 1118 | 111600,1.0 1119 | 111700,1.0 1120 | 111800,1.0 1121 | 111900,1.0 1122 | 112000,1.0 1123 | 112100,1.0 1124 | 112200,1.0 1125 | 112300,1.0 1126 | 112400,1.0 1127 | 112500,1.0 1128 | 112600,1.0 1129 | 112700,1.0 1130 | 112800,1.0 1131 | 112900,1.0 1132 | 113000,1.0 1133 | 113100,1.0 1134 | 113200,1.0 1135 | 113300,1.0 1136 | 113400,1.0 1137 | 113500,1.0 1138 | 113600,1.0 1139 | 113700,1.0 1140 | 113800,1.0 1141 | 113900,1.0 1142 | 114000,1.0 1143 | 114100,1.0 1144 | 114200,1.0 1145 | 114300,1.0 1146 | 114400,1.0 1147 | 114500,1.0 1148 | 114600,1.0 1149 | 114700,1.0 1150 | 114800,1.0 1151 | 114900,1.0 1152 | 115000,1.0 1153 | 115100,1.0 1154 | 115200,1.0 1155 | 115300,1.0 1156 | 115400,1.0 1157 | 115500,1.0 1158 | 115600,1.0 1159 | 115700,1.0 1160 | 115800,1.0 1161 | 115900,1.0 1162 | 116000,1.0 1163 | 116100,1.0 1164 | 116200,1.0 1165 | 116300,1.0 1166 | 116400,1.0 1167 | 116500,1.0 1168 | 116600,1.0 1169 | 116700,1.0 1170 | 116800,1.0 1171 | 116900,1.0 1172 | 117000,1.0 1173 | 117100,1.0 1174 | 117200,1.0 1175 | 117300,1.0 1176 | 117400,1.0 1177 | 117500,1.0 1178 | 117600,1.0 1179 | 117700,1.0 1180 | 117800,1.0 1181 | 117900,1.0 1182 | 118000,1.0 1183 | 118100,1.0 1184 | 118200,1.0 1185 | 118300,1.0 
1186 | 118400,1.0 1187 | 118500,1.0 1188 | 118600,1.0 1189 | 118700,1.0 1190 | 118800,1.0 1191 | 118900,1.0 1192 | 119000,1.0 1193 | 119100,1.0 1194 | 119200,1.0 1195 | 119300,1.0 1196 | 119400,1.0 1197 | 119500,1.0 1198 | 119600,1.0 1199 | 119700,1.0 1200 | 119800,1.0 1201 | 119900,1.0 1202 | 120000,1.0 1203 | 120100,1.0 1204 | 120200,1.0 1205 | 120300,1.0 1206 | 120400,1.0 1207 | 120500,1.0 1208 | 120600,1.0 1209 | 120700,1.0 1210 | 120800,1.0 1211 | 120900,1.0 1212 | 121000,1.0 1213 | 121100,1.0 1214 | 121200,1.0 1215 | 121300,1.0 1216 | 121400,1.0 1217 | 121500,1.0 1218 | 121600,1.0 1219 | 121700,1.0 1220 | 121800,1.0 1221 | 121900,1.0 1222 | 122000,1.0 1223 | 122100,1.0 1224 | 122200,1.0 1225 | 122300,1.0 1226 | 122400,1.0 1227 | 122500,1.0 1228 | 122600,1.0 1229 | 122700,1.0 1230 | 122800,1.0 1231 | 122900,1.0 1232 | 123000,1.0 1233 | 123100,1.0 1234 | 123200,1.0 1235 | 123300,1.0 1236 | 123400,1.0 1237 | 123500,1.0 1238 | 123600,1.0 1239 | 123700,1.0 1240 | 123800,1.0 1241 | 123900,1.0 1242 | 124000,1.0 1243 | 124100,1.0 1244 | 124200,1.0 1245 | 124300,1.0 1246 | 124400,1.0 1247 | 124500,1.0 1248 | 124600,1.0 1249 | 124700,1.0 1250 | 124800,1.0 1251 | 124900,1.0 1252 | 125000,1.0 1253 | 125100,1.0 1254 | 125200,1.0 1255 | 125300,1.0 1256 | 125400,1.0 1257 | 125500,1.0 1258 | 125600,1.0 1259 | 125700,1.0 1260 | 125800,1.0 1261 | 125900,1.0 1262 | 126000,1.0 1263 | 126100,1.0 1264 | 126200,1.0 1265 | 126300,1.0 1266 | 126400,1.0 1267 | 126500,1.0 1268 | 126600,1.0 1269 | 126700,1.0 1270 | 126800,1.0 1271 | 126900,1.0 1272 | 127000,1.0 1273 | 127100,1.0 1274 | 127200,1.0 1275 | 127300,1.0 1276 | 127400,1.0 1277 | 127500,1.0 1278 | 127600,1.0 1279 | 127700,1.0 1280 | 127800,1.0 1281 | 127900,1.0 1282 | 128000,1.0 1283 | 128100,1.0 1284 | 128200,1.0 1285 | 128300,1.0 1286 | 128400,1.0 1287 | 128500,1.0 1288 | 128600,1.0 1289 | 128700,1.0 1290 | 128800,1.0 1291 | 128900,1.0 1292 | 129000,1.0 1293 | 129100,1.0 1294 | 129200,1.0 1295 | 129300,1.0 1296 | 129400,1.0 
1297 | 129500,1.0 1298 | 129600,1.0 1299 | 129700,1.0 1300 | 129800,1.0 1301 | 129900,1.0 1302 | 130000,1.0 1303 | 130100,1.0 1304 | 130200,1.0 1305 | 130300,1.0 1306 | 130400,1.0 1307 | 130500,1.0 1308 | 130600,1.0 1309 | 130700,1.0 1310 | 130800,1.0 1311 | 130900,1.0 1312 | 131000,1.0 1313 | 131100,1.0 1314 | 131200,1.0 1315 | 131300,1.0 1316 | 131400,1.0 1317 | 131500,1.0 1318 | 131600,1.0 1319 | 131700,1.0 1320 | 131800,1.0 1321 | 131900,1.0 1322 | 132000,1.0 1323 | 132100,1.0 1324 | 132200,1.0 1325 | 132300,1.0 1326 | 132400,1.0 1327 | 132500,1.0 1328 | 132600,1.0 1329 | 132700,1.0 1330 | 132800,1.0 1331 | 132900,1.0 1332 | 133000,1.0 1333 | 133100,1.0 1334 | 133200,1.0 1335 | 133300,1.0 1336 | 133400,1.0 1337 | 133500,1.0 1338 | 133600,1.0 1339 | 133700,1.0 1340 | 133800,1.0 1341 | 133900,1.0 1342 | 134000,1.0 1343 | 134100,1.0 1344 | 134200,1.0 1345 | 134300,1.0 1346 | 134400,1.0 1347 | 134500,1.0 1348 | 134600,1.0 1349 | 134700,1.0 1350 | 134800,1.0 1351 | 134900,1.0 1352 | 135000,1.0 1353 | 135100,1.0 1354 | 135200,1.0 1355 | 135300,1.0 1356 | 135400,1.0 1357 | 135500,1.0 1358 | 135600,1.0 1359 | 135700,1.0 1360 | 135800,1.0 1361 | 135900,1.0 1362 | 136000,1.0 1363 | 136100,1.0 1364 | 136200,1.0 1365 | 136300,1.0 1366 | 136400,1.0 1367 | 136500,1.0 1368 | 136600,1.0 1369 | 136700,1.0 1370 | 136800,1.0 1371 | 136900,1.0 1372 | 137000,1.0 1373 | 137100,1.0 1374 | 137200,1.0 1375 | 137300,1.0 1376 | 137400,1.0 1377 | 137500,1.0 1378 | 137600,1.0 1379 | 137700,1.0 1380 | 137800,1.0 1381 | 137900,1.0 1382 | 138000,1.0 1383 | 138100,1.0 1384 | 138200,1.0 1385 | 138300,1.0 1386 | 138400,1.0 1387 | 138500,1.0 1388 | 138600,1.0 1389 | 138700,1.0 1390 | 138800,1.0 1391 | 138900,1.0 1392 | 139000,1.0 1393 | 139100,1.0 1394 | 139200,1.0 1395 | 139300,1.0 1396 | 139400,1.0 1397 | 139500,1.0 1398 | 139600,1.0 1399 | 139700,1.0 1400 | 139800,1.0 1401 | 139900,1.0 1402 | 140000,1.0 1403 | 140100,1.0 1404 | 140200,1.0 1405 | 140300,1.0 1406 | 140400,1.0 1407 | 140500,1.0 
1408 | 140600,1.0 1409 | 140700,1.0 1410 | 140800,1.0 1411 | 140900,1.0 1412 | 141000,1.0 1413 | 141100,1.0 1414 | 141200,1.0 1415 | 141300,1.0 1416 | 141400,1.0 1417 | 141500,1.0 1418 | 141600,1.0 1419 | 141700,1.0 1420 | 141800,1.0 1421 | 141900,1.0 1422 | 142000,1.0 1423 | 142100,1.0 1424 | 142200,1.0 1425 | 142300,1.0 1426 | 142400,1.0 1427 | 142500,1.0 1428 | 142600,1.0 1429 | 142700,1.0 1430 | 142800,1.0 1431 | 142900,1.0 1432 | 143000,1.0 1433 | 143100,1.0 1434 | 143200,1.0 1435 | 143300,1.0 1436 | 143400,1.0 1437 | 143500,1.0 1438 | 143600,1.0 1439 | 143700,1.0 1440 | 143800,1.0 1441 | 143900,1.0 1442 | 144000,1.0 1443 | 144100,1.0 1444 | 144200,1.0 1445 | 144300,1.0 1446 | 144400,1.0 1447 | 144500,1.0 1448 | 144600,1.0 1449 | 144700,1.0 1450 | 144800,1.0 1451 | 144900,1.0 1452 | 145000,1.0 1453 | 145100,1.0 1454 | 145200,1.0 1455 | 145300,1.0 1456 | 145400,1.0 1457 | 145500,1.0 1458 | 145600,1.0 1459 | 145700,1.0 1460 | 145800,1.0 1461 | 145900,1.0 1462 | 146000,1.0 1463 | 146100,1.0 1464 | 146200,1.0 1465 | 146300,1.0 1466 | 146400,1.0 1467 | 146500,1.0 1468 | 146600,1.0 1469 | 146700,1.0 1470 | 146800,1.0 1471 | 146900,1.0 1472 | 147000,1.0 1473 | 147100,1.0 1474 | 147200,1.0 1475 | 147300,1.0 1476 | 147400,1.0 1477 | 147500,1.0 1478 | 147600,1.0 1479 | 147700,1.0 1480 | 147800,1.0 1481 | 147900,1.0 1482 | 148000,1.0 1483 | 148100,1.0 1484 | 148200,1.0 1485 | 148300,1.0 1486 | 148400,1.0 1487 | 148500,1.0 1488 | 148600,1.0 1489 | 148700,1.0 1490 | 148800,1.0 1491 | 148900,1.0 1492 | 149000,1.0 1493 | 149100,1.0 1494 | 149200,1.0 1495 | 149300,1.0 1496 | 149400,1.0 1497 | 149500,1.0 1498 | 149600,1.0 1499 | 149700,1.0 1500 | 149800,1.0 1501 | 149900,1.0 1502 | 150000,1.0 1503 | 150100,1.0 1504 | 150200,1.0 1505 | 150300,1.0 1506 | 150400,1.0 1507 | 150500,1.0 1508 | 150600,1.0 1509 | 150700,1.0 1510 | 150800,1.0 1511 | 150900,1.0 1512 | 151000,1.0 1513 | 151100,1.0 1514 | 151200,1.0 1515 | 151300,1.0 1516 | 151400,1.0 1517 | 151500,1.0 1518 | 151600,1.0 
1519 | 151700,1.0 1520 | 151800,1.0 1521 | 151900,1.0 1522 | 152000,1.0 1523 | 152100,1.0 1524 | 152200,1.0 1525 | 152300,1.0 1526 | 152400,1.0 1527 | 152500,1.0 1528 | 152600,1.0 1529 | 152700,1.0 1530 | 152800,1.0 1531 | 152900,1.0 1532 | 153000,1.0 1533 | 153100,1.0 1534 | 153200,1.0 1535 | 153300,1.0 1536 | 153400,1.0 1537 | 153500,1.0 1538 | 153600,1.0 1539 | 153700,1.0 1540 | 153800,1.0 1541 | 153900,1.0 1542 | 154000,1.0 1543 | 154100,1.0 1544 | 154200,1.0 1545 | 154300,1.0 1546 | 154400,1.0 1547 | 154500,1.0 1548 | 154600,1.0 1549 | 154700,1.0 1550 | 154800,1.0 1551 | 154900,1.0 1552 | 155000,1.0 1553 | 155100,1.0 1554 | 155200,1.0 1555 | 155300,1.0 1556 | 155400,1.0 1557 | 155500,1.0 1558 | 155600,1.0 1559 | 155700,1.0 1560 | 155800,1.0 1561 | 155900,1.0 1562 | 156000,1.0 1563 | 156100,1.0 1564 | 156200,1.0 1565 | 156300,1.0 1566 | 156400,1.0 1567 | 156500,1.0 1568 | 156600,1.0 1569 | 156700,1.0 1570 | 156800,1.0 1571 | 156900,1.0 1572 | 157000,1.0 1573 | 157100,1.0 1574 | 157200,1.0 1575 | 157300,1.0 1576 | 157400,1.0 1577 | 157500,1.0 1578 | 157600,1.0 1579 | 157700,1.0 1580 | 157800,1.0 1581 | 157900,1.0 1582 | 158000,1.0 1583 | 158100,1.0 1584 | 158200,1.0 1585 | 158300,1.0 1586 | 158400,1.0 1587 | 158500,1.0 1588 | 158600,1.0 1589 | 158700,1.0 1590 | 158800,1.0 1591 | 158900,1.0 1592 | 159000,1.0 1593 | 159100,1.0 1594 | 159200,1.0 1595 | 159300,1.0 1596 | 159400,1.0 1597 | 159500,1.0 1598 | 159600,1.0 1599 | 159700,1.0 1600 | 159800,1.0 1601 | 159900,1.0 1602 | 160000,1.0 1603 | 160100,1.0 1604 | 160200,1.0 1605 | 160300,1.0 1606 | 160400,1.0 1607 | 160500,1.0 1608 | 160600,1.0 1609 | 160700,1.0 1610 | 160800,1.0 1611 | 160900,1.0 1612 | 161000,1.0 1613 | 161100,1.0 1614 | 161200,1.0 1615 | 161300,1.0 1616 | 161400,1.0 1617 | 161500,1.0 1618 | 161600,1.0 1619 | 161700,1.0 1620 | 161800,1.0 1621 | 161900,1.0 1622 | 162000,1.0 1623 | 162100,1.0 1624 | 162200,1.0 1625 | 162300,1.0 1626 | 162400,1.0 1627 | 162500,1.0 1628 | 162600,1.0 1629 | 162700,1.0 
1630 | 162800,1.0 1631 | 162900,1.0 1632 | 163000,1.0 1633 | 163100,1.0 1634 | 163200,1.0 1635 | 163300,1.0 1636 | 163400,1.0 1637 | 163500,1.0 1638 | 163600,1.0 1639 | 163700,1.0 1640 | 163800,1.0 1641 | 163900,1.0 1642 | 164000,1.0 1643 | 164100,1.0 1644 | 164200,1.0 1645 | 164300,1.0 1646 | 164400,1.0 1647 | 164500,1.0 1648 | 164600,1.0 1649 | 164700,1.0 1650 | 164800,1.0 1651 | 164900,1.0 1652 | 165000,1.0 1653 | 165100,1.0 1654 | 165200,1.0 1655 | 165300,1.0 1656 | 165400,1.0 1657 | 165500,1.0 1658 | 165600,1.0 1659 | 165700,1.0 1660 | 165800,1.0 1661 | 165900,1.0 1662 | 166000,1.0 1663 | 166100,1.0 1664 | 166200,1.0 1665 | 166300,1.0 1666 | 166400,1.0 1667 | 166500,1.0 1668 | 166600,1.0 1669 | 166700,1.0 1670 | 166800,1.0 1671 | 166900,1.0 1672 | 167000,1.0 1673 | 167100,1.0 1674 | 167200,1.0 1675 | 167300,1.0 1676 | 167400,1.0 1677 | 167500,1.0 1678 | 167600,1.0 1679 | 167700,1.0 1680 | 167800,1.0 1681 | 167900,1.0 1682 | 168000,1.0 1683 | 168100,1.0 1684 | 168200,1.0 1685 | 168300,1.0 1686 | 168400,1.0 1687 | 168500,1.0 1688 | 168600,1.0 1689 | 168700,1.0 1690 | 168800,1.0 1691 | 168900,1.0 1692 | 169000,1.0 1693 | 169100,1.0 1694 | 169200,1.0 1695 | 169300,1.0 1696 | 169400,1.0 1697 | 169500,1.0 1698 | 169600,1.0 1699 | 169700,1.0 1700 | 169800,1.0 1701 | 169900,1.0 1702 | 170000,1.0 1703 | 170100,1.0 1704 | 170200,1.0 1705 | 170300,1.0 1706 | 170400,1.0 1707 | 170500,1.0 1708 | 170600,1.0 1709 | 170700,1.0 1710 | 170800,1.0 1711 | 170900,1.0 1712 | 171000,1.0 1713 | 171100,1.0 1714 | 171200,1.0 1715 | 171300,1.0 1716 | 171400,1.0 1717 | 171500,1.0 1718 | 171600,1.0 1719 | 171700,1.0 1720 | 171800,1.0 1721 | 171900,1.0 1722 | 172000,1.0 1723 | 172100,1.0 1724 | 172200,1.0 1725 | 172300,1.0 1726 | 172400,1.0 1727 | 172500,1.0 1728 | 172600,1.0 1729 | 172700,1.0 1730 | 172800,1.0 1731 | 172900,1.0 1732 | 173000,1.0 1733 | 173100,1.0 1734 | 173200,1.0 1735 | 173300,1.0 1736 | 173400,1.0 1737 | 173500,1.0 1738 | 173600,1.0 1739 | 173700,1.0 1740 | 173800,1.0 
1741 | 173900,1.0 1742 | 174000,1.0 1743 | 174100,1.0 1744 | 174200,1.0 1745 | 174300,1.0 1746 | 174400,1.0 1747 | 174500,1.0 1748 | 174600,1.0 1749 | 174700,1.0 1750 | 174800,1.0 1751 | 174900,1.0 1752 | 175000,1.0 1753 | 175100,1.0 1754 | 175200,1.0 1755 | 175300,1.0 1756 | 175400,1.0 1757 | 175500,1.0 1758 | 175600,1.0 1759 | 175700,1.0 1760 | 175800,1.0 1761 | 175900,1.0 1762 | 176000,1.0 1763 | 176100,1.0 1764 | 176200,1.0 1765 | 176300,1.0 1766 | 176400,1.0 1767 | 176500,1.0 1768 | 176600,1.0 1769 | 176700,1.0 1770 | 176800,1.0 1771 | 176900,1.0 1772 | 177000,1.0 1773 | 177100,1.0 1774 | 177200,1.0 1775 | 177300,1.0 1776 | 177400,1.0 1777 | 177500,1.0 1778 | 177600,1.0 1779 | 177700,1.0 1780 | 177800,1.0 1781 | 177900,1.0 1782 | 178000,1.0 1783 | 178100,1.0 1784 | 178200,1.0 1785 | 178300,1.0 1786 | 178400,1.0 1787 | 178500,1.0 1788 | 178600,1.0 1789 | 178700,1.0 1790 | 178800,1.0 1791 | 178900,1.0 1792 | 179000,1.0 1793 | 179100,1.0 1794 | 179200,1.0 1795 | 179300,1.0 1796 | 179400,1.0 1797 | 179500,1.0 1798 | 179600,1.0 1799 | 179700,1.0 1800 | 179800,1.0 1801 | 179900,1.0 1802 | 180000,1.0 1803 | 180100,1.0 1804 | 180200,1.0 1805 | 180300,1.0 1806 | 180400,1.0 1807 | 180500,1.0 1808 | 180600,1.0 1809 | 180700,1.0 1810 | 180800,1.0 1811 | 180900,1.0 1812 | 181000,1.0 1813 | 181100,1.0 1814 | 181200,1.0 1815 | 181300,1.0 1816 | 181400,1.0 1817 | 181500,1.0 1818 | 181600,1.0 1819 | 181700,1.0 1820 | 181800,1.0 1821 | 181900,1.0 1822 | 182000,1.0 1823 | 182100,1.0 1824 | 182200,1.0 1825 | 182300,1.0 1826 | 182400,1.0 1827 | 182500,1.0 1828 | 182600,1.0 1829 | 182700,1.0 1830 | 182800,1.0 1831 | 182900,1.0 1832 | 183000,1.0 1833 | 183100,1.0 1834 | 183200,1.0 1835 | 183300,1.0 1836 | 183400,1.0 1837 | 183500,1.0 1838 | 183600,1.0 1839 | 183700,1.0 1840 | 183800,1.0 1841 | 183900,1.0 1842 | 184000,1.0 1843 | 184100,1.0 1844 | 184200,1.0 1845 | 184300,1.0 1846 | 184400,1.0 1847 | 184500,1.0 1848 | 184600,1.0 1849 | 184700,1.0 1850 | 184800,1.0 1851 | 184900,1.0 
1852 | 185000,1.0 1853 | 185100,1.0 1854 | 185200,1.0 1855 | 185300,1.0 1856 | 185400,1.0 1857 | 185500,1.0 1858 | 185600,1.0 1859 | 185700,1.0 1860 | 185800,1.0 1861 | 185900,1.0 1862 | 186000,1.0 1863 | 186100,1.0 1864 | 186200,1.0 1865 | 186300,1.0 1866 | 186400,1.0 1867 | 186500,1.0 1868 | 186600,1.0 1869 | 186700,1.0 1870 | 186800,1.0 1871 | 186900,1.0 1872 | 187000,1.0 1873 | 187100,1.0 1874 | 187200,1.0 1875 | 187300,1.0 1876 | 187400,1.0 1877 | 187500,1.0 1878 | 187600,1.0 1879 | 187700,1.0 1880 | 187800,1.0 1881 | 187900,1.0 1882 | 188000,1.0 1883 | 188100,1.0 1884 | 188200,1.0 1885 | 188300,1.0 1886 | -------------------------------------------------------------------------------- /results/mnist/4_iter_val_acc.csv: -------------------------------------------------------------------------------- 1 | step,val_acc 2 | 0,0.0941506410256 3 | 500,0.989383012821 4 | 1000,0.993189102564 5 | 1500,0.992387820513 6 | 2000,0.994591346154 7 | 2500,0.994791666667 8 | 3000,0.994991987179 9 | 3500,0.994991987179 10 | 4000,0.995392628205 11 | 4500,0.995392628205 12 | 5000,0.995592948718 13 | 5500,0.995592948718 14 | 6000,0.994991987179 15 | 6500,0.994791666667 16 | 7000,0.993790064103 17 | 7500,0.99358974359 18 | 8000,0.994591346154 19 | 8500,0.994791666667 20 | 9000,0.995392628205 21 | 9500,0.995392628205 22 | 10000,0.995392628205 23 | 10500,0.993790064103 24 | 11000,0.995392628205 25 | 11500,0.995392628205 26 | 12000,0.995592948718 27 | 12500,0.995592948718 28 | 13000,0.994991987179 29 | 13500,0.995592948718 30 | 14000,0.995192307692 31 | 14500,0.995192307692 32 | 15000,0.995592948718 33 | 15500,0.994991987179 34 | 16000,0.995592948718 35 | 16500,0.994791666667 36 | 17000,0.995793269231 37 | 17500,0.995392628205 38 | 18000,0.994591346154 39 | 18500,0.994791666667 40 | 19000,0.994791666667 41 | 19500,0.995392628205 42 | 20000,0.995392628205 43 | 20500,0.995592948718 44 | 21000,0.994991987179 45 | 21500,0.994991987179 46 | 22000,0.995793269231 47 | 22500,0.994991987179 48 
| 23000,0.994791666667 49 | 23500,0.994591346154 50 | 24000,0.995392628205 51 | 24500,0.995592948718 52 | 25000,0.995793269231 53 | 25500,0.995592948718 54 | 26000,0.995793269231 55 | 26500,0.995592948718 56 | 27000,0.995592948718 57 | 27500,0.995993589744 58 | 28000,0.995993589744 59 | 28500,0.995392628205 60 | 29000,0.995192307692 61 | 29500,0.995392628205 62 | 30000,0.995592948718 63 | 30500,0.995392628205 64 | 31000,0.994791666667 65 | 31500,0.995192307692 66 | 32000,0.995592948718 67 | 32500,0.995793269231 68 | 33000,0.995392628205 69 | 33500,0.995592948718 70 | 34000,0.995392628205 71 | 34500,0.995392628205 72 | 35000,0.994991987179 73 | 35500,0.994791666667 74 | 36000,0.995793269231 75 | 36500,0.995192307692 76 | 37000,0.995192307692 77 | 37500,0.995592948718 78 | 38000,0.995793269231 79 | 38500,0.995793269231 80 | 39000,0.995993589744 81 | 39500,0.995993589744 82 | 40000,0.995793269231 83 | 40500,0.995392628205 84 | 41000,0.994791666667 85 | 41500,0.995192307692 86 | 42000,0.994991987179 87 | 42500,0.995592948718 88 | 43000,0.994991987179 89 | 43500,0.995392628205 90 | 44000,0.994991987179 91 | 44500,0.995392628205 92 | 45000,0.995592948718 93 | 45500,0.995392628205 94 | 46000,0.994591346154 95 | 46500,0.995392628205 96 | 47000,0.994591346154 97 | 47500,0.995392628205 98 | 48000,0.995192307692 99 | 48500,0.995592948718 100 | 49000,0.995192307692 101 | 49500,0.995392628205 102 | 50000,0.995793269231 103 | 50500,0.995192307692 104 | 51000,0.995192307692 105 | 51500,0.995392628205 106 | 52000,0.995993589744 107 | 52500,0.995993589744 108 | 53000,0.995192307692 109 | 53500,0.994791666667 110 | 54000,0.995793269231 111 | 54500,0.995592948718 112 | 55000,0.994791666667 113 | 55500,0.995392628205 114 | 56000,0.995192307692 115 | 56500,0.995993589744 116 | 57000,0.995392628205 117 | 57500,0.995793269231 118 | 58000,0.995592948718 119 | 58500,0.994991987179 120 | 59000,0.994991987179 121 | 59500,0.995392628205 122 | 60000,0.995192307692 123 | 60500,0.994791666667 
124 | 61000,0.995192307692 125 | 61500,0.995192307692 126 | 62000,0.995192307692 127 | 62500,0.995192307692 128 | 63000,0.994991987179 129 | 63500,0.994791666667 130 | 64000,0.995793269231 131 | 64500,0.995793269231 132 | 65000,0.995392628205 133 | 65500,0.995592948718 134 | 66000,0.995192307692 135 | 66500,0.995392628205 136 | 67000,0.995392628205 137 | 67500,0.995592948718 138 | 68000,0.995793269231 139 | 68500,0.995793269231 140 | 69000,0.995592948718 141 | 69500,0.995592948718 142 | 70000,0.995993589744 143 | 70500,0.995392628205 144 | 71000,0.995592948718 145 | 71500,0.995592948718 146 | 72000,0.995592948718 147 | 72500,0.995192307692 148 | 73000,0.994991987179 149 | 73500,0.995192307692 150 | 74000,0.995392628205 151 | 74500,0.995592948718 152 | 75000,0.995392628205 153 | 75500,0.995192307692 154 | 76000,0.995392628205 155 | 76500,0.995392628205 156 | 77000,0.995793269231 157 | 77500,0.995993589744 158 | 78000,0.995392628205 159 | 78500,0.995793269231 160 | 79000,0.995592948718 161 | 79500,0.995592948718 162 | 80000,0.995592948718 163 | 80500,0.995192307692 164 | 81000,0.995592948718 165 | 81500,0.995592948718 166 | 82000,0.995192307692 167 | 82500,0.994991987179 168 | 83000,0.995592948718 169 | 83500,0.995192307692 170 | 84000,0.994791666667 171 | 84500,0.995793269231 172 | 85000,0.995192307692 173 | 85500,0.995192307692 174 | 86000,0.995192307692 175 | 86500,0.995392628205 176 | 87000,0.995592948718 177 | 87500,0.995392628205 178 | 88000,0.995392628205 179 | 88500,0.995592948718 180 | 89000,0.995192307692 181 | 89500,0.995392628205 182 | 90000,0.995392628205 183 | 90500,0.994991987179 184 | 91000,0.995192307692 185 | 91500,0.995192307692 186 | 92000,0.995392628205 187 | 92500,0.995392628205 188 | 93000,0.995392628205 189 | 93500,0.995793269231 190 | 94000,0.994591346154 191 | 94500,0.995592948718 192 | 95000,0.994991987179 193 | 95500,0.994591346154 194 | 96000,0.995192307692 195 | 96500,0.994991987179 196 | 97000,0.994791666667 197 | 97500,0.994791666667 
198 | 98000,0.994991987179 199 | 98500,0.995392628205 200 | 99000,0.995592948718 201 | 99500,0.994991987179 202 | 100000,0.995793269231 203 | 100500,0.995592948718 204 | 101000,0.995392628205 205 | 101500,0.995192307692 206 | 102000,0.995192307692 207 | 102500,0.995392628205 208 | 103000,0.995592948718 209 | 103500,0.995592948718 210 | 104000,0.994591346154 211 | 104500,0.995192307692 212 | 105000,0.995192307692 213 | 105500,0.994190705128 214 | 106000,0.995192307692 215 | 106500,0.994991987179 216 | 107000,0.995392628205 217 | 107500,0.995592948718 218 | 108000,0.995592948718 219 | 108500,0.995192307692 220 | 109000,0.994791666667 221 | 109500,0.995192307692 222 | 110000,0.995192307692 223 | 110500,0.995192307692 224 | 111000,0.994591346154 225 | 111500,0.994791666667 226 | 112000,0.995392628205 227 | 112500,0.994991987179 228 | 113000,0.995192307692 229 | 113500,0.994991987179 230 | 114000,0.995392628205 231 | 114500,0.994591346154 232 | 115000,0.994991987179 233 | 115500,0.994591346154 234 | 116000,0.995392628205 235 | 116500,0.995192307692 236 | 117000,0.995392628205 237 | 117500,0.994991987179 238 | 118000,0.995192307692 239 | 118500,0.994991987179 240 | 119000,0.995392628205 241 | 119500,0.994991987179 242 | 120000,0.995192307692 243 | 120500,0.995392628205 244 | 121000,0.994991987179 245 | 121500,0.995392628205 246 | 122000,0.995392628205 247 | 122500,0.995392628205 248 | 123000,0.995592948718 249 | 123500,0.995392628205 250 | 124000,0.995592948718 251 | 124500,0.994991987179 252 | 125000,0.994791666667 253 | 125500,0.995592948718 254 | 126000,0.995192307692 255 | 126500,0.995392628205 256 | 127000,0.994991987179 257 | 127500,0.994791666667 258 | 128000,0.995192307692 259 | 128500,0.995392628205 260 | 129000,0.994791666667 261 | 129500,0.995192307692 262 | 130000,0.994991987179 263 | 130500,0.994791666667 264 | 131000,0.995192307692 265 | 131500,0.994991987179 266 | 132000,0.995192307692 267 | 132500,0.994991987179 268 | 133000,0.994991987179 269 | 
133500,0.995192307692 270 | 134000,0.994991987179 271 | 134500,0.995192307692 272 | 135000,0.995392628205 273 | 135500,0.995392628205 274 | 136000,0.994991987179 275 | 136500,0.995392628205 276 | 137000,0.995192307692 277 | 137500,0.994791666667 278 | 138000,0.995392628205 279 | 138500,0.995192307692 280 | 139000,0.995192307692 281 | 139500,0.995592948718 282 | 140000,0.995392628205 283 | 140500,0.995192307692 284 | 141000,0.995192307692 285 | 141500,0.995592948718 286 | 142000,0.995793269231 287 | 142500,0.995592948718 288 | 143000,0.994991987179 289 | 143500,0.995592948718 290 | 144000,0.994991987179 291 | 144500,0.995392628205 292 | 145000,0.995592948718 293 | 145500,0.995392628205 294 | 146000,0.995192307692 295 | 146500,0.995592948718 296 | 147000,0.995592948718 297 | 147500,0.995392628205 298 | 148000,0.995793269231 299 | 148500,0.995392628205 300 | 149000,0.995793269231 301 | 149500,0.995592948718 302 | 150000,0.995392628205 303 | 150500,0.995392628205 304 | 151000,0.995392628205 305 | 151500,0.995192307692 306 | 152000,0.995192307692 307 | 152500,0.995592948718 308 | 153000,0.995392628205 309 | 153500,0.995592948718 310 | 154000,0.995392628205 311 | 154500,0.995392628205 312 | 155000,0.994591346154 313 | 155500,0.995392628205 314 | 156000,0.994991987179 315 | 156500,0.995192307692 316 | 157000,0.995392628205 317 | 157500,0.995592948718 318 | 158000,0.994991987179 319 | 158500,0.994991987179 320 | 159000,0.994791666667 321 | 159500,0.994991987179 322 | 160000,0.994991987179 323 | 160500,0.995192307692 324 | 161000,0.995192307692 325 | 161500,0.994991987179 326 | 162000,0.994791666667 327 | 162500,0.994991987179 328 | 163000,0.994791666667 329 | 163500,0.994591346154 330 | 164000,0.994591346154 331 | 164500,0.995392628205 332 | 165000,0.994991987179 333 | 165500,0.994991987179 334 | 166000,0.994791666667 335 | 166500,0.995192307692 336 | 167000,0.994791666667 337 | 167500,0.994791666667 338 | 168000,0.994791666667 339 | 168500,0.994791666667 340 | 
169000,0.995192307692 341 | 169500,0.994991987179 342 | 170000,0.994591346154 343 | 170500,0.994991987179 344 | 171000,0.995592948718 345 | 171500,0.994791666667 346 | 172000,0.994791666667 347 | 172500,0.994391025641 348 | 173000,0.994391025641 349 | 173500,0.993990384615 350 | 174000,0.993790064103 351 | 174500,0.994791666667 352 | 175000,0.994591346154 353 | 175500,0.994391025641 354 | 176000,0.994791666667 355 | 176500,0.994791666667 356 | 177000,0.994190705128 357 | 177500,0.994190705128 358 | 178000,0.994791666667 359 | 178500,0.994591346154 360 | 179000,0.994791666667 361 | 179500,0.993990384615 362 | 180000,0.994591346154 363 | 180500,0.994791666667 364 | 181000,0.994591346154 365 | 181500,0.99358974359 366 | 182000,0.994391025641 367 | 182500,0.994591346154 368 | 183000,0.994791666667 369 | 183500,0.994591346154 370 | 184000,0.994391025641 371 | 184500,0.995392628205 372 | 185000,0.994791666667 373 | 185500,0.994591346154 374 | 186000,0.994591346154 375 | 186500,0.994391025641 376 | 187000,0.994391025641 377 | 187500,0.993389423077 378 | 188000,0.994190705128 379 | -------------------------------------------------------------------------------- /results/reconstruction_loss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/naturomics/CapsNet-Tensorflow/5b464caab361ec402c1b17acb9bc2680e5fbb7de/results/reconstruction_loss.png -------------------------------------------------------------------------------- /results/routing_trials.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/naturomics/CapsNet-Tensorflow/5b464caab361ec402c1b17acb9bc2680e5fbb7de/results/routing_trials.png -------------------------------------------------------------------------------- /results/total_loss.png: -------------------------------------------------------------------------------- 
# https://raw.githubusercontent.com/naturomics/CapsNet-Tensorflow/5b464caab361ec402c1b17acb9bc2680e5fbb7de/results/total_loss.png
# --------------------------------------------------------------------------------
# /utils.py:
# --------------------------------------------------------------------------------
import os
import scipy
import numpy as np
import tensorflow as tf


def _load_idx(path, batch_size, is_training):
    """Load an MNIST-format (idx) dataset stored under `path`.

    Args:
        path: directory containing the raw idx files
            (train-images-idx3-ubyte, train-labels-idx1-ubyte,
             t10k-images-idx3-ubyte, t10k-labels-idx1-ubyte).
        batch_size: used only to compute the number of whole batches.
        is_training: if True, return a fixed 55000/5000 train/validation
            split of the 60000-image training set; otherwise return the
            10000-image test set.

    Returns:
        (trX, trY, num_tr_batch, valX, valY, num_val_batch) when training,
        (teX, teY, num_te_batch) otherwise. Images are float32 in [0, 1]
        with shape (N, 28, 28, 1); labels are int32.
    """
    if is_training:
        # Open the raw idx files in binary mode and close them promptly;
        # the original opened them in text mode and leaked the handles.
        with open(os.path.join(path, 'train-images-idx3-ubyte'), 'rb') as fd:
            loaded = np.fromfile(file=fd, dtype=np.uint8)
        # 16-byte idx header precedes the pixel data.
        trainX = loaded[16:].reshape((60000, 28, 28, 1)).astype(np.float32)

        with open(os.path.join(path, 'train-labels-idx1-ubyte'), 'rb') as fd:
            loaded = np.fromfile(file=fd, dtype=np.uint8)
        # 8-byte idx header precedes the label data.
        trainY = loaded[8:].reshape(60000).astype(np.int32)

        # Fixed 55000/5000 train/validation split, pixels scaled to [0, 1].
        trX = trainX[:55000] / 255.
        trY = trainY[:55000]
        valX = trainX[55000:] / 255.
        valY = trainY[55000:]

        num_tr_batch = 55000 // batch_size
        num_val_batch = 5000 // batch_size
        return trX, trY, num_tr_batch, valX, valY, num_val_batch

    with open(os.path.join(path, 't10k-images-idx3-ubyte'), 'rb') as fd:
        loaded = np.fromfile(file=fd, dtype=np.uint8)
    # np.float32 here: the original used np.float (a float64 alias removed
    # in NumPy >= 1.20) and was inconsistent with the training branch.
    teX = loaded[16:].reshape((10000, 28, 28, 1)).astype(np.float32)

    with open(os.path.join(path, 't10k-labels-idx1-ubyte'), 'rb') as fd:
        loaded = np.fromfile(file=fd, dtype=np.uint8)
    teY = loaded[8:].reshape(10000).astype(np.int32)

    num_te_batch = 10000 // batch_size
    return teX / 255., teY, num_te_batch


def load_mnist(batch_size, is_training=True):
    """Load MNIST from data/mnist. See _load_idx for the return contract."""
    return _load_idx(os.path.join('data', 'mnist'), batch_size, is_training)


def load_fashion_mnist(batch_size, is_training=True):
    """Load Fashion-MNIST from data/fashion-mnist (same idx format as MNIST)."""
    return _load_idx(os.path.join('data', 'fashion-mnist'), batch_size, is_training)


def load_data(dataset, batch_size, is_training=True, one_hot=False):
    """Dispatch to the loader for `dataset` ('mnist' or 'fashion-mnist').

    `one_hot` is accepted for interface compatibility but is not used by
    the underlying loaders.
    """
    if dataset == 'mnist':
        return load_mnist(batch_size, is_training)
    elif dataset == 'fashion-mnist':
        return load_fashion_mnist(batch_size, is_training)
    else:
        raise Exception('Invalid dataset, please check the name of dataset:', dataset)


def get_batch_data(dataset, batch_size, num_threads):
    """Build a shuffled (X, Y) training-batch queue pipeline for `dataset`.

    Raises an explicit error for unknown dataset names; the original fell
    through and crashed later with a NameError on trX.
    """
    if dataset == 'mnist':
        trX, trY, num_tr_batch, valX, valY, num_val_batch = load_mnist(batch_size, is_training=True)
    elif dataset == 'fashion-mnist':
        trX, trY, num_tr_batch, valX, valY, num_val_batch = load_fashion_mnist(batch_size, is_training=True)
    else:
        raise Exception('Invalid dataset, please check the name of dataset:', dataset)
    data_queues = tf.train.slice_input_producer([trX, trY])
    X, Y = tf.train.shuffle_batch(data_queues, num_threads=num_threads,
                                  batch_size=batch_size,
                                  capacity=batch_size * 64,
                                  min_after_dequeue=batch_size * 32,
                                  allow_smaller_final_batch=False)

    return(X, Y)


def save_images(imgs, size, path):
    '''
    Args:
        imgs: [batch_size, image_height, image_width]
        size: a list with two int elements, [image_height, image_width]
        path: the path to save images
    '''
    imgs = (imgs + 1.) / 2  # inverse_transform: map [-1, 1] back to [0, 1]
    # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2; this call
    # requires an older SciPy (or a migration to imageio.imwrite).
    return(scipy.misc.imsave(path, mergeImgs(imgs, size)))


def mergeImgs(images, size):
    """Tile a batch of images into one size[0] x size[1] grid image."""
    h, w = images.shape[1], images.shape[2]
    imgs = np.zeros((h * size[0], w * size[1], 3))
    for idx, image in enumerate(images):
        i = idx % size[1]   # column in the grid
        j = idx // size[1]  # row in the grid
        imgs[j * h:j * h + h, i * w:i * w + w, :] = image

    return imgs


# For version compatibility across TensorFlow releases
def reduce_sum(input_tensor, axis=None, keepdims=False):
    try:
        return tf.reduce_sum(input_tensor, axis=axis, keepdims=keepdims)
    except TypeError:
        # Older TF spells the kwarg keep_dims; catch only the signature
        # mismatch instead of swallowing every error with a bare except.
        return tf.reduce_sum(input_tensor, axis=axis, keep_dims=keepdims)


# For version compatibility across TensorFlow releases
def softmax(logits, axis=None):
    try:
        return tf.nn.softmax(logits, axis=axis)
    except TypeError:
        # Older TF spells the kwarg dim instead of axis.
        return tf.nn.softmax(logits, dim=axis)


def get_shape(inputs, name=None):
    """Return the shape of `inputs` as a list, using static dims where
    known and dynamic tf.shape() entries where the static dim is None."""
    name = "shape" if name is None else name
    with tf.name_scope(name):
        static_shape = inputs.get_shape().as_list()
        dynamic_shape = tf.shape(inputs)
        shape = []
        for i, dim in enumerate(static_shape):
            dim = dim if dim is not None else dynamic_shape[i]
            shape.append(dim)
        return(shape)
# --------------------------------------------------------------------------------