├── image └── pipeline.png ├── results.txt ├── README.md ├── ConfusionMatrix.py └── Train_validation_test_release.ipynb /image/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icemansina/IJCAI2016/HEAD/image/pipeline.png -------------------------------------------------------------------------------- /results.txt: -------------------------------------------------------------------------------- 1 | Our Results on CB513 (Q8 accuracy is 69.7%, Q3 accuracy is 84% for single model): 2 | 3 | Confusion matrix: 4 | 5 | [[11679 9 2888 216 0 974 987 1167] 6 | [ 588 31 294 12 0 98 66 92] 7 | [ 2107 8 14919 41 0 410 201 330] 8 | [ 635 0 172 790 0 904 80 551] 9 | [ 4 0 2 3 0 17 0 4] 10 | [ 678 2 216 250 0 24394 76 541] 11 | [ 3077 5 888 146 0 688 2072 1440] 12 | [ 1616 1 447 385 0 1822 520 5222]] 13 | 14 | Our Results on CASP10 (Q8 accuracy is 76.9%, Q3 accuracy is 87.8% for single model): 15 | 16 | casp10 confusion matrix 17 | 18 | [[3894 4 402 42 0 113 182 163] 19 | [ 69 3 23 3 0 10 5 3] 20 | [ 316 2 2855 21 0 59 64 35] 21 | [ 118 0 12 125 0 81 29 51] 22 | [ 0 0 0 0 0 0 0 0] 23 | [ 102 0 46 48 0 3535 9 86] 24 | [ 392 1 98 22 0 58 314 140] 25 | [ 239 1 59 70 0 212 95 918]] 26 | 27 | Our Results on CASP11 (Q8 accuracy is 73.1%, Q3 accuracy is 85.3% for single model): 28 | 29 | casp11 confusion matrix 30 | 31 | [[1921 2 359 24 0 93 134 97] 32 | [ 50 5 30 4 0 6 8 5] 33 | [ 288 1 2368 16 0 35 55 35] 34 | [ 79 0 35 89 0 60 24 41] 35 | [ 0 0 0 0 0 0 0 0] 36 | [ 115 0 68 43 0 2851 17 57] 37 | [ 314 0 102 23 0 73 236 131] 38 | [ 176 0 45 49 0 190 90 581]] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IJCAI2016 2 | 3 | This repository is for the replication of our published paper [Protein Secondary Structure Prediction Using Cascaded Convolutional and Recurrent Neural Networks](https://www.ijcai.org/Proceedings/16/Papers/364.pdf) on IJCAI2016. Our whole pipeline is listed as follows. 4 |
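For reference, the Q8 accuracies quoted in results.txt above are just the diagonal sum of the corresponding confusion matrix divided by its total count (the same quantity `ConfusionMatrix.accuracy()` reports). A minimal sketch of that check, using the CB513 matrix exactly as printed above:

```
import numpy as np

# CB513 confusion matrix from results.txt (rows = true class, columns = predicted class)
cb513 = np.array([
    [11679,    9,  2888,  216, 0,   974,  987, 1167],
    [  588,   31,   294,   12, 0,    98,   66,   92],
    [ 2107,    8, 14919,   41, 0,   410,  201,  330],
    [  635,    0,   172,  790, 0,   904,   80,  551],
    [    4,    0,     2,    3, 0,    17,    0,    4],
    [  678,    2,   216,  250, 0, 24394,   76,  541],
    [ 3077,    5,   888,  146, 0,   688, 2072, 1440],
    [ 1616,    1,   447,  385, 0,  1822,  520, 5222],
])

q8 = np.trace(cb513) / float(cb513.sum())  # 59107 / 84765
print("CB513 Q8 accuracy: %.3f" % q8)      # ~0.697, i.e. the reported 69.7%
```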

5 | 6 | # Download data 7 | 8 | For cb513+profile_split1.npy.gz and cullpdb+profile_6133_filtered.npy.gz, please download them from this [website](http://www.princeton.edu/~jzthree/datasets/ICML2014/). 9 | For CASP10 and CASP11, please download them from this [website](https://goo.gl/tjJttR). 10 | Download the data and put the files in the ./data folder. 11 | 12 | # Project Settings 13 | 14 | 1. Install the requirements (you can use pip or [Anaconda](https://www.continuum.io/downloads)): 15 | 16 | ``` 17 | conda install pip h5py cython numpy scipy 18 | conda install -c conda-forge theano 19 | conda install -c toli lasagne 20 | ``` 21 | 22 | 2. You can run training/validation/testing through the IPython notebook Train_validation_test_release.ipynb. 23 | PS: The demo code uses a split of cullpdb+profile_6133_filtered for training/validation and then tests on the CB513 and CASP datasets. You can use the whole cullpdb+profile_6133_filtered for training to obtain better performance. 24 | 25 | # Progress 26 | - [x] README for training 27 | - [x] README for project settings 28 | - [x] Dynamic training codes 29 | - [x] Dynamic evaluation codes 30 | - [ ] Multi-GPU support 31 | 32 | # Acknowledgement 33 | 34 | We thank [Jian Zhou](http://www.princeton.edu/~jzthree/) and [Sheng Wang](http://ttic.uchicago.edu/~wangsheng/) for CASP dataset generation. 35 | 36 | # Reference 37 | If you find this code useful for your research, please cite: 38 | 39 | ``` 40 | @inproceedings{li2016protein, 41 | title={Protein secondary structure prediction using cascaded convolutional and recurrent neural networks}, 42 | author={Li, Zhen and Yu, Yizhou}, 43 | booktitle={Proceedings of the Twenty-Fifth International Joint Conference on Artificial Intelligence}, 44 | pages={2560--2567}, 45 | year={2016}, 46 | organization={AAAI Press} 47 | } 48 | ``` -------------------------------------------------------------------------------- /ConfusionMatrix.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | class ConfusionMatrix: 3 | """ 4 | Simple confusion matrix class 5 | row is the true class, column is the predicted class 6 | """ 7 | def __init__(self, n_classes): 8 | self.n_classes = n_classes 9 | self.mat = np.zeros((n_classes,n_classes),dtype='int') 10 | 11 | def __str__(self): 12 | return np.array_str(self.mat) 13 | 14 | def batchAdd(self,y_true,y_pred): 15 | assert len(y_true) == len(y_pred) 16 | assert max(y_true) < self.n_classes 17 | assert max(y_pred) < self.n_classes 18 | for i in range(len(y_true)): 19 | self.mat[y_true[i],y_pred[i]] += 1 20 | 21 | def zero(self): 22 | self.mat.fill(0) 23 | 24 | def getErrors(self): 25 | """ 26 | Calculate different error types 27 | :return: vectors of true positives (tp), false negatives (fn), false positives (fp) and true negatives (tn) 28 | pos 0 is first class, pos 1 is second class etc. 
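        Illustrative example: for a 2-class matrix [[5, 1], [2, 4]] (rows = true class, columns = predicted class), this returns tp = [5, 4], tn = [4, 5], fp = [2, 1], fn = [1, 2].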
29 | """ 30 | tp = np.asarray(np.diag(self.mat).flatten(),dtype='float') 31 | fn = np.asarray(np.sum(self.mat, axis=1).flatten(),dtype='float') - tp 32 | fp = np.asarray(np.sum(self.mat, axis=0).flatten(),dtype='float') - tp 33 | tn = np.asarray(np.sum(self.mat)*np.ones(self.n_classes).flatten(),dtype='float') - tp - fn - fp 34 | return tp,tn,fp,fn 35 | 36 | def accuracy(self): 37 | """ 38 | Calculates global accuracy 39 | :return: accuracyn 40 | :example: >>> conf = ConfusionMatrix(3) 41 | >>> conf.batchAdd([0,0,1],[0,0,2]) 42 | >>> print conf.accuracy() 43 | """ 44 | tp, _, _, _ = self.getErrors() 45 | n_samples = np.sum(self.mat) 46 | return np.sum(tp) / n_samples 47 | 48 | 49 | def sensitivity(self): 50 | tp, tn, fp, fn = self.getErrors() 51 | res = tp / (tp + fn) 52 | res = res[~np.isnan(res)] 53 | return res 54 | 55 | def specificity(self): 56 | tp, tn, fp, fn = self.getErrors() 57 | res = tn / (tn + fp) 58 | res = res[~np.isnan(res)] 59 | return res 60 | 61 | def positivePredictiveValue(self): 62 | tp, tn, fp, fn = self.getErrors() 63 | res = tp / (tp + fp) 64 | res = res[~np.isnan(res)] 65 | return res 66 | 67 | def negativePredictiveValue(self): 68 | tp, tn, fp, fn = self.getErrors() 69 | res = tn / (tn + fn) 70 | res = res[~np.isnan(res)] 71 | return res 72 | 73 | def falsePositiveRate(self): 74 | tp, tn, fp, fn = self.getErrors() 75 | res = fp / (fp + tn) 76 | res = res[~np.isnan(res)] 77 | return res 78 | 79 | def falseDiscoveryRate(self): 80 | tp, tn, fp, fn = self.getErrors() 81 | res = fp / (tp + fp) 82 | res = res[~np.isnan(res)] 83 | return res 84 | 85 | def F1(self): 86 | tp, tn, fp, fn = self.getErrors() 87 | res = (2*tp) / (2*tp + fp + fn) 88 | res = res[~np.isnan(res)] 89 | return res 90 | 91 | def matthewsCorrelation(self): 92 | tp, tn, fp, fn = self.getErrors() 93 | numerator = tp*tn - fp*fn 94 | denominator = np.sqrt((tp + fp)*(tp + fn)*(tn + fp)*(tn + fn)) 95 | res = numerator / denominator 96 | res = res[~np.isnan(res)] 97 | return res 98 | def getMat(self): 99 | return self.mat 100 | 101 | 102 | 103 | 104 | -------------------------------------------------------------------------------- /Train_validation_test_release.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [ 10 | { 11 | "name": "stderr", 12 | "output_type": "stream", 13 | "text": [ 14 | "WARNING (theano.sandbox.cuda): The cuda backend is deprecated and will be removed in the next release (v0.10). Please switch to the gpuarray backend. You can get more information about how to switch at this URL:\n", 15 | " https://github.com/Theano/Theano/wiki/Converting-to-the-new-gpu-back-end%28gpuarray%29\n", 16 | "\n", 17 | "WARNING:theano.sandbox.cuda:The cuda backend is deprecated and will be removed in the next release (v0.10). Please switch to the gpuarray backend. 
You can get more information about how to switch at this URL:\n", 18 | " https://github.com/Theano/Theano/wiki/Converting-to-the-new-gpu-back-end%28gpuarray%29\n", 19 | "\n", 20 | "Using gpu device 0: GeForce GTX TITAN X (CNMeM is disabled, cuDNN 5110)\n" 21 | ] 22 | } 23 | ], 24 | "source": [ 25 | "import numpy as np\n", 26 | "import theano\n", 27 | "import theano.tensor as T\n", 28 | "import lasagne\n", 29 | "import lasagne.layers.dnn\n", 30 | "import string\n", 31 | "import sys\n", 32 | "from datetime import datetime, timedelta\n", 33 | "import time\n", 34 | "import cPickle as pickle\n", 35 | "import gzip\n", 36 | "import h5py\n", 37 | "from ConfusionMatrix import ConfusionMatrix\n", 38 | "import matplotlib.pylab as plt\n", 39 | "%matplotlib inline\n", 40 | "\n", 41 | "np.random.seed(2345)" 42 | ] 43 | }, 44 | { 45 | "cell_type": "markdown", 46 | "metadata": {}, 47 | "source": [ 48 | "# Prepare train and test dataset\n", 49 | "\n", 50 | "Train: Cullpdb+profile_6133_filtered (0:5022, 700, 57) \n", 51 | "
\n", 52 | "Validation: Cullpdb+profile_6133_filtered (5022:5534, 700, 57)\n", 53 | "
\n", 54 | "\n", 55 | "Test: CB513 (514, 700, 57) CASP10 (123, 700, 43)" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 2, 61 | "metadata": { 62 | "collapsed": false 63 | }, 64 | "outputs": [ 65 | { 66 | "name": "stdout", 67 | "output_type": "stream", 68 | "text": [ 69 | "Loading train data (Cullpdb_filted)...\n", 70 | "Train data shape is (5120, 700, 42) , two labels shapes are (5120, 700) and (5120, 700) , mask shape is (5120, 700) \n", 71 | "\n", 72 | "Validation data shape is (512, 700, 42) , two labels shapes are (512, 700) and (512, 700) , mask shape is (512, 700) \n", 73 | "\n", 74 | "Loading Test data (CB513)...\n", 75 | "Test data shape is (514, 700, 42) , two labels shapes are (514, 700) and (514, 700) , mask shape is (514, 700) \n", 76 | "\n", 77 | "Loading Test data (CASP10)...\n", 78 | "casp10 data shape is (123, 700, 42) , labels shape is (123, 700) , mask shape is (123, 700) \n", 79 | "\n", 80 | "load all data takes time 7.835487s\n" 81 | ] 82 | } 83 | ], 84 | "source": [ 85 | "#############################################################################\n", 86 | "##### TRAIN DATA Cullpdb+profile_6133_filtered #####\n", 87 | "############################################################################\n", 88 | "time_start = time.time()\n", 89 | "print(\"Loading train data (Cullpdb_filted)...\")\n", 90 | "X_in = np.load(gzip.open('data/cullpdb+profile_6133_filtered.npy.gz', 'rb'))\n", 91 | "X = np.reshape(X_in,(5534,700,57))\n", 92 | "del X_in\n", 93 | "\n", 94 | "# for joint learning\n", 95 | "labels_1 = X[:, :, 22:30] # secondary struture label\n", 96 | "labels_2 = X[:, :, 33:35] # solvent accessibility label\n", 97 | "####\n", 98 | "mask = 1 - X[:, :, 30] \n", 99 | "feature_index = np.hstack((np.arange(0,21),np.arange(35,56))) # 42-d features\n", 100 | "X = X[ :, :, feature_index]\n", 101 | "\n", 102 | "# getting meta\n", 103 | "num_seqs, seqlen, feature_dim = np.shape(X)\n", 104 | "num_classes = 8\n", 105 | "\n", 106 | "X = X.astype(theano.config.floatX)\n", 107 | "mask = mask.astype(theano.config.floatX)\n", 108 | "vals = np.arange(0,8)\n", 109 | "\n", 110 | "# secondary structure label\n", 111 | "labels_new_1 = np.zeros((num_seqs,seqlen))\n", 112 | "for i in xrange(np.size(labels_1,axis=0)):\n", 113 | " labels_new_1[i,:] = np.dot(labels_1[i,:,:], vals)\n", 114 | "labels_new_1 = labels_new_1.astype('int32')\n", 115 | "labels_1 = labels_new_1\n", 116 | "\n", 117 | "# solvent accessibility label\n", 118 | "labels_new_2 = np.zeros((num_seqs, seqlen))\n", 119 | "vals2 = np.array([2, 1])\n", 120 | "for i in xrange(np.size(labels_2,axis=0)):\n", 121 | " labels_new_2[i,:] = np.dot(labels_2[i,:,:], vals2)\n", 122 | "labels_new_2 = labels_new_2.astype('int32')\n", 123 | "labels_2 = labels_new_2\n", 124 | "\n", 125 | "seq_names = np.arange(0, num_seqs)\n", 126 | "np.random.shuffle(seq_names)\n", 127 | "X_train = X[seq_names[0:5022]]\n", 128 | "labels_1_train = labels_1[seq_names[0:5022]]\n", 129 | "labels_2_train = labels_2[seq_names[0:5022]]\n", 130 | "mask_train = mask[seq_names[0:5022]]\n", 131 | "\n", 132 | "# Using padding for batch usage\n", 133 | "X_add = np.zeros((98, seqlen, feature_dim))\n", 134 | "label_add = np.zeros((98, seqlen))\n", 135 | "mask_add = np.zeros((98, seqlen))\n", 136 | "\n", 137 | "X_train = np.concatenate((X_train,X_add), axis=0).astype(theano.config.floatX)\n", 138 | "labels_1_train = np.concatenate((labels_1_train, label_add), axis=0).astype('int32')\n", 139 | "labels_2_train = np.concatenate((labels_2_train, label_add), 
axis=0).astype('int32')\n", 140 | "mask_train = np.concatenate((mask_train, mask_add), axis=0).astype('uint8')\n", 141 | "\n", 142 | "print \"Train data shape is\", X_train.shape, \", two labels shapes are\", labels_1_train.shape, \\\n", 143 | "\"and\", labels_2_train.shape, \", mask shape is\", mask_train.shape, \"\\n\"\n", 144 | "\n", 145 | "\n", 146 | "\n", 147 | "####################################################################\n", 148 | "##### Validation DATA #####\n", 149 | "###################################################################\n", 150 | "X_valid = X[seq_names[5022:5534]]\n", 151 | "labels_1_valid = labels_1[seq_names[5022:5534]]\n", 152 | "labels_2_valid = labels_2[seq_names[5022:5534]]\n", 153 | "mask_valid = mask[seq_names[5022:5534]]\n", 154 | "\n", 155 | "print \"Validation data shape is\", X_valid.shape, \", two labels shapes are\", labels_1_valid.shape, \\\n", 156 | "\"and\", labels_2_valid.shape, \", mask shape is\", mask_valid.shape, \"\\n\"\n", 157 | "\n", 158 | "###########################################################################\n", 159 | "##### CB513 Test DATA #####\n", 160 | "##########################################################################\n", 161 | "print(\"Loading Test data (CB513)...\")\n", 162 | "X_in = np.load(gzip.open('data/cb513+profile_split1.npy.gz', 'rb'))\n", 163 | "X1 = np.reshape(X_in,(514,700,57))\n", 164 | "X = X1\n", 165 | "del X_in, X1\n", 166 | "\n", 167 | "# for joint learning\n", 168 | "labels_1 = X[:, :, 22:30] # secondary struture label\n", 169 | "labels_2 = X[:, :, 33:35] # solvent accessibility label\n", 170 | "####\n", 171 | "mask = 1 - X[:,:,30]\n", 172 | "\n", 173 | "feature_index = np.hstack((np.arange(0,21),np.arange(35,56))) # 42-d features\n", 174 | "X = X[ :, :, feature_index]\n", 175 | "\n", 176 | "# getting meta\n", 177 | "num_seqs, seqlen, feature_dim = np.shape(X)\n", 178 | "num_classes = 8\n", 179 | "\n", 180 | "X = X.astype(theano.config.floatX)\n", 181 | "mask = mask.astype(theano.config.floatX)\n", 182 | "vals = np.arange(0,8)\n", 183 | "\n", 184 | "# secondary structure label\n", 185 | "labels_new_1 = np.zeros((num_seqs, seqlen))\n", 186 | "for i in xrange(np.size(labels_1,axis=0)):\n", 187 | " labels_new_1[i,:] = np.dot(labels_1[i,:,:], vals)\n", 188 | "labels_new_1 = labels_new_1.astype('int32')\n", 189 | "labels_1 = labels_new_1\n", 190 | "# solvent accessibility label\n", 191 | "labels_new_2 = np.zeros((num_seqs,seqlen))\n", 192 | "vals2 = np.array([2, 1])\n", 193 | "for i in xrange(np.size(labels_2,axis=0)):\n", 194 | " labels_new_2[i,:] = np.dot(labels_2[i,:,:], vals2)\n", 195 | "labels_new_2 = labels_new_2.astype('int32')\n", 196 | "labels_2 = labels_new_2\n", 197 | "\n", 198 | "# When test, we use batch_size = 1, so there is no need for padding\n", 199 | "X_test_cb513 = X\n", 200 | "labels_1_test_cb513 = labels_1\n", 201 | "labels_2_test_cb513 = labels_2\n", 202 | "mask_test_cb513 = mask.astype('uint8')\n", 203 | "\n", 204 | "print \"Test data shape is\", X_test_cb513.shape, \", two labels shapes are\", labels_1_test_cb513.shape, \\\n", 205 | "\"and\", labels_2_test_cb513.shape, \", mask shape is\", mask_test_cb513.shape, \"\\n\"\n", 206 | "\n", 207 | "###############################################################################\n", 208 | "# CASP10\n", 209 | "##############################################################################\n", 210 | "print(\"Loading Test data (CASP10)...\")\n", 211 | "casp10 = h5py.File(\"data/casp10.h5\")\n", 212 | "casp10_feature = casp10['features'][:, 
:, 0:42].astype(\"float32\")\n", 213 | "\n", 214 | "casp10_labels_1 = casp10['labels'][:, :, 0:8]\n", 215 | "num_seqs, seqlen, feature_dim = np.shape(casp10_feature)\n", 216 | "num_classes = 8\n", 217 | "vals = np.arange(0,8)\n", 218 | "# secondary structure label\n", 219 | "labels_new_1 = np.zeros((num_seqs, seqlen))\n", 220 | "for i in xrange(num_seqs):\n", 221 | " labels_new_1[i,:] = np.dot(casp10_labels_1[i,:,:], vals)\n", 222 | "casp10_labels_1 = labels_new_1.astype('int32')\n", 223 | "\n", 224 | "casp10_mask = 1-casp10['features'][:, :, -1].astype('uint8')\n", 225 | "\n", 226 | "\n", 227 | "print \"casp10 data shape is\", casp10_feature.shape, \", labels shape is\", casp10_labels_1.shape, \\\n", 228 | "\", mask shape is\", casp10_mask.shape, \"\\n\"\n", 229 | "\n", 230 | "print \"load all data takes time %fs\" %(time.time()- time_start)" 231 | ] 232 | }, 233 | { 234 | "cell_type": "markdown", 235 | "metadata": {}, 236 | "source": [ 237 | "# Define pipeline, loss and compile" 238 | ] 239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": 3, 243 | "metadata": { 244 | "collapsed": false 245 | }, 246 | "outputs": [], 247 | "source": [ 248 | "###############################################################\n", 249 | "#### define pipeline ##########################################\n", 250 | "###############################################################\n", 251 | "# 1. Input layer\n", 252 | "l_in_1 = lasagne.layers.InputLayer(shape=(None, 700, 21))\n", 253 | "l_in_2 = lasagne.layers.InputLayer(shape=(None, 700, 21))\n", 254 | "\n", 255 | "# 1_1. Encode first 21 one-hot vector Layer and the concatenate with the last 21\n", 256 | "l_reshape_a1 = lasagne.layers.ReshapeLayer(l_in_1, (-1, 21))\n", 257 | "l_reshape_a2 = lasagne.layers.ReshapeLayer(l_in_2, (-1, 21)) \n", 258 | "\n", 259 | "l_in_1_embed = lasagne.layers.DenseLayer(l_reshape_a1, num_units=50, nonlinearity=lasagne.nonlinearities.rectify)\n", 260 | "l_1 = lasagne.layers.ConcatLayer(incomings=[l_in_1_embed, l_reshape_a2], axis=1) \n", 261 | "\n", 262 | "l_in = lasagne.layers.ReshapeLayer(l_1, (-1, 1, 700, 21+50))\n", 263 | "\n", 264 | "# multi-CNN\n", 265 | "# kernel_size = 3\n", 266 | "l_conv_a = lasagne.layers.Conv2DLayer(incoming=l_in, num_filters=64, pad= (3//2, 0),\n", 267 | " filter_size=(3, 21+50), stride=1, nonlinearity=lasagne.nonlinearities.rectify)\n", 268 | "l_conv_a1 = lasagne.layers.DimshuffleLayer(l_conv_a, (0,2,1,3))\n", 269 | "l_conv_a2 = lasagne.layers.ReshapeLayer(l_conv_a1, (-1, 700, 64))\n", 270 | "l_conv_a2 = lasagne.layers.batch_norm(l_conv_a2)\n", 271 | "\n", 272 | "# kernel_size = 7\n", 273 | "l_conv_b = lasagne.layers.Conv2DLayer(incoming=l_in, num_filters=64, pad= (7//2, 0),\n", 274 | " filter_size=(7, 21+50), stride=1, nonlinearity=lasagne.nonlinearities.rectify)\n", 275 | "l_conv_b1 = lasagne.layers.DimshuffleLayer(l_conv_b, (0,2,1,3))\n", 276 | "l_conv_b2 = lasagne.layers.ReshapeLayer(l_conv_b1, (-1, 700, 64))\n", 277 | "l_conv_b2 = lasagne.layers.batch_norm(l_conv_b2)\n", 278 | "# kernel_size = 11\n", 279 | "l_conv_c = lasagne.layers.Conv2DLayer(incoming=l_in, num_filters=64, pad= (11//2, 0),\n", 280 | " filter_size=(11, 21+50), stride=1, nonlinearity=lasagne.nonlinearities.rectify)\n", 281 | "l_conv_c1 = lasagne.layers.DimshuffleLayer(l_conv_c, (0,2,1,3))\n", 282 | "l_conv_c2 = lasagne.layers.ReshapeLayer(l_conv_c1, (-1, 700, 64))\n", 283 | "l_conv_c2 = lasagne.layers.batch_norm(l_conv_c2)\n", 284 | "\n", 285 | "l_c_a = lasagne.layers.ConcatLayer([l_conv_a2, l_conv_b2, l_conv_c2], axis=2) 
\n", 286 | "l_c_a_bn = lasagne.layers.batch_norm(l_c_a)\n", 287 | "# 3. GRU Layers\n", 288 | "# first GRU\n", 289 | "l_forward1 = lasagne.layers.GRULayer(lasagne.layers.dropout(l_c_a_bn, p=0.5), 300)\n", 290 | "l_backward1 = lasagne.layers.GRULayer(lasagne.layers.dropout(l_c_a_bn, p=0.5), 300, backwards=True) \n", 291 | "l_bgru1 = lasagne.layers.ConcatLayer(incomings=[l_forward1, l_backward1], axis=2) \n", 292 | "\n", 293 | "# second GRU \n", 294 | "l_forward2 = lasagne.layers.GRULayer(lasagne.layers.dropout(l_bgru1, p=0.5), 300)\n", 295 | "l_backward2 = lasagne.layers.GRULayer(lasagne.layers.dropout(l_bgru1, p=0.5), 300, backwards=True)\n", 296 | "l_bgru2 = lasagne.layers.ConcatLayer(incomings=[l_forward2, l_backward2], axis=2) \n", 297 | "\n", 298 | "# third GRU\n", 299 | "l_forward3 = lasagne.layers.GRULayer(lasagne.layers.dropout(l_bgru2, p=0.5), 300)\n", 300 | "l_backward3 = lasagne.layers.GRULayer(lasagne.layers.dropout(l_bgru2, p=0.5), 300, backwards=True) \n", 301 | "\n", 302 | "# Concat layer + local (contributution)\n", 303 | "l_sum = lasagne.layers.ConcatLayer(incomings=[l_forward3, l_backward3, l_c_a], axis=2) # add local context l_dim_b or l_c_b\n", 304 | "\n", 305 | "# 4. Second Dense Layer\n", 306 | "l_reshape_b = lasagne.layers.ReshapeLayer(l_sum, (-1, 300+300+64+64+64))\n", 307 | "\n", 308 | "# multi-output through joint feature learning, add two layers FC layers, mayer using Layer Normalization here as LN is better for FC layers\n", 309 | "l_2 = lasagne.layers.DenseLayer(lasagne.layers.dropout(l_reshape_b, p=0.5), num_units=300, nonlinearity=lasagne.nonlinearities.rectify)\n", 310 | "\n", 311 | "l_3 = lasagne.layers.DenseLayer(lasagne.layers.dropout(l_2, p=0.5), num_units=300, nonlinearity=lasagne.nonlinearities.rectify)\n", 312 | "\n", 313 | "# 5. 
Output Layer\n", 314 | "l_recurrent_out1 = lasagne.layers.DenseLayer( l_3, num_units=8, nonlinearity=lasagne.nonlinearities.softmax) # SS output\n", 315 | "l_recurrent_out2 = lasagne.layers.DenseLayer( l_3, num_units=4, nonlinearity=lasagne.nonlinearities.softmax) # SA output\n", 316 | "\n", 317 | "# Now, reshape the output back to the RNN format\n", 318 | "l_out_1 = lasagne.layers.ReshapeLayer( l_recurrent_out1, (-1, 700, 8))\n", 319 | "l_out_2 = lasagne.layers.ReshapeLayer( l_recurrent_out2, (-1, 700, 4))\n", 320 | "\n", 321 | "l_out = lasagne.layers.ConcatLayer(incomings=[l_out_1, l_out_2], axis=2)" 322 | ] 323 | }, 324 | { 325 | "cell_type": "code", 326 | "execution_count": 4, 327 | "metadata": { 328 | "collapsed": false, 329 | "scrolled": false 330 | }, 331 | "outputs": [ 332 | { 333 | "name": "stdout", 334 | "output_type": "stream", 335 | "text": [ 336 | "Building network ...\n", 337 | " number of parameters: 4572528\n", 338 | " layer output shapes:\n", 339 | " InputLayer (None, 700, 21)\n", 340 | " ReshapeLayer (None, 21)\n", 341 | " DenseLayer (None, 50)\n", 342 | " InputLayer (None, 700, 21)\n", 343 | " ReshapeLayer (None, 21)\n", 344 | " ConcatLayer (None, 71)\n", 345 | " ReshapeLayer (None, 1, 700, 71)\n", 346 | " Conv2DLayer (None, 64, 700, 1)\n", 347 | " DimshuffleLayer (None, 700, 64, 1)\n", 348 | " ReshapeLayer (None, 700, 64)\n", 349 | " BatchNormLayer (None, 700, 64)\n", 350 | " Conv2DLayer (None, 64, 700, 1)\n", 351 | " DimshuffleLayer (None, 700, 64, 1)\n", 352 | " ReshapeLayer (None, 700, 64)\n", 353 | " BatchNormLayer (None, 700, 64)\n", 354 | " Conv2DLayer (None, 64, 700, 1)\n", 355 | " DimshuffleLayer (None, 700, 64, 1)\n", 356 | " ReshapeLayer (None, 700, 64)\n", 357 | " BatchNormLayer (None, 700, 64)\n", 358 | " ConcatLayer (None, 700, 192)\n", 359 | " BatchNormLayer (None, 700, 192)\n", 360 | " DropoutLayer (None, 700, 192)\n", 361 | " GRULayer (None, 700, 300)\n", 362 | " DropoutLayer (None, 700, 192)\n", 363 | " GRULayer (None, 700, 300)\n", 364 | " ConcatLayer (None, 700, 600)\n", 365 | " DropoutLayer (None, 700, 600)\n", 366 | " GRULayer (None, 700, 300)\n", 367 | " DropoutLayer (None, 700, 600)\n", 368 | " GRULayer (None, 700, 300)\n", 369 | " ConcatLayer (None, 700, 600)\n", 370 | " DropoutLayer (None, 700, 600)\n", 371 | " GRULayer (None, 700, 300)\n", 372 | " DropoutLayer (None, 700, 600)\n", 373 | " GRULayer (None, 700, 300)\n", 374 | " ConcatLayer (None, 700, 792)\n", 375 | " ReshapeLayer (None, 792)\n", 376 | " DropoutLayer (None, 792)\n", 377 | " DenseLayer (None, 300)\n", 378 | " DropoutLayer (None, 300)\n", 379 | " DenseLayer (None, 300)\n", 380 | " DenseLayer (None, 8)\n", 381 | " ReshapeLayer (None, 700, 8)\n", 382 | " DenseLayer (None, 4)\n", 383 | " ReshapeLayer (None, 700, 4)\n", 384 | " ConcatLayer (None, 700, 12)\n", 385 | "Creating cost function\n", 386 | "Elemwise{mul,no_inplace}.0\n", 387 | "Elemwise{add,no_inplace}.0\n", 388 | "InplaceDimShuffle{1,0,2}.0\n", 389 | "InplaceDimShuffle{1,0,2}.0\n", 390 | "InplaceDimShuffle{1,0,2}.0\n", 391 | "Reshape{3}.0\n", 392 | "Elemwise{mul,no_inplace}.0\n", 393 | "Elemwise{add,no_inplace}.0\n", 394 | "InplaceDimShuffle{1,0,2}.0\n", 395 | "InplaceDimShuffle{1,0,2}.0\n", 396 | "InplaceDimShuffle{1,0,2}.0\n", 397 | "Reshape{3}.0\n", 398 | "Computing updates ...\n", 399 | "Compiling functions ...\n", 400 | "compile time 60.437900s\n" 401 | ] 402 | }, 403 | { 404 | "name": "stderr", 405 | "output_type": "stream", 406 | "text": [ 407 | 
"/home/zli/anaconda/lib/python2.7/site-packages/Lasagne-0.2.dev1-py2.7.egg/lasagne/layers/helper.py:212: UserWarning: get_output() was called with unused kwargs:\n", 408 | "\tmask\n", 409 | " % \"\\n\\t\".join(suggestions))\n" 410 | ] 411 | } 412 | ], 413 | "source": [ 414 | "################################################################\n", 415 | "#### Compile model\n", 416 | "################################################################\n", 417 | "sym_y_1 = T.imatrix('target_output_8')\n", 418 | "sym_y_2 = T.imatrix('target_output_4')\n", 419 | "sym_mask = T.imatrix('mask')\n", 420 | "sym_x_1 = T.tensor3(name='feature_one_hot', dtype='float32')\n", 421 | "sym_x_2 = T.tensor3(name='feature_float', dtype='float32')\n", 422 | "\n", 423 | "TOL = 1e-5\n", 424 | "\n", 425 | "print(\"Building network ...\")\n", 426 | "\n", 427 | "##########################DEBUG##########################\n", 428 | "all_layers = lasagne.layers.get_all_layers(l_out)\n", 429 | "num_params = lasagne.layers.count_params(l_out)\n", 430 | "print(\" number of parameters: %d\" % num_params)\n", 431 | "print(\" layer output shapes:\")\n", 432 | "for layer in all_layers:\n", 433 | " name = string.ljust(layer.__class__.__name__, 32)\n", 434 | " print(\" %s %s\" % (name, lasagne.layers.get_output_shape(layer)))\n", 435 | "print(\"Creating cost function\")\n", 436 | "# lasagne.layers.get_output produces a variable for the output of the net\n", 437 | "out_train = lasagne.layers.get_output(l_out, {l_in_1: sym_x_1, l_in_2: sym_x_2}, \n", 438 | " mask=sym_mask, deterministic=False)\n", 439 | "out_eval = lasagne.layers.get_output(l_out, {l_in_1: sym_x_1, l_in_2: sym_x_2}, \n", 440 | " mask=sym_mask, deterministic=True)\n", 441 | "\n", 442 | "ss = out_train[:,:,0:8].reshape((-1, 8))\n", 443 | "sa = out_train[:,:,8:12].reshape((-1, 4))\n", 444 | "#probs_flat = out_train.reshape((-1, num_classes))\n", 445 | "\n", 446 | "lambda_reg = 0.0001\n", 447 | "params = lasagne.layers.get_all_params(l_out, regularizable=True)\n", 448 | "reg_term = sum(T.sum(p**2) for p in params)\n", 449 | "######### calculate cost function #######\n", 450 | "cost11 = T.nnet.categorical_crossentropy(T.clip(ss, TOL, 1-TOL), sym_y_1.flatten())\n", 451 | "cost21 = T.nnet.categorical_crossentropy(T.clip(sa, TOL, 1-TOL), sym_y_2.flatten())\n", 452 | "\n", 453 | "cost1 = T.sum(cost11*sym_mask.flatten()) / T.sum(sym_mask)\n", 454 | "cost2 = T.sum(cost21*sym_mask.flatten()) / T.sum(sym_mask)\n", 455 | "\n", 456 | "cost = cost1 + cost2 + lambda_reg * reg_term\n", 457 | "\n", 458 | "# Retrieve all parameters from the network\n", 459 | "all_params = lasagne.layers.get_all_params(l_out, trainable=True)\n", 460 | "# Compute SGD updates for training\n", 461 | "print(\"Computing updates ...\")\n", 462 | "#learning_rate = theano.shared(0.001)\n", 463 | "all_grads = T.grad(cost, all_params)\n", 464 | "updates, norm_calc = lasagne.updates.total_norm_constraint(all_grads, max_norm=15, return_norm=True)\n", 465 | "updates = lasagne.updates.adam(updates, all_params, learning_rate=0.001)\n", 466 | "\n", 467 | "\n", 468 | "print(\"Compiling functions ...\")\n", 469 | "t_compile = time.time()\n", 470 | "train = theano.function( [sym_x_1, sym_x_2, sym_y_1, sym_y_2, sym_mask], \n", 471 | " [cost1, cost2, cost, out_train, norm_calc], \n", 472 | " updates=updates, allow_input_downcast=True)\n", 473 | "\n", 474 | "eval = theano.function([sym_x_1, sym_x_2, sym_y_1, sym_y_2, sym_mask], \n", 475 | " [cost1, cost2, cost, out_eval], \n", 476 | " allow_input_downcast=True)\n", 477 | "\n", 
478 | "print \"compile time %fs\" %(time.time()-t_compile)" 479 | ] 480 | }, 481 | { 482 | "cell_type": "markdown", 483 | "metadata": { 484 | "collapsed": false 485 | }, 486 | "source": [ 487 | "# Training, validation and testing" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": 5, 493 | "metadata": { 494 | "collapsed": true 495 | }, 496 | "outputs": [], 497 | "source": [ 498 | "def confmatrix(out, label, mask, num_classes):\n", 499 | " mask1 = np.zeros(mask.shape, dtype=np.bool)\n", 500 | " for i in range(mask.shape[0]):\n", 501 | " for j in range(mask.shape[1]):\n", 502 | " mask1[i][j] = mask[i][j]\n", 503 | "\n", 504 | " confmatrix = ConfusionMatrix(num_classes)\n", 505 | " mask_flat = mask1.flatten()\n", 506 | " out_preds = np.argmax(out, axis=2).flatten()\n", 507 | " label1 = label.flatten()\n", 508 | " confmatrix.batchAdd(label1[mask_flat], out_preds[mask_flat])\n", 509 | " return confmatrix" 510 | ] 511 | }, 512 | { 513 | "cell_type": "code", 514 | "execution_count": 6, 515 | "metadata": { 516 | "collapsed": false, 517 | "scrolled": true 518 | }, 519 | "outputs": [ 520 | { 521 | "name": "stdout", 522 | "output_type": "stream", 523 | "text": [ 524 | "Epoch 1 of 200\n", 525 | "One epoch training cost time 163.456431s\n", 526 | "0th training epoch totally loss is 6.767520\n", 527 | "0th training epoch secondary structure Q8 mean accuracy is 0.344183\n", 528 | "0th training epoch solvent accessibility Q4 mean accuracy is 0.596803\n", 529 | "One epoch training cost time 163.399285s\n", 530 | "1th training epoch totally loss is 5.292805\n", 531 | "1th training epoch secondary structure Q8 mean accuracy is 0.533833\n", 532 | "1th training epoch solvent accessibility Q4 mean accuracy is 0.698404\n", 533 | "One epoch training cost time 163.384790s\n", 534 | "2th training epoch totally loss is 4.378830\n", 535 | "2th training epoch secondary structure Q8 mean accuracy is 0.612936\n", 536 | "2th training epoch solvent accessibility Q4 mean accuracy is 0.716157\n", 537 | "One epoch training cost time 163.597021s\n", 538 | "3th training epoch totally loss is 3.765926\n", 539 | "3th training epoch secondary structure Q8 mean accuracy is 0.637823\n", 540 | "3th training epoch solvent accessibility Q4 mean accuracy is 0.725040\n", 541 | "One epoch training cost time 163.676915s\n", 542 | "4th training epoch totally loss is 3.319596\n", 543 | "4th training epoch secondary structure Q8 mean accuracy is 0.651715\n", 544 | "4th training epoch solvent accessibility Q4 mean accuracy is 0.729329\n", 545 | "One epoch validation cost time 13.279331s\n", 546 | "4th validation epoch secondary structure Q8 mean accuracy is 0.678124\n", 547 | "4th validation epoch solvent accessibility Q4 mean accuracy is 0.743762 \n", 548 | "\n", 549 | "One epoch training cost time 163.715295s\n", 550 | "5th training epoch totally loss is 2.988540\n", 551 | "5th training epoch secondary structure Q8 mean accuracy is 0.658810\n", 552 | "5th training epoch solvent accessibility Q4 mean accuracy is 0.733683\n", 553 | "One epoch training cost time 162.803671s\n", 554 | "6th training epoch totally loss is 2.737146\n", 555 | "6th training epoch secondary structure Q8 mean accuracy is 0.664386\n", 556 | "6th training epoch solvent accessibility Q4 mean accuracy is 0.736767\n", 557 | "One epoch training cost time 162.244224s\n", 558 | "7th training epoch totally loss is 2.542383\n", 559 | "7th training epoch secondary structure Q8 mean accuracy is 0.669254\n", 560 | "7th training epoch solvent accessibility 
Q4 mean accuracy is 0.737604\n", 561 | "One epoch training cost time 162.400461s\n", 562 | "8th training epoch totally loss is 2.389239\n", 563 | "8th training epoch secondary structure Q8 mean accuracy is 0.672649\n", 564 | "8th training epoch solvent accessibility Q4 mean accuracy is 0.739941\n", 565 | "One epoch training cost time 162.510310s\n", 566 | "9th training epoch totally loss is 2.262817\n", 567 | "9th training epoch secondary structure Q8 mean accuracy is 0.676493\n", 568 | "9th training epoch solvent accessibility Q4 mean accuracy is 0.741218\n", 569 | "One epoch validation cost time 13.313993s\n", 570 | "9th validation epoch secondary structure Q8 mean accuracy is 0.697698\n", 571 | "9th validation epoch solvent accessibility Q4 mean accuracy is 0.751917 \n", 572 | "\n", 573 | "Epoch 11 of 200\n", 574 | "One epoch training cost time 162.813334s\n", 575 | "10th training epoch totally loss is 2.163592\n", 576 | "10th training epoch secondary structure Q8 mean accuracy is 0.679781\n", 577 | "10th training epoch solvent accessibility Q4 mean accuracy is 0.742472\n", 578 | "One epoch training cost time 162.462113s\n", 579 | "11th training epoch totally loss is 2.084252\n", 580 | "11th training epoch secondary structure Q8 mean accuracy is 0.681331\n", 581 | "11th training epoch solvent accessibility Q4 mean accuracy is 0.743335\n", 582 | "One epoch training cost time 163.387861s\n", 583 | "12th training epoch totally loss is 2.012107\n", 584 | "12th training epoch secondary structure Q8 mean accuracy is 0.684040\n", 585 | "12th training epoch solvent accessibility Q4 mean accuracy is 0.744846\n", 586 | "One epoch training cost time 162.677129s\n", 587 | "13th training epoch totally loss is 1.967229\n", 588 | "13th training epoch secondary structure Q8 mean accuracy is 0.684221\n", 589 | "13th training epoch solvent accessibility Q4 mean accuracy is 0.743468\n", 590 | "One epoch training cost time 162.439028s\n", 591 | "14th training epoch totally loss is 1.906868\n", 592 | "14th training epoch secondary structure Q8 mean accuracy is 0.687505\n", 593 | "14th training epoch solvent accessibility Q4 mean accuracy is 0.746055\n", 594 | "One epoch validation cost time 13.271579s\n", 595 | "14th validation epoch secondary structure Q8 mean accuracy is 0.705130\n", 596 | "14th validation epoch solvent accessibility Q4 mean accuracy is 0.755069 \n", 597 | "\n", 598 | "One epoch training cost time 162.551641s\n", 599 | "15th training epoch totally loss is 1.863219\n", 600 | "15th training epoch secondary structure Q8 mean accuracy is 0.689449\n", 601 | "15th training epoch solvent accessibility Q4 mean accuracy is 0.746678\n", 602 | "One epoch training cost time 162.289224s\n", 603 | "16th training epoch totally loss is 1.830614\n", 604 | "16th training epoch secondary structure Q8 mean accuracy is 0.689774\n", 605 | "16th training epoch solvent accessibility Q4 mean accuracy is 0.747002\n", 606 | "One epoch training cost time 162.213595s\n", 607 | "17th training epoch totally loss is 1.792755\n", 608 | "17th training epoch secondary structure Q8 mean accuracy is 0.692170\n", 609 | "17th training epoch solvent accessibility Q4 mean accuracy is 0.748306\n", 610 | "One epoch training cost time 162.422325s\n", 611 | "18th training epoch totally loss is 1.762141\n", 612 | "18th training epoch secondary structure Q8 mean accuracy is 0.694008\n", 613 | "18th training epoch solvent accessibility Q4 mean accuracy is 0.748951\n", 614 | "One epoch training cost time 162.276581s\n", 615 | "19th 
training epoch totally loss is 1.741771\n", 616 | "19th training epoch secondary structure Q8 mean accuracy is 0.694478\n", 617 | "19th training epoch solvent accessibility Q4 mean accuracy is 0.748290\n", 618 | "One epoch validation cost time 13.358646s\n", 619 | "19th validation epoch secondary structure Q8 mean accuracy is 0.710526\n", 620 | "19th validation epoch solvent accessibility Q4 mean accuracy is 0.757021 \n", 621 | "\n", 622 | "One epoch cb513 test cost time 109.444684s\n", 623 | "19th cb513 test epoch secondary structure Q8 mean accuracy is 0.668743\n", 624 | "19th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.370412\n", 625 | "One epoch casp10 test cost time 26.180744s\n", 626 | "19th casp10 test epoch secondary structure Q8 mean accuracy is 0.697019\n", 627 | "Epoch 21 of 200\n", 628 | "One epoch training cost time 162.361153s\n", 629 | "20th training epoch totally loss is 1.720088\n", 630 | "20th training epoch secondary structure Q8 mean accuracy is 0.695118\n", 631 | "20th training epoch solvent accessibility Q4 mean accuracy is 0.749128\n", 632 | "One epoch training cost time 162.524833s\n", 633 | "21th training epoch totally loss is 1.696352\n", 634 | "21th training epoch secondary structure Q8 mean accuracy is 0.696591\n", 635 | "21th training epoch solvent accessibility Q4 mean accuracy is 0.750101\n", 636 | "One epoch training cost time 162.952172s\n", 637 | "22th training epoch totally loss is 1.679996\n", 638 | "22th training epoch secondary structure Q8 mean accuracy is 0.696902\n", 639 | "22th training epoch solvent accessibility Q4 mean accuracy is 0.750815\n", 640 | "One epoch training cost time 162.717349s\n", 641 | "23th training epoch totally loss is 1.663509\n", 642 | "23th training epoch secondary structure Q8 mean accuracy is 0.697338\n", 643 | "23th training epoch solvent accessibility Q4 mean accuracy is 0.751343\n", 644 | "One epoch training cost time 162.442907s\n", 645 | "24th training epoch totally loss is 1.648205\n", 646 | "24th training epoch secondary structure Q8 mean accuracy is 0.698781\n", 647 | "24th training epoch solvent accessibility Q4 mean accuracy is 0.751596\n", 648 | "One epoch validation cost time 13.281857s\n", 649 | "24th validation epoch secondary structure Q8 mean accuracy is 0.714026\n", 650 | "24th validation epoch solvent accessibility Q4 mean accuracy is 0.757688 \n", 651 | "\n", 652 | "One epoch training cost time 162.626333s\n", 653 | "25th training epoch totally loss is 1.629889\n", 654 | "25th training epoch secondary structure Q8 mean accuracy is 0.701173\n", 655 | "25th training epoch solvent accessibility Q4 mean accuracy is 0.752209\n", 656 | "One epoch training cost time 162.291147s\n", 657 | "26th training epoch totally loss is 1.622761\n", 658 | "26th training epoch secondary structure Q8 mean accuracy is 0.699820\n", 659 | "26th training epoch solvent accessibility Q4 mean accuracy is 0.752514\n", 660 | "One epoch training cost time 162.697999s\n", 661 | "27th training epoch totally loss is 1.620378\n", 662 | "27th training epoch secondary structure Q8 mean accuracy is 0.699723\n", 663 | "27th training epoch solvent accessibility Q4 mean accuracy is 0.750625\n", 664 | "One epoch training cost time 162.307866s\n", 665 | "28th training epoch totally loss is 1.596780\n", 666 | "28th training epoch secondary structure Q8 mean accuracy is 0.702580\n", 667 | "28th training epoch solvent accessibility Q4 mean accuracy is 0.753366\n", 668 | "One epoch training cost time 162.601193s\n", 669 | "29th 
training epoch totally loss is 1.588354\n", 670 | "29th training epoch secondary structure Q8 mean accuracy is 0.702811\n", 671 | "29th training epoch solvent accessibility Q4 mean accuracy is 0.754000\n", 672 | "One epoch validation cost time 13.258520s\n", 673 | "29th validation epoch secondary structure Q8 mean accuracy is 0.715940\n", 674 | "29th validation epoch solvent accessibility Q4 mean accuracy is 0.759705 \n", 675 | "\n", 676 | "Epoch 31 of 200\n", 677 | "One epoch training cost time 162.467532s\n", 678 | "30th training epoch totally loss is 1.579856\n", 679 | "30th training epoch secondary structure Q8 mean accuracy is 0.704151\n", 680 | "30th training epoch solvent accessibility Q4 mean accuracy is 0.753959\n", 681 | "One epoch training cost time 162.645181s\n", 682 | "31th training epoch totally loss is 1.576357\n", 683 | "31th training epoch secondary structure Q8 mean accuracy is 0.703674\n", 684 | "31th training epoch solvent accessibility Q4 mean accuracy is 0.753522\n", 685 | "One epoch training cost time 162.445379s\n", 686 | "32th training epoch totally loss is 1.569378\n", 687 | "32th training epoch secondary structure Q8 mean accuracy is 0.704537\n", 688 | "32th training epoch solvent accessibility Q4 mean accuracy is 0.753997\n", 689 | "One epoch training cost time 162.295338s\n", 690 | "33th training epoch totally loss is 1.559616\n", 691 | "33th training epoch secondary structure Q8 mean accuracy is 0.705358\n", 692 | "33th training epoch solvent accessibility Q4 mean accuracy is 0.754315\n", 693 | "One epoch training cost time 162.402974s\n", 694 | "34th training epoch totally loss is 1.554862\n", 695 | "34th training epoch secondary structure Q8 mean accuracy is 0.705950\n", 696 | "34th training epoch solvent accessibility Q4 mean accuracy is 0.754827\n", 697 | "One epoch validation cost time 13.297301s\n", 698 | "34th validation epoch secondary structure Q8 mean accuracy is 0.717695\n", 699 | "34th validation epoch solvent accessibility Q4 mean accuracy is 0.761676 \n", 700 | "\n", 701 | "One epoch training cost time 162.320167s\n", 702 | "35th training epoch totally loss is 1.549023\n", 703 | "35th training epoch secondary structure Q8 mean accuracy is 0.706022\n", 704 | "35th training epoch solvent accessibility Q4 mean accuracy is 0.755058\n", 705 | "One epoch training cost time 162.308239s\n", 706 | "36th training epoch totally loss is 1.544834\n", 707 | "36th training epoch secondary structure Q8 mean accuracy is 0.706556\n", 708 | "36th training epoch solvent accessibility Q4 mean accuracy is 0.754873\n", 709 | "One epoch training cost time 162.503692s\n", 710 | "37th training epoch totally loss is 1.538368\n", 711 | "37th training epoch secondary structure Q8 mean accuracy is 0.707911\n", 712 | "37th training epoch solvent accessibility Q4 mean accuracy is 0.754983\n", 713 | "One epoch training cost time 162.440773s\n", 714 | "38th training epoch totally loss is 1.532394\n", 715 | "38th training epoch secondary structure Q8 mean accuracy is 0.708537\n", 716 | "38th training epoch solvent accessibility Q4 mean accuracy is 0.755668\n", 717 | "One epoch training cost time 162.506856s\n", 718 | "39th training epoch totally loss is 1.525135\n", 719 | "39th training epoch secondary structure Q8 mean accuracy is 0.708993\n", 720 | "39th training epoch solvent accessibility Q4 mean accuracy is 0.756257\n", 721 | "One epoch validation cost time 13.293106s\n", 722 | "39th validation epoch secondary structure Q8 mean accuracy is 0.718483\n", 723 | "39th validation 
epoch solvent accessibility Q4 mean accuracy is 0.759733 \n", 724 | "\n", 725 | "One epoch cb513 test cost time 110.186140s\n", 726 | "39th cb513 test epoch secondary structure Q8 mean accuracy is 0.675114\n", 727 | "39th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.292444\n", 728 | "One epoch casp10 test cost time 26.261494s\n", 729 | "39th casp10 test epoch secondary structure Q8 mean accuracy is 0.712626\n", 730 | "Epoch 41 of 200\n", 731 | "One epoch training cost time 162.207479s\n", 732 | "40th training epoch totally loss is 1.524959\n", 733 | "40th training epoch secondary structure Q8 mean accuracy is 0.708633\n", 734 | "40th training epoch solvent accessibility Q4 mean accuracy is 0.756228\n", 735 | "One epoch training cost time 162.861079s\n", 736 | "41th training epoch totally loss is 1.520993\n", 737 | "41th training epoch secondary structure Q8 mean accuracy is 0.709015\n", 738 | "41th training epoch solvent accessibility Q4 mean accuracy is 0.756597\n", 739 | "One epoch training cost time 162.630279s\n", 740 | "42th training epoch totally loss is 1.515070\n", 741 | "42th training epoch secondary structure Q8 mean accuracy is 0.710292\n", 742 | "42th training epoch solvent accessibility Q4 mean accuracy is 0.757004\n", 743 | "One epoch training cost time 162.397524s\n", 744 | "43th training epoch totally loss is 1.510492\n", 745 | "43th training epoch secondary structure Q8 mean accuracy is 0.710770\n", 746 | "43th training epoch solvent accessibility Q4 mean accuracy is 0.757344\n", 747 | "One epoch training cost time 162.300596s\n", 748 | "44th training epoch totally loss is 1.517041\n", 749 | "44th training epoch secondary structure Q8 mean accuracy is 0.708932\n", 750 | "44th training epoch solvent accessibility Q4 mean accuracy is 0.756546\n", 751 | "One epoch validation cost time 13.288466s\n", 752 | "44th validation epoch secondary structure Q8 mean accuracy is 0.715715\n", 753 | "44th validation epoch solvent accessibility Q4 mean accuracy is 0.760897 \n", 754 | "\n", 755 | "One epoch training cost time 162.306470s\n", 756 | "45th training epoch totally loss is 1.515725\n", 757 | "45th training epoch secondary structure Q8 mean accuracy is 0.709352\n", 758 | "45th training epoch solvent accessibility Q4 mean accuracy is 0.756776\n", 759 | "One epoch training cost time 162.297966s\n", 760 | "46th training epoch totally loss is 1.508556\n", 761 | "46th training epoch secondary structure Q8 mean accuracy is 0.710606\n", 762 | "46th training epoch solvent accessibility Q4 mean accuracy is 0.757289\n", 763 | "One epoch training cost time 162.285701s\n", 764 | "47th training epoch totally loss is 1.501611\n", 765 | "47th training epoch secondary structure Q8 mean accuracy is 0.712285\n", 766 | "47th training epoch solvent accessibility Q4 mean accuracy is 0.757462\n", 767 | "One epoch training cost time 162.820768s\n", 768 | "48th training epoch totally loss is 1.496308\n", 769 | "48th training epoch secondary structure Q8 mean accuracy is 0.713239\n", 770 | "48th training epoch solvent accessibility Q4 mean accuracy is 0.757489\n", 771 | "One epoch training cost time 162.469818s\n", 772 | "49th training epoch totally loss is 1.500599\n", 773 | "49th training epoch secondary structure Q8 mean accuracy is 0.712120\n", 774 | "49th training epoch solvent accessibility Q4 mean accuracy is 0.757019\n", 775 | "One epoch validation cost time 13.286005s\n", 776 | "49th validation epoch secondary structure Q8 mean accuracy is 0.717385\n", 777 | "49th validation epoch 
solvent accessibility Q4 mean accuracy is 0.761966 \n", 778 | "\n", 779 | "Epoch 51 of 200\n", 780 | "One epoch training cost time 162.303976s\n", 781 | "50th training epoch totally loss is 1.492371\n", 782 | "50th training epoch secondary structure Q8 mean accuracy is 0.713637\n", 783 | "50th training epoch solvent accessibility Q4 mean accuracy is 0.757997\n", 784 | "One epoch training cost time 162.257096s\n", 785 | "51th training epoch totally loss is 1.495148\n", 786 | "51th training epoch secondary structure Q8 mean accuracy is 0.712421\n", 787 | "51th training epoch solvent accessibility Q4 mean accuracy is 0.758260\n", 788 | "One epoch training cost time 162.630488s\n", 789 | "52th training epoch totally loss is 1.492434\n", 790 | "52th training epoch secondary structure Q8 mean accuracy is 0.713892\n", 791 | "52th training epoch solvent accessibility Q4 mean accuracy is 0.757841\n", 792 | "One epoch training cost time 162.547182s\n", 793 | "53th training epoch totally loss is 1.486185\n", 794 | "53th training epoch secondary structure Q8 mean accuracy is 0.715019\n", 795 | "53th training epoch solvent accessibility Q4 mean accuracy is 0.758222\n", 796 | "One epoch training cost time 162.454626s\n", 797 | "54th training epoch totally loss is 1.490497\n", 798 | "54th training epoch secondary structure Q8 mean accuracy is 0.714002\n", 799 | "54th training epoch solvent accessibility Q4 mean accuracy is 0.757591\n", 800 | "One epoch validation cost time 13.265314s\n", 801 | "54th validation epoch secondary structure Q8 mean accuracy is 0.721842\n", 802 | "54th validation epoch solvent accessibility Q4 mean accuracy is 0.762764 \n", 803 | "\n", 804 | "One epoch training cost time 162.435677s\n", 805 | "55th training epoch totally loss is 1.486517\n", 806 | "55th training epoch secondary structure Q8 mean accuracy is 0.714161\n", 807 | "55th training epoch solvent accessibility Q4 mean accuracy is 0.758320\n", 808 | "One epoch training cost time 162.523041s\n", 809 | "56th training epoch totally loss is 1.483784\n", 810 | "56th training epoch secondary structure Q8 mean accuracy is 0.715299\n", 811 | "56th training epoch solvent accessibility Q4 mean accuracy is 0.758843\n", 812 | "One epoch training cost time 162.491099s\n", 813 | "57th training epoch totally loss is 1.480366\n", 814 | "57th training epoch secondary structure Q8 mean accuracy is 0.716003\n", 815 | "57th training epoch solvent accessibility Q4 mean accuracy is 0.758359\n", 816 | "One epoch training cost time 163.110932s\n", 817 | "58th training epoch totally loss is 1.477032\n", 818 | "58th training epoch secondary structure Q8 mean accuracy is 0.717088\n", 819 | "58th training epoch solvent accessibility Q4 mean accuracy is 0.758507\n", 820 | "One epoch training cost time 162.533791s\n", 821 | "59th training epoch totally loss is 1.477839\n", 822 | "59th training epoch secondary structure Q8 mean accuracy is 0.716316\n", 823 | "59th training epoch solvent accessibility Q4 mean accuracy is 0.758661\n", 824 | "One epoch validation cost time 13.312988s\n", 825 | "59th validation epoch secondary structure Q8 mean accuracy is 0.722377\n", 826 | "59th validation epoch solvent accessibility Q4 mean accuracy is 0.763177 \n", 827 | "\n", 828 | "One epoch cb513 test cost time 109.758375s\n", 829 | "59th cb513 test epoch secondary structure Q8 mean accuracy is 0.677025\n", 830 | "59th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.353743\n", 831 | "One epoch casp10 test cost time 26.135666s\n", 832 | "59th casp10 
test epoch secondary structure Q8 mean accuracy is 0.711674\n", 833 | "Epoch 61 of 200\n", 834 | "One epoch training cost time 162.797268s\n", 835 | "60th training epoch totally loss is 1.480659\n", 836 | "60th training epoch secondary structure Q8 mean accuracy is 0.716062\n", 837 | "60th training epoch solvent accessibility Q4 mean accuracy is 0.758706\n", 838 | "One epoch training cost time 162.943838s\n", 839 | "61th training epoch totally loss is 1.476625\n", 840 | "61th training epoch secondary structure Q8 mean accuracy is 0.716876\n", 841 | "61th training epoch solvent accessibility Q4 mean accuracy is 0.758721\n", 842 | "One epoch training cost time 163.929018s\n", 843 | "62th training epoch totally loss is 1.472632\n", 844 | "62th training epoch secondary structure Q8 mean accuracy is 0.717469\n", 845 | "62th training epoch solvent accessibility Q4 mean accuracy is 0.759427\n", 846 | "One epoch training cost time 162.778641s\n", 847 | "63th training epoch totally loss is 1.471074\n", 848 | "63th training epoch secondary structure Q8 mean accuracy is 0.717324\n", 849 | "63th training epoch solvent accessibility Q4 mean accuracy is 0.759868\n", 850 | "One epoch training cost time 163.401410s\n", 851 | "64th training epoch totally loss is 1.471965\n", 852 | "64th training epoch secondary structure Q8 mean accuracy is 0.717671\n", 853 | "64th training epoch solvent accessibility Q4 mean accuracy is 0.759631\n", 854 | "One epoch validation cost time 13.271884s\n", 855 | "64th validation epoch secondary structure Q8 mean accuracy is 0.723212\n", 856 | "64th validation epoch solvent accessibility Q4 mean accuracy is 0.760878 \n", 857 | "\n", 858 | "One epoch training cost time 163.021272s\n", 859 | "65th training epoch totally loss is 1.469335\n", 860 | "65th training epoch secondary structure Q8 mean accuracy is 0.717526\n", 861 | "65th training epoch solvent accessibility Q4 mean accuracy is 0.760217\n", 862 | "One epoch training cost time 163.759352s\n", 863 | "66th training epoch totally loss is 1.467968\n", 864 | "66th training epoch secondary structure Q8 mean accuracy is 0.718532\n", 865 | "66th training epoch solvent accessibility Q4 mean accuracy is 0.759941\n", 866 | "One epoch training cost time 163.541954s\n", 867 | "67th training epoch totally loss is 1.466028\n", 868 | "67th training epoch secondary structure Q8 mean accuracy is 0.718723\n", 869 | "67th training epoch solvent accessibility Q4 mean accuracy is 0.760335\n", 870 | "One epoch training cost time 163.667050s\n", 871 | "68th training epoch totally loss is 1.467066\n", 872 | "68th training epoch secondary structure Q8 mean accuracy is 0.719194\n", 873 | "68th training epoch solvent accessibility Q4 mean accuracy is 0.759240\n", 874 | "One epoch training cost time 163.469247s\n", 875 | "69th training epoch totally loss is 1.466792\n", 876 | "69th training epoch secondary structure Q8 mean accuracy is 0.718716\n", 877 | "69th training epoch solvent accessibility Q4 mean accuracy is 0.759988\n", 878 | "One epoch validation cost time 13.294188s\n", 879 | "69th validation epoch secondary structure Q8 mean accuracy is 0.723043\n", 880 | "69th validation epoch solvent accessibility Q4 mean accuracy is 0.763787 \n", 881 | "\n", 882 | "Epoch 71 of 200\n", 883 | "One epoch training cost time 163.599038s\n", 884 | "70th training epoch totally loss is 1.460511\n", 885 | "70th training epoch secondary structure Q8 mean accuracy is 0.720515\n", 886 | "70th training epoch solvent accessibility Q4 mean accuracy is 0.760182\n", 
887 | "One epoch training cost time 163.648231s\n", 888 | "71th training epoch totally loss is 1.459459\n", 889 | "71th training epoch secondary structure Q8 mean accuracy is 0.720146\n", 890 | "71th training epoch solvent accessibility Q4 mean accuracy is 0.760397\n", 891 | "One epoch training cost time 162.556562s\n", 892 | "72th training epoch totally loss is 1.462411\n", 893 | "72th training epoch secondary structure Q8 mean accuracy is 0.719569\n", 894 | "72th training epoch solvent accessibility Q4 mean accuracy is 0.760186\n", 895 | "One epoch training cost time 162.310735s\n", 896 | "73th training epoch totally loss is 1.467809\n", 897 | "73th training epoch secondary structure Q8 mean accuracy is 0.718148\n", 898 | "73th training epoch solvent accessibility Q4 mean accuracy is 0.759879\n", 899 | "One epoch training cost time 162.087431s\n", 900 | "74th training epoch totally loss is 1.460010\n", 901 | "74th training epoch secondary structure Q8 mean accuracy is 0.720637\n", 902 | "74th training epoch solvent accessibility Q4 mean accuracy is 0.759782\n", 903 | "One epoch validation cost time 13.284131s\n", 904 | "74th validation epoch secondary structure Q8 mean accuracy is 0.725774\n", 905 | "74th validation epoch solvent accessibility Q4 mean accuracy is 0.765870 \n", 906 | "\n", 907 | "One epoch training cost time 162.220149s\n", 908 | "75th training epoch totally loss is 1.460059\n", 909 | "75th training epoch secondary structure Q8 mean accuracy is 0.720369\n", 910 | "75th training epoch solvent accessibility Q4 mean accuracy is 0.760826\n", 911 | "One epoch training cost time 162.212999s\n", 912 | "76th training epoch totally loss is 1.460844\n", 913 | "76th training epoch secondary structure Q8 mean accuracy is 0.720316\n", 914 | "76th training epoch solvent accessibility Q4 mean accuracy is 0.760328\n", 915 | "One epoch training cost time 162.451052s\n", 916 | "77th training epoch totally loss is 1.454412\n", 917 | "77th training epoch secondary structure Q8 mean accuracy is 0.722016\n", 918 | "77th training epoch solvent accessibility Q4 mean accuracy is 0.760987\n", 919 | "One epoch training cost time 162.336045s\n", 920 | "78th training epoch totally loss is 1.459314\n", 921 | "78th training epoch secondary structure Q8 mean accuracy is 0.720812\n", 922 | "78th training epoch solvent accessibility Q4 mean accuracy is 0.760407\n", 923 | "One epoch training cost time 161.389879s\n", 924 | "79th training epoch totally loss is 1.455540\n", 925 | "79th training epoch secondary structure Q8 mean accuracy is 0.721015\n", 926 | "79th training epoch solvent accessibility Q4 mean accuracy is 0.761060\n", 927 | "One epoch validation cost time 13.254555s\n", 928 | "79th validation epoch secondary structure Q8 mean accuracy is 0.727576\n", 929 | "79th validation epoch solvent accessibility Q4 mean accuracy is 0.765223 \n", 930 | "\n", 931 | "One epoch cb513 test cost time 107.404497s\n", 932 | "79th cb513 test epoch secondary structure Q8 mean accuracy is 0.678193\n", 933 | "79th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.316156\n", 934 | "One epoch casp10 test cost time 25.432087s\n", 935 | "79th casp10 test epoch secondary structure Q8 mean accuracy is 0.709405\n", 936 | "Epoch 81 of 200\n", 937 | "One epoch training cost time 161.738271s\n", 938 | "80th training epoch totally loss is 1.455020\n", 939 | "80th training epoch secondary structure Q8 mean accuracy is 0.721655\n", 940 | "80th training epoch solvent accessibility Q4 mean accuracy is 0.761189\n", 941 | 
"One epoch training cost time 162.571980s\n", 942 | "81th training epoch totally loss is 1.457978\n", 943 | "81th training epoch secondary structure Q8 mean accuracy is 0.721001\n", 944 | "81th training epoch solvent accessibility Q4 mean accuracy is 0.760772\n", 945 | "One epoch training cost time 161.930329s\n", 946 | "82th training epoch totally loss is 1.451732\n", 947 | "82th training epoch secondary structure Q8 mean accuracy is 0.722457\n", 948 | "82th training epoch solvent accessibility Q4 mean accuracy is 0.760963\n", 949 | "One epoch training cost time 161.713671s\n", 950 | "83th training epoch totally loss is 1.452105\n", 951 | "83th training epoch secondary structure Q8 mean accuracy is 0.722469\n", 952 | "83th training epoch solvent accessibility Q4 mean accuracy is 0.761168\n", 953 | "One epoch training cost time 162.117240s\n", 954 | "84th training epoch totally loss is 1.449863\n", 955 | "84th training epoch secondary structure Q8 mean accuracy is 0.723284\n", 956 | "84th training epoch solvent accessibility Q4 mean accuracy is 0.761032\n", 957 | "One epoch validation cost time 13.290713s\n", 958 | "84th validation epoch secondary structure Q8 mean accuracy is 0.725530\n", 959 | "84th validation epoch solvent accessibility Q4 mean accuracy is 0.762483 \n", 960 | "\n", 961 | "One epoch training cost time 162.700548s\n", 962 | "85th training epoch totally loss is 1.449249\n", 963 | "85th training epoch secondary structure Q8 mean accuracy is 0.723066\n", 964 | "85th training epoch solvent accessibility Q4 mean accuracy is 0.761420\n", 965 | "One epoch training cost time 163.202049s\n", 966 | "86th training epoch totally loss is 1.449259\n", 967 | "86th training epoch secondary structure Q8 mean accuracy is 0.723130\n", 968 | "86th training epoch solvent accessibility Q4 mean accuracy is 0.761272\n", 969 | "One epoch training cost time 163.337679s\n", 970 | "87th training epoch totally loss is 1.447051\n", 971 | "87th training epoch secondary structure Q8 mean accuracy is 0.723934\n", 972 | "87th training epoch solvent accessibility Q4 mean accuracy is 0.761269\n", 973 | "One epoch training cost time 163.615783s\n", 974 | "88th training epoch totally loss is 1.450079\n", 975 | "88th training epoch secondary structure Q8 mean accuracy is 0.723451\n", 976 | "88th training epoch solvent accessibility Q4 mean accuracy is 0.760748\n", 977 | "One epoch training cost time 163.759408s\n", 978 | "89th training epoch totally loss is 1.447294\n", 979 | "89th training epoch secondary structure Q8 mean accuracy is 0.723886\n", 980 | "89th training epoch solvent accessibility Q4 mean accuracy is 0.761531\n", 981 | "One epoch validation cost time 13.390603s\n", 982 | "89th validation epoch secondary structure Q8 mean accuracy is 0.728298\n", 983 | "89th validation epoch solvent accessibility Q4 mean accuracy is 0.766311 \n", 984 | "\n", 985 | "Epoch 91 of 200\n", 986 | "One epoch training cost time 163.631249s\n", 987 | "90th training epoch totally loss is 1.444990\n", 988 | "90th training epoch secondary structure Q8 mean accuracy is 0.724281\n", 989 | "90th training epoch solvent accessibility Q4 mean accuracy is 0.761927\n", 990 | "One epoch training cost time 163.553572s\n", 991 | "91th training epoch totally loss is 1.444295\n", 992 | "91th training epoch secondary structure Q8 mean accuracy is 0.724819\n", 993 | "91th training epoch solvent accessibility Q4 mean accuracy is 0.761911\n", 994 | "One epoch training cost time 163.295597s\n", 995 | "92th training epoch totally loss is 
1.445365\n", 996 | "92th training epoch secondary structure Q8 mean accuracy is 0.724810\n", 997 | "92th training epoch solvent accessibility Q4 mean accuracy is 0.761512\n", 998 | "One epoch training cost time 163.193276s\n", 999 | "93th training epoch totally loss is 1.448443\n", 1000 | "93th training epoch secondary structure Q8 mean accuracy is 0.723989\n", 1001 | "93th training epoch solvent accessibility Q4 mean accuracy is 0.761585\n", 1002 | "One epoch training cost time 162.163659s\n", 1003 | "94th training epoch totally loss is 1.446569\n", 1004 | "94th training epoch secondary structure Q8 mean accuracy is 0.724011\n", 1005 | "94th training epoch solvent accessibility Q4 mean accuracy is 0.762257\n", 1006 | "One epoch validation cost time 13.289689s\n", 1007 | "94th validation epoch secondary structure Q8 mean accuracy is 0.725765\n", 1008 | "94th validation epoch solvent accessibility Q4 mean accuracy is 0.765298 \n", 1009 | "\n", 1010 | "One epoch training cost time 162.153820s\n", 1011 | "95th training epoch totally loss is 1.442371\n", 1012 | "95th training epoch secondary structure Q8 mean accuracy is 0.725097\n", 1013 | "95th training epoch solvent accessibility Q4 mean accuracy is 0.762246\n", 1014 | "One epoch training cost time 161.930068s\n", 1015 | "96th training epoch totally loss is 1.443793\n", 1016 | "96th training epoch secondary structure Q8 mean accuracy is 0.724859\n", 1017 | "96th training epoch solvent accessibility Q4 mean accuracy is 0.762092\n", 1018 | "One epoch training cost time 161.298355s\n", 1019 | "97th training epoch totally loss is 1.448701\n", 1020 | "97th training epoch secondary structure Q8 mean accuracy is 0.723734\n", 1021 | "97th training epoch solvent accessibility Q4 mean accuracy is 0.761990\n", 1022 | "One epoch training cost time 162.167410s\n", 1023 | "98th training epoch totally loss is 1.440854\n", 1024 | "98th training epoch secondary structure Q8 mean accuracy is 0.726165\n", 1025 | "98th training epoch solvent accessibility Q4 mean accuracy is 0.762595\n", 1026 | "One epoch training cost time 161.805005s\n", 1027 | "99th training epoch totally loss is 1.442680\n", 1028 | "99th training epoch secondary structure Q8 mean accuracy is 0.725330\n", 1029 | "99th training epoch solvent accessibility Q4 mean accuracy is 0.762377\n", 1030 | "One epoch validation cost time 13.284225s\n", 1031 | "99th validation epoch secondary structure Q8 mean accuracy is 0.726797\n", 1032 | "99th validation epoch solvent accessibility Q4 mean accuracy is 0.765316 \n", 1033 | "\n", 1034 | "One epoch cb513 test cost time 108.366551s\n", 1035 | "99th cb513 test epoch secondary structure Q8 mean accuracy is 0.674925\n", 1036 | "99th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.328025\n", 1037 | "One epoch casp10 test cost time 25.615579s\n", 1038 | "99th casp10 test epoch secondary structure Q8 mean accuracy is 0.708180\n", 1039 | "Epoch 101 of 200\n", 1040 | "One epoch training cost time 161.736384s\n", 1041 | "100th training epoch totally loss is 1.438613\n", 1042 | "100th training epoch secondary structure Q8 mean accuracy is 0.726621\n", 1043 | "100th training epoch solvent accessibility Q4 mean accuracy is 0.762823\n", 1044 | "One epoch training cost time 161.484059s\n", 1045 | "101th training epoch totally loss is 1.437132\n", 1046 | "101th training epoch secondary structure Q8 mean accuracy is 0.726348\n", 1047 | "101th training epoch solvent accessibility Q4 mean accuracy is 0.762669\n", 1048 | "One epoch training cost time 
162.378237s\n", 1049 | "102th training epoch totally loss is 1.440242\n", 1050 | "102th training epoch secondary structure Q8 mean accuracy is 0.726546\n", 1051 | "102th training epoch solvent accessibility Q4 mean accuracy is 0.762056\n", 1052 | "One epoch training cost time 162.071164s\n", 1053 | "103th training epoch totally loss is 1.440546\n", 1054 | "103th training epoch secondary structure Q8 mean accuracy is 0.726011\n", 1055 | "103th training epoch solvent accessibility Q4 mean accuracy is 0.762748\n", 1056 | "One epoch training cost time 161.684160s\n", 1057 | "104th training epoch totally loss is 1.436510\n", 1058 | "104th training epoch secondary structure Q8 mean accuracy is 0.727078\n", 1059 | "104th training epoch solvent accessibility Q4 mean accuracy is 0.762961\n", 1060 | "One epoch validation cost time 13.276669s\n", 1061 | "104th validation epoch secondary structure Q8 mean accuracy is 0.726750\n", 1062 | "104th validation epoch solvent accessibility Q4 mean accuracy is 0.766152 \n", 1063 | "\n", 1064 | "One epoch training cost time 162.140080s\n", 1065 | "105th training epoch totally loss is 1.435964\n", 1066 | "105th training epoch secondary structure Q8 mean accuracy is 0.727269\n", 1067 | "105th training epoch solvent accessibility Q4 mean accuracy is 0.762785\n", 1068 | "One epoch training cost time 162.058815s\n", 1069 | "106th training epoch totally loss is 1.433858\n", 1070 | "106th training epoch secondary structure Q8 mean accuracy is 0.728486\n", 1071 | "106th training epoch solvent accessibility Q4 mean accuracy is 0.762876\n", 1072 | "One epoch training cost time 163.003961s\n", 1073 | "107th training epoch totally loss is 1.435357\n", 1074 | "107th training epoch secondary structure Q8 mean accuracy is 0.727273\n", 1075 | "107th training epoch solvent accessibility Q4 mean accuracy is 0.762990\n", 1076 | "One epoch training cost time 162.541086s\n", 1077 | "108th training epoch totally loss is 1.437801\n", 1078 | "108th training epoch secondary structure Q8 mean accuracy is 0.726648\n", 1079 | "108th training epoch solvent accessibility Q4 mean accuracy is 0.763052\n", 1080 | "One epoch training cost time 162.751943s\n", 1081 | "109th training epoch totally loss is 1.434155\n", 1082 | "109th training epoch secondary structure Q8 mean accuracy is 0.728034\n", 1083 | "109th training epoch solvent accessibility Q4 mean accuracy is 0.763418\n", 1084 | "One epoch validation cost time 13.316053s\n", 1085 | "109th validation epoch secondary structure Q8 mean accuracy is 0.727106\n", 1086 | "109th validation epoch solvent accessibility Q4 mean accuracy is 0.764247 \n", 1087 | "\n", 1088 | "Epoch 111 of 200\n", 1089 | "One epoch training cost time 162.511970s\n", 1090 | "110th training epoch totally loss is 1.436407\n", 1091 | "110th training epoch secondary structure Q8 mean accuracy is 0.726908\n", 1092 | "110th training epoch solvent accessibility Q4 mean accuracy is 0.763715\n", 1093 | "One epoch training cost time 163.220383s\n", 1094 | "111th training epoch totally loss is 1.431939\n", 1095 | "111th training epoch secondary structure Q8 mean accuracy is 0.729189\n", 1096 | "111th training epoch solvent accessibility Q4 mean accuracy is 0.762689\n", 1097 | "One epoch training cost time 162.951871s\n", 1098 | "112th training epoch totally loss is 1.432998\n", 1099 | "112th training epoch secondary structure Q8 mean accuracy is 0.728746\n", 1100 | "112th training epoch solvent accessibility Q4 mean accuracy is 0.762794\n", 1101 | "One epoch training cost time 
163.471860s\n", 1102 | "113th training epoch totally loss is 1.435653\n", 1103 | "113th training epoch secondary structure Q8 mean accuracy is 0.728061\n", 1104 | "113th training epoch solvent accessibility Q4 mean accuracy is 0.763195\n", 1105 | "One epoch training cost time 163.496391s\n", 1106 | "114th training epoch totally loss is 1.430654\n", 1107 | "114th training epoch secondary structure Q8 mean accuracy is 0.728921\n", 1108 | "114th training epoch solvent accessibility Q4 mean accuracy is 0.763738\n", 1109 | "One epoch validation cost time 13.502199s\n", 1110 | "114th validation epoch secondary structure Q8 mean accuracy is 0.728354\n", 1111 | "114th validation epoch solvent accessibility Q4 mean accuracy is 0.763787 \n", 1112 | "\n", 1113 | "One epoch training cost time 163.433137s\n", 1114 | "115th training epoch totally loss is 1.430671\n", 1115 | "115th training epoch secondary structure Q8 mean accuracy is 0.728816\n", 1116 | "115th training epoch solvent accessibility Q4 mean accuracy is 0.763883\n", 1117 | "One epoch training cost time 163.359501s\n", 1118 | "116th training epoch totally loss is 1.430346\n", 1119 | "116th training epoch secondary structure Q8 mean accuracy is 0.729588\n", 1120 | "116th training epoch solvent accessibility Q4 mean accuracy is 0.763836\n", 1121 | "One epoch training cost time 163.126978s\n", 1122 | "117th training epoch totally loss is 1.429978\n", 1123 | "117th training epoch secondary structure Q8 mean accuracy is 0.729293\n", 1124 | "117th training epoch solvent accessibility Q4 mean accuracy is 0.763610\n", 1125 | "One epoch training cost time 162.008628s\n", 1126 | "118th training epoch totally loss is 1.427306\n", 1127 | "118th training epoch secondary structure Q8 mean accuracy is 0.730236\n", 1128 | "118th training epoch solvent accessibility Q4 mean accuracy is 0.764388\n", 1129 | "One epoch training cost time 162.406003s\n", 1130 | "119th training epoch totally loss is 1.430723\n", 1131 | "119th training epoch secondary structure Q8 mean accuracy is 0.729430\n", 1132 | "119th training epoch solvent accessibility Q4 mean accuracy is 0.763452\n", 1133 | "One epoch validation cost time 13.254876s\n", 1134 | "119th validation epoch secondary structure Q8 mean accuracy is 0.731601\n", 1135 | "119th validation epoch solvent accessibility Q4 mean accuracy is 0.766001 \n", 1136 | "\n", 1137 | "One epoch cb513 test cost time 107.026038s\n", 1138 | "119th cb513 test epoch secondary structure Q8 mean accuracy is 0.681201\n", 1139 | "119th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.296231\n", 1140 | "One epoch casp10 test cost time 25.804831s\n", 1141 | "119th casp10 test epoch secondary structure Q8 mean accuracy is 0.716619\n", 1142 | "Epoch 121 of 200\n", 1143 | "One epoch training cost time 161.920302s\n", 1144 | "120th training epoch totally loss is 1.433148\n", 1145 | "120th training epoch secondary structure Q8 mean accuracy is 0.728380\n", 1146 | "120th training epoch solvent accessibility Q4 mean accuracy is 0.763871\n", 1147 | "One epoch training cost time 161.787137s\n", 1148 | "121th training epoch totally loss is 1.432066\n", 1149 | "121th training epoch secondary structure Q8 mean accuracy is 0.729141\n", 1150 | "121th training epoch solvent accessibility Q4 mean accuracy is 0.763564\n", 1151 | "One epoch training cost time 162.280727s\n", 1152 | "122th training epoch totally loss is 1.430883\n", 1153 | "122th training epoch secondary structure Q8 mean accuracy is 0.729465\n", 1154 | "122th training epoch 
solvent accessibility Q4 mean accuracy is 0.763643\n", 1155 | "One epoch training cost time 162.592026s\n", 1156 | "123th training epoch totally loss is 1.437993\n", 1157 | "123th training epoch secondary structure Q8 mean accuracy is 0.728266\n", 1158 | "123th training epoch solvent accessibility Q4 mean accuracy is 0.762401\n", 1159 | "One epoch training cost time 163.198782s\n", 1160 | "124th training epoch totally loss is 1.429224\n", 1161 | "124th training epoch secondary structure Q8 mean accuracy is 0.729871\n", 1162 | "124th training epoch solvent accessibility Q4 mean accuracy is 0.763582\n", 1163 | "One epoch validation cost time 13.277220s\n", 1164 | "124th validation epoch secondary structure Q8 mean accuracy is 0.731526\n", 1165 | "124th validation epoch solvent accessibility Q4 mean accuracy is 0.767615 \n", 1166 | "\n", 1167 | "One epoch training cost time 163.189356s\n", 1168 | "125th training epoch totally loss is 1.428615\n", 1169 | "125th training epoch secondary structure Q8 mean accuracy is 0.729890\n", 1170 | "125th training epoch solvent accessibility Q4 mean accuracy is 0.763803\n", 1171 | "One epoch training cost time 162.915364s\n", 1172 | "126th training epoch totally loss is 1.428165\n", 1173 | "126th training epoch secondary structure Q8 mean accuracy is 0.730179\n", 1174 | "126th training epoch solvent accessibility Q4 mean accuracy is 0.763847\n", 1175 | "One epoch training cost time 164.249982s\n", 1176 | "127th training epoch totally loss is 1.429220\n", 1177 | "127th training epoch secondary structure Q8 mean accuracy is 0.730394\n", 1178 | "127th training epoch solvent accessibility Q4 mean accuracy is 0.764038\n", 1179 | "One epoch training cost time 163.141143s\n", 1180 | "128th training epoch totally loss is 1.423922\n", 1181 | "128th training epoch secondary structure Q8 mean accuracy is 0.731249\n", 1182 | "128th training epoch solvent accessibility Q4 mean accuracy is 0.764569\n", 1183 | "One epoch training cost time 162.272982s\n", 1184 | "129th training epoch totally loss is 1.426067\n", 1185 | "129th training epoch secondary structure Q8 mean accuracy is 0.731018\n", 1186 | "129th training epoch solvent accessibility Q4 mean accuracy is 0.763577\n", 1187 | "One epoch validation cost time 13.293626s\n", 1188 | "129th validation epoch secondary structure Q8 mean accuracy is 0.729199\n", 1189 | "129th validation epoch solvent accessibility Q4 mean accuracy is 0.766114 \n", 1190 | "\n", 1191 | "Epoch 131 of 200\n", 1192 | "One epoch training cost time 162.511296s\n", 1193 | "130th training epoch totally loss is 1.426810\n", 1194 | "130th training epoch secondary structure Q8 mean accuracy is 0.730466\n", 1195 | "130th training epoch solvent accessibility Q4 mean accuracy is 0.764440\n", 1196 | "One epoch training cost time 162.509979s\n", 1197 | "131th training epoch totally loss is 1.426455\n", 1198 | "131th training epoch secondary structure Q8 mean accuracy is 0.730651\n", 1199 | "131th training epoch solvent accessibility Q4 mean accuracy is 0.764512\n", 1200 | "One epoch training cost time 163.340389s\n", 1201 | "132th training epoch totally loss is 1.422531\n", 1202 | "132th training epoch secondary structure Q8 mean accuracy is 0.731553\n", 1203 | "132th training epoch solvent accessibility Q4 mean accuracy is 0.764949\n", 1204 | "One epoch training cost time 162.774629s\n", 1205 | "133th training epoch totally loss is 1.425888\n", 1206 | "133th training epoch secondary structure Q8 mean accuracy is 0.730763\n", 1207 | "133th training epoch 
solvent accessibility Q4 mean accuracy is 0.764209\n", 1208 | "One epoch training cost time 162.879775s\n", 1209 | "134th training epoch totally loss is 1.428444\n", 1210 | "134th training epoch secondary structure Q8 mean accuracy is 0.731074\n", 1211 | "134th training epoch solvent accessibility Q4 mean accuracy is 0.763758\n", 1212 | "One epoch validation cost time 13.263988s\n", 1213 | "134th validation epoch secondary structure Q8 mean accuracy is 0.731798\n", 1214 | "134th validation epoch solvent accessibility Q4 mean accuracy is 0.767109 \n", 1215 | "\n", 1216 | "One epoch training cost time 162.695999s\n", 1217 | "135th training epoch totally loss is 1.421021\n", 1218 | "135th training epoch secondary structure Q8 mean accuracy is 0.732637\n", 1219 | "135th training epoch solvent accessibility Q4 mean accuracy is 0.764565\n", 1220 | "One epoch training cost time 162.921979s\n", 1221 | "136th training epoch totally loss is 1.424642\n", 1222 | "136th training epoch secondary structure Q8 mean accuracy is 0.731573\n", 1223 | "136th training epoch solvent accessibility Q4 mean accuracy is 0.765071\n", 1224 | "One epoch training cost time 162.796231s\n", 1225 | "137th training epoch totally loss is 1.423570\n", 1226 | "137th training epoch secondary structure Q8 mean accuracy is 0.732041\n", 1227 | "137th training epoch solvent accessibility Q4 mean accuracy is 0.763867\n", 1228 | "One epoch training cost time 162.716055s\n", 1229 | "138th training epoch totally loss is 1.425022\n", 1230 | "138th training epoch secondary structure Q8 mean accuracy is 0.731644\n", 1231 | "138th training epoch solvent accessibility Q4 mean accuracy is 0.764664\n", 1232 | "One epoch training cost time 163.051600s\n", 1233 | "139th training epoch totally loss is 1.419758\n", 1234 | "139th training epoch secondary structure Q8 mean accuracy is 0.733203\n", 1235 | "139th training epoch solvent accessibility Q4 mean accuracy is 0.764364\n", 1236 | "One epoch validation cost time 13.262340s\n", 1237 | "139th validation epoch secondary structure Q8 mean accuracy is 0.732352\n", 1238 | "139th validation epoch solvent accessibility Q4 mean accuracy is 0.765851 \n", 1239 | "\n", 1240 | "One epoch cb513 test cost time 110.556233s\n", 1241 | "139th cb513 test epoch secondary structure Q8 mean accuracy is 0.681378\n", 1242 | "139th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.313030\n", 1243 | "One epoch casp10 test cost time 26.280934s\n", 1244 | "139th casp10 test epoch secondary structure Q8 mean accuracy is 0.714033\n", 1245 | "Epoch 141 of 200\n", 1246 | "One epoch training cost time 162.742053s\n", 1247 | "140th training epoch totally loss is 1.416993\n", 1248 | "140th training epoch secondary structure Q8 mean accuracy is 0.733472\n", 1249 | "140th training epoch solvent accessibility Q4 mean accuracy is 0.765296\n", 1250 | "One epoch training cost time 162.867599s\n", 1251 | "141th training epoch totally loss is 1.426468\n", 1252 | "141th training epoch secondary structure Q8 mean accuracy is 0.731395\n", 1253 | "141th training epoch solvent accessibility Q4 mean accuracy is 0.764410\n", 1254 | "One epoch training cost time 163.444804s\n", 1255 | "142th training epoch totally loss is 1.421046\n", 1256 | "142th training epoch secondary structure Q8 mean accuracy is 0.733170\n", 1257 | "142th training epoch solvent accessibility Q4 mean accuracy is 0.764619\n", 1258 | "One epoch training cost time 163.461040s\n", 1259 | "143th training epoch totally loss is 1.425856\n", 1260 | "143th training 
epoch secondary structure Q8 mean accuracy is 0.731596\n", 1261 | "143th training epoch solvent accessibility Q4 mean accuracy is 0.764811\n", 1262 | "One epoch training cost time 162.964980s\n", 1263 | "144th training epoch totally loss is 1.419968\n", 1264 | "144th training epoch secondary structure Q8 mean accuracy is 0.733408\n", 1265 | "144th training epoch solvent accessibility Q4 mean accuracy is 0.764538\n", 1266 | "One epoch validation cost time 13.270020s\n", 1267 | "144th validation epoch secondary structure Q8 mean accuracy is 0.727688\n", 1268 | "144th validation epoch solvent accessibility Q4 mean accuracy is 0.766593 \n", 1269 | "\n", 1270 | "One epoch training cost time 162.773000s\n", 1271 | "145th training epoch totally loss is 1.423087\n", 1272 | "145th training epoch secondary structure Q8 mean accuracy is 0.731927\n", 1273 | "145th training epoch solvent accessibility Q4 mean accuracy is 0.764782\n", 1274 | "One epoch training cost time 162.816727s\n", 1275 | "146th training epoch totally loss is 1.418504\n", 1276 | "146th training epoch secondary structure Q8 mean accuracy is 0.734049\n", 1277 | "146th training epoch solvent accessibility Q4 mean accuracy is 0.764739\n", 1278 | "One epoch training cost time 163.366979s\n", 1279 | "147th training epoch totally loss is 1.417739\n", 1280 | "147th training epoch secondary structure Q8 mean accuracy is 0.733693\n", 1281 | "147th training epoch solvent accessibility Q4 mean accuracy is 0.765357\n", 1282 | "One epoch training cost time 163.210811s\n", 1283 | "148th training epoch totally loss is 1.413384\n", 1284 | "148th training epoch secondary structure Q8 mean accuracy is 0.734703\n", 1285 | "148th training epoch solvent accessibility Q4 mean accuracy is 0.765688\n", 1286 | "One epoch training cost time 163.156012s\n", 1287 | "149th training epoch totally loss is 1.414742\n", 1288 | "149th training epoch secondary structure Q8 mean accuracy is 0.734093\n", 1289 | "149th training epoch solvent accessibility Q4 mean accuracy is 0.765583\n", 1290 | "One epoch validation cost time 13.297147s\n", 1291 | "149th validation epoch secondary structure Q8 mean accuracy is 0.730944\n", 1292 | "149th validation epoch solvent accessibility Q4 mean accuracy is 0.767662 \n", 1293 | "\n", 1294 | "Epoch 151 of 200\n", 1295 | "One epoch training cost time 162.474523s\n", 1296 | "150th training epoch totally loss is 1.419452\n", 1297 | "150th training epoch secondary structure Q8 mean accuracy is 0.732887\n", 1298 | "150th training epoch solvent accessibility Q4 mean accuracy is 0.765228\n", 1299 | "One epoch training cost time 162.991061s\n", 1300 | "151th training epoch totally loss is 1.419335\n", 1301 | "151th training epoch secondary structure Q8 mean accuracy is 0.733636\n", 1302 | "151th training epoch solvent accessibility Q4 mean accuracy is 0.765006\n", 1303 | "One epoch training cost time 163.223360s\n", 1304 | "152th training epoch totally loss is 1.415921\n", 1305 | "152th training epoch secondary structure Q8 mean accuracy is 0.734338\n", 1306 | "152th training epoch solvent accessibility Q4 mean accuracy is 0.765112\n", 1307 | "One epoch training cost time 162.698810s\n", 1308 | "153th training epoch totally loss is 1.413750\n", 1309 | "153th training epoch secondary structure Q8 mean accuracy is 0.734732\n", 1310 | "153th training epoch solvent accessibility Q4 mean accuracy is 0.765586\n", 1311 | "One epoch training cost time 162.738305s\n", 1312 | "154th training epoch totally loss is 1.416377\n", 1313 | "154th training 
epoch secondary structure Q8 mean accuracy is 0.733784\n", 1314 | "154th training epoch solvent accessibility Q4 mean accuracy is 0.765428\n", 1315 | "One epoch validation cost time 13.297469s\n", 1316 | "154th validation epoch secondary structure Q8 mean accuracy is 0.731976\n", 1317 | "154th validation epoch solvent accessibility Q4 mean accuracy is 0.766311 \n", 1318 | "\n", 1319 | "One epoch training cost time 162.576985s\n", 1320 | "155th training epoch totally loss is 1.413080\n", 1321 | "155th training epoch secondary structure Q8 mean accuracy is 0.735103\n", 1322 | "155th training epoch solvent accessibility Q4 mean accuracy is 0.765968\n", 1323 | "One epoch training cost time 162.522731s\n", 1324 | "156th training epoch totally loss is 1.413698\n", 1325 | "156th training epoch secondary structure Q8 mean accuracy is 0.735463\n", 1326 | "156th training epoch solvent accessibility Q4 mean accuracy is 0.765456\n", 1327 | "One epoch training cost time 163.106925s\n", 1328 | "157th training epoch totally loss is 1.413739\n", 1329 | "157th training epoch secondary structure Q8 mean accuracy is 0.735062\n", 1330 | "157th training epoch solvent accessibility Q4 mean accuracy is 0.765889\n", 1331 | "One epoch training cost time 162.790821s\n", 1332 | "158th training epoch totally loss is 1.419482\n", 1333 | "158th training epoch secondary structure Q8 mean accuracy is 0.733554\n", 1334 | "158th training epoch solvent accessibility Q4 mean accuracy is 0.765649\n", 1335 | "One epoch training cost time 162.528914s\n", 1336 | "159th training epoch totally loss is 1.412258\n", 1337 | "159th training epoch secondary structure Q8 mean accuracy is 0.735506\n", 1338 | "159th training epoch solvent accessibility Q4 mean accuracy is 0.765626\n", 1339 | "One epoch validation cost time 13.284406s\n", 1340 | "159th validation epoch secondary structure Q8 mean accuracy is 0.733863\n", 1341 | "159th validation epoch solvent accessibility Q4 mean accuracy is 0.767963 \n", 1342 | "\n", 1343 | "One epoch cb513 test cost time 110.534071s\n", 1344 | "159th cb513 test epoch secondary structure Q8 mean accuracy is 0.680175\n", 1345 | "159th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.308406\n", 1346 | "One epoch casp10 test cost time 26.406729s\n", 1347 | "159th casp10 test epoch secondary structure Q8 mean accuracy is 0.713806\n", 1348 | "Epoch 161 of 200\n", 1349 | "One epoch training cost time 163.937505s\n", 1350 | "160th training epoch totally loss is 1.412607\n", 1351 | "160th training epoch secondary structure Q8 mean accuracy is 0.735069\n", 1352 | "160th training epoch solvent accessibility Q4 mean accuracy is 0.765878\n", 1353 | "One epoch training cost time 163.083568s\n", 1354 | "161th training epoch totally loss is 1.414874\n", 1355 | "161th training epoch secondary structure Q8 mean accuracy is 0.735406\n", 1356 | "161th training epoch solvent accessibility Q4 mean accuracy is 0.764993\n", 1357 | "One epoch training cost time 163.391270s\n", 1358 | "162th training epoch totally loss is 1.412973\n", 1359 | "162th training epoch secondary structure Q8 mean accuracy is 0.735774\n", 1360 | "162th training epoch solvent accessibility Q4 mean accuracy is 0.765987\n", 1361 | "One epoch training cost time 162.872799s\n", 1362 | "163th training epoch totally loss is 1.414514\n", 1363 | "163th training epoch secondary structure Q8 mean accuracy is 0.735243\n", 1364 | "163th training epoch solvent accessibility Q4 mean accuracy is 0.765736\n", 1365 | "One epoch training cost time 
162.662455s\n", 1366 | "164th training epoch totally loss is 1.410336\n", 1367 | "164th training epoch secondary structure Q8 mean accuracy is 0.736489\n", 1368 | "164th training epoch solvent accessibility Q4 mean accuracy is 0.766486\n", 1369 | "One epoch validation cost time 13.281994s\n", 1370 | "164th validation epoch secondary structure Q8 mean accuracy is 0.734341\n", 1371 | "164th validation epoch solvent accessibility Q4 mean accuracy is 0.765110 \n", 1372 | "\n", 1373 | "One epoch training cost time 163.213203s\n", 1374 | "165th training epoch totally loss is 1.410313\n", 1375 | "165th training epoch secondary structure Q8 mean accuracy is 0.736056\n", 1376 | "165th training epoch solvent accessibility Q4 mean accuracy is 0.766383\n", 1377 | "One epoch training cost time 163.487318s\n", 1378 | "166th training epoch totally loss is 1.409802\n", 1379 | "166th training epoch secondary structure Q8 mean accuracy is 0.736772\n", 1380 | "166th training epoch solvent accessibility Q4 mean accuracy is 0.765805\n", 1381 | "One epoch training cost time 163.224493s\n", 1382 | "167th training epoch totally loss is 1.412365\n", 1383 | "167th training epoch secondary structure Q8 mean accuracy is 0.735748\n", 1384 | "167th training epoch solvent accessibility Q4 mean accuracy is 0.766180\n", 1385 | "One epoch training cost time 162.767302s\n", 1386 | "168th training epoch totally loss is 1.411771\n", 1387 | "168th training epoch secondary structure Q8 mean accuracy is 0.735938\n", 1388 | "168th training epoch solvent accessibility Q4 mean accuracy is 0.766148\n", 1389 | "One epoch training cost time 162.832939s\n", 1390 | "169th training epoch totally loss is 1.412060\n", 1391 | "169th training epoch secondary structure Q8 mean accuracy is 0.735774\n", 1392 | "169th training epoch solvent accessibility Q4 mean accuracy is 0.766140\n", 1393 | "One epoch validation cost time 13.457569s\n", 1394 | "169th validation epoch secondary structure Q8 mean accuracy is 0.730316\n", 1395 | "169th validation epoch solvent accessibility Q4 mean accuracy is 0.766320 \n", 1396 | "\n", 1397 | "Epoch 171 of 200\n", 1398 | "One epoch training cost time 162.786096s\n", 1399 | "170th training epoch totally loss is 1.409047\n", 1400 | "170th training epoch secondary structure Q8 mean accuracy is 0.737170\n", 1401 | "170th training epoch solvent accessibility Q4 mean accuracy is 0.766353\n", 1402 | "One epoch training cost time 162.984277s\n", 1403 | "171th training epoch totally loss is 1.406939\n", 1404 | "171th training epoch secondary structure Q8 mean accuracy is 0.737115\n", 1405 | "171th training epoch solvent accessibility Q4 mean accuracy is 0.766730\n", 1406 | "One epoch training cost time 163.091774s\n", 1407 | "172th training epoch totally loss is 1.409597\n", 1408 | "172th training epoch secondary structure Q8 mean accuracy is 0.737207\n", 1409 | "172th training epoch solvent accessibility Q4 mean accuracy is 0.765977\n", 1410 | "One epoch training cost time 162.823517s\n", 1411 | "173th training epoch totally loss is 1.408947\n", 1412 | "173th training epoch secondary structure Q8 mean accuracy is 0.737331\n", 1413 | "173th training epoch solvent accessibility Q4 mean accuracy is 0.766310\n", 1414 | "One epoch training cost time 163.328133s\n", 1415 | "174th training epoch totally loss is 1.408187\n", 1416 | "174th training epoch secondary structure Q8 mean accuracy is 0.737997\n", 1417 | "174th training epoch solvent accessibility Q4 mean accuracy is 0.766437\n", 1418 | "One epoch validation cost time 
13.332338s\n", 1419 | "174th validation epoch secondary structure Q8 mean accuracy is 0.733637\n", 1420 | "174th validation epoch solvent accessibility Q4 mean accuracy is 0.767794 \n", 1421 | "\n", 1422 | "One epoch training cost time 162.371088s\n", 1423 | "175th training epoch totally loss is 1.407530\n", 1424 | "175th training epoch secondary structure Q8 mean accuracy is 0.737252\n", 1425 | "175th training epoch solvent accessibility Q4 mean accuracy is 0.766321\n", 1426 | "One epoch training cost time 162.867660s\n", 1427 | "176th training epoch totally loss is 1.408291\n", 1428 | "176th training epoch secondary structure Q8 mean accuracy is 0.737261\n", 1429 | "176th training epoch solvent accessibility Q4 mean accuracy is 0.766643\n", 1430 | "One epoch training cost time 162.759683s\n", 1431 | "177th training epoch totally loss is 1.409519\n", 1432 | "177th training epoch secondary structure Q8 mean accuracy is 0.736459\n", 1433 | "177th training epoch solvent accessibility Q4 mean accuracy is 0.767002\n", 1434 | "One epoch training cost time 162.728837s\n", 1435 | "178th training epoch totally loss is 1.405326\n", 1436 | "178th training epoch secondary structure Q8 mean accuracy is 0.737872\n", 1437 | "178th training epoch solvent accessibility Q4 mean accuracy is 0.767145\n", 1438 | "One epoch training cost time 162.726995s\n", 1439 | "179th training epoch totally loss is 1.406382\n", 1440 | "179th training epoch secondary structure Q8 mean accuracy is 0.737970\n", 1441 | "179th training epoch solvent accessibility Q4 mean accuracy is 0.766756\n", 1442 | "One epoch validation cost time 13.296016s\n", 1443 | "179th validation epoch secondary structure Q8 mean accuracy is 0.732014\n", 1444 | "179th validation epoch solvent accessibility Q4 mean accuracy is 0.768178 \n", 1445 | "\n", 1446 | "One epoch cb513 test cost time 111.113669s\n", 1447 | "179th cb513 test epoch secondary structure Q8 mean accuracy is 0.679880\n", 1448 | "179th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.293836\n", 1449 | "One epoch casp10 test cost time 26.476046s\n", 1450 | "179th casp10 test epoch secondary structure Q8 mean accuracy is 0.717889\n", 1451 | "Epoch 181 of 200\n", 1452 | "One epoch training cost time 162.804221s\n", 1453 | "180th training epoch totally loss is 1.411167\n", 1454 | "180th training epoch secondary structure Q8 mean accuracy is 0.736495\n", 1455 | "180th training epoch solvent accessibility Q4 mean accuracy is 0.766841\n", 1456 | "One epoch training cost time 162.670608s\n", 1457 | "181th training epoch totally loss is 1.404506\n", 1458 | "181th training epoch secondary structure Q8 mean accuracy is 0.738348\n", 1459 | "181th training epoch solvent accessibility Q4 mean accuracy is 0.766911\n", 1460 | "One epoch training cost time 162.733235s\n", 1461 | "182th training epoch totally loss is 1.409681\n", 1462 | "182th training epoch secondary structure Q8 mean accuracy is 0.737530\n", 1463 | "182th training epoch solvent accessibility Q4 mean accuracy is 0.766091\n", 1464 | "One epoch training cost time 163.364066s\n", 1465 | "183th training epoch totally loss is 1.402690\n", 1466 | "183th training epoch secondary structure Q8 mean accuracy is 0.738506\n", 1467 | "183th training epoch solvent accessibility Q4 mean accuracy is 0.767458\n", 1468 | "One epoch training cost time 162.741471s\n", 1469 | "184th training epoch totally loss is 1.404324\n", 1470 | "184th training epoch secondary structure Q8 mean accuracy is 0.738619\n", 1471 | "184th training epoch solvent 
accessibility Q4 mean accuracy is 0.767083\n", 1472 | "One epoch validation cost time 13.309688s\n", 1473 | "184th validation epoch secondary structure Q8 mean accuracy is 0.731245\n", 1474 | "184th validation epoch solvent accessibility Q4 mean accuracy is 0.768131 \n", 1475 | "\n", 1476 | "One epoch training cost time 163.268300s\n", 1477 | "185th training epoch totally loss is 1.404337\n", 1478 | "185th training epoch secondary structure Q8 mean accuracy is 0.738427\n", 1479 | "185th training epoch solvent accessibility Q4 mean accuracy is 0.767195\n", 1480 | "One epoch training cost time 162.875486s\n", 1481 | "186th training epoch totally loss is 1.406244\n", 1482 | "186th training epoch secondary structure Q8 mean accuracy is 0.738481\n", 1483 | "186th training epoch solvent accessibility Q4 mean accuracy is 0.766551\n", 1484 | "One epoch training cost time 162.907199s\n", 1485 | "187th training epoch totally loss is 1.405316\n", 1486 | "187th training epoch secondary structure Q8 mean accuracy is 0.738586\n", 1487 | "187th training epoch solvent accessibility Q4 mean accuracy is 0.767435\n", 1488 | "One epoch training cost time 162.822453s\n", 1489 | "188th training epoch totally loss is 1.405784\n", 1490 | "188th training epoch secondary structure Q8 mean accuracy is 0.738151\n", 1491 | "188th training epoch solvent accessibility Q4 mean accuracy is 0.766884\n", 1492 | "One epoch training cost time 162.498162s\n", 1493 | "189th training epoch totally loss is 1.404714\n", 1494 | "189th training epoch secondary structure Q8 mean accuracy is 0.738621\n", 1495 | "189th training epoch solvent accessibility Q4 mean accuracy is 0.766305\n", 1496 | "One epoch validation cost time 13.289683s\n", 1497 | "189th validation epoch secondary structure Q8 mean accuracy is 0.736077\n", 1498 | "189th validation epoch solvent accessibility Q4 mean accuracy is 0.766217 \n", 1499 | "\n", 1500 | "Epoch 191 of 200\n", 1501 | "One epoch training cost time 162.638886s\n", 1502 | "190th training epoch totally loss is 1.401539\n", 1503 | "190th training epoch secondary structure Q8 mean accuracy is 0.739570\n", 1504 | "190th training epoch solvent accessibility Q4 mean accuracy is 0.767655\n", 1505 | "One epoch training cost time 163.478735s\n", 1506 | "191th training epoch totally loss is 1.403431\n", 1507 | "191th training epoch secondary structure Q8 mean accuracy is 0.738942\n", 1508 | "191th training epoch solvent accessibility Q4 mean accuracy is 0.767394\n", 1509 | "One epoch training cost time 163.155745s\n", 1510 | "192th training epoch totally loss is 1.403418\n", 1511 | "192th training epoch secondary structure Q8 mean accuracy is 0.739035\n", 1512 | "192th training epoch solvent accessibility Q4 mean accuracy is 0.767577\n", 1513 | "One epoch training cost time 163.267155s\n", 1514 | "193th training epoch totally loss is 1.401508\n", 1515 | "193th training epoch secondary structure Q8 mean accuracy is 0.739664\n", 1516 | "193th training epoch solvent accessibility Q4 mean accuracy is 0.767446\n", 1517 | "One epoch training cost time 163.233048s\n", 1518 | "194th training epoch totally loss is 1.400352\n", 1519 | "194th training epoch secondary structure Q8 mean accuracy is 0.739430\n", 1520 | "194th training epoch solvent accessibility Q4 mean accuracy is 0.767470\n", 1521 | "One epoch validation cost time 13.271843s\n", 1522 | "194th validation epoch secondary structure Q8 mean accuracy is 0.734679\n", 1523 | "194th validation epoch solvent accessibility Q4 mean accuracy is 0.766977 \n", 1524 | 
"\n", 1525 | "One epoch training cost time 162.820884s\n", 1526 | "195th training epoch totally loss is 1.399314\n", 1527 | "195th training epoch secondary structure Q8 mean accuracy is 0.740509\n", 1528 | "195th training epoch solvent accessibility Q4 mean accuracy is 0.767608\n", 1529 | "One epoch training cost time 163.076934s\n", 1530 | "196th training epoch totally loss is 1.401378\n", 1531 | "196th training epoch secondary structure Q8 mean accuracy is 0.739973\n", 1532 | "196th training epoch solvent accessibility Q4 mean accuracy is 0.766991\n", 1533 | "One epoch training cost time 162.704583s\n", 1534 | "197th training epoch totally loss is 1.412708\n", 1535 | "197th training epoch secondary structure Q8 mean accuracy is 0.737030\n", 1536 | "197th training epoch solvent accessibility Q4 mean accuracy is 0.767073\n", 1537 | "One epoch training cost time 162.662694s\n", 1538 | "198th training epoch totally loss is 1.402747\n", 1539 | "198th training epoch secondary structure Q8 mean accuracy is 0.739875\n", 1540 | "198th training epoch solvent accessibility Q4 mean accuracy is 0.767222\n", 1541 | "One epoch training cost time 163.070463s\n", 1542 | "199th training epoch totally loss is 1.398927\n", 1543 | "199th training epoch secondary structure Q8 mean accuracy is 0.740517\n", 1544 | "199th training epoch solvent accessibility Q4 mean accuracy is 0.767694\n", 1545 | "One epoch validation cost time 13.290826s\n", 1546 | "199th validation epoch secondary structure Q8 mean accuracy is 0.734763\n", 1547 | "199th validation epoch solvent accessibility Q4 mean accuracy is 0.767465 \n", 1548 | "\n", 1549 | "One epoch cb513 test cost time 109.349188s\n", 1550 | "199th cb513 test epoch secondary structure Q8 mean accuracy is 0.681024\n", 1551 | "199th cb513 test epoch solvent accessibility Q4 mean accuracy is 0.296313\n", 1552 | "One epoch casp10 test cost time 26.190298s\n", 1553 | "199th casp10 test epoch secondary structure Q8 mean accuracy is 0.717980\n" 1554 | ] 1555 | } 1556 | ], 1557 | "source": [ 1558 | "train_epochs = 200\n", 1559 | "valid_epoch = 5\n", 1560 | "test_epoch = 20\n", 1561 | "\n", 1562 | "train_batch_size = 128\n", 1563 | "valid_batch_size = 32\n", 1564 | "test_batch_size = 1\n", 1565 | "\n", 1566 | "train_batches = X_train.shape[0]//train_batch_size\n", 1567 | "valid_batches = X_valid.shape[0]//valid_batch_size\n", 1568 | "test_cb513_batch = X_test_cb513.shape[0]//test_batch_size\n", 1569 | "test_casp10_batch = casp10_feature.shape[0]//test_batch_size\n", 1570 | "\n", 1571 | "\n", 1572 | "all_losses_train_1 = []\n", 1573 | "all_losses_train_2 = []\n", 1574 | "all_losses_train = []\n", 1575 | "all_accuracy_train_1 = []\n", 1576 | "all_accuracy_train_2 = []\n", 1577 | "all_confusion_matrix_train_1 = []\n", 1578 | "all_confusion_matrix_train_2 = []\n", 1579 | "\n", 1580 | "all_losses_valid_1 = []\n", 1581 | "all_losses_valid_2 = []\n", 1582 | "all_losses_valid = []\n", 1583 | "all_accuracy_valid_1 = []\n", 1584 | "all_accuracy_valid_2 = []\n", 1585 | "all_confusion_matrix_valid_1 = []\n", 1586 | "all_confusion_matrix_valid_2 = []\n", 1587 | "\n", 1588 | "all_losses_cb513_1 = []\n", 1589 | "all_losses_cb513_2 = []\n", 1590 | "all_losses_cb513 = []\n", 1591 | "all_accuracy_cb513_1 = []\n", 1592 | "all_accuracy_cb513_2 = []\n", 1593 | "all_confusion_matrix_cb513_1 = []\n", 1594 | "all_confusion_matrix_cb513_2 = []\n", 1595 | "all_prediction_cb513 = []\n", 1596 | "\n", 1597 | "all_losses_casp10_1 = []\n", 1598 | "all_accuracy_casp10_1 = []\n", 1599 | 
"all_confusion_matrix_casp10_1 = []\n", 1600 | "all_prediction_casp10 = []\n", 1601 | "\n", 1602 | "\n", 1603 | "for i in xrange(train_epochs):\n", 1604 | " if (i%10) == 0:\n", 1605 | " print \"Epoch %d of %d\" %(i+1, train_epochs)\n", 1606 | " \n", 1607 | " seq_index = np.arange(X_train.shape[0])\n", 1608 | " np.random.shuffle(seq_index)\n", 1609 | " X_train_in1_all = X_train[seq_index, :, 0:21]\n", 1610 | " X_train_in2_all = X_train[seq_index, :, 21:42]\n", 1611 | " labels_train_in1_all = labels_1_train[seq_index]\n", 1612 | " labels_train_in2_all = labels_2_train[seq_index]\n", 1613 | " mask_train_in_all = mask_train[seq_index] \n", 1614 | " \n", 1615 | " losses1 = []\n", 1616 | " losses2 = []\n", 1617 | " losses = []\n", 1618 | " preds1 = []\n", 1619 | " preds2 = []\n", 1620 | " norms = []\n", 1621 | " \n", 1622 | " train_one_epoch_start = time.time()\n", 1623 | " for j in xrange(X_train.shape[0]//train_batch_size):\n", 1624 | " idx = range(j*train_batch_size, (j+1)*train_batch_size)\n", 1625 | "\n", 1626 | " X_train_in1 = X_train_in1_all[idx]\n", 1627 | " X_train_in2 = X_train_in2_all[idx]\n", 1628 | " labels_train_in1 = labels_train_in1_all[idx]\n", 1629 | " labels_train_in2 = labels_train_in2_all[idx]\n", 1630 | " mask_train_in = mask_train_in_all[idx]\n", 1631 | " \n", 1632 | " loss1, loss2, loss, out, batch_norm = train(X_train_in1, X_train_in2, \\\n", 1633 | " labels_train_in1, labels_train_in2, mask_train_in)\n", 1634 | " \n", 1635 | " #print \"%dth mini-batch loss is %f\" %(j, loss)\n", 1636 | " norms.append(batch_norm)\n", 1637 | " preds1.append(out[:,:,0:8]) \n", 1638 | " preds2.append(out[:,:,8:12])\n", 1639 | " losses1.append(loss1)\n", 1640 | " losses2.append(loss2)\n", 1641 | " losses.append(loss)\n", 1642 | " \n", 1643 | " #######################################################\n", 1644 | " ### just for training process monitor\n", 1645 | " ######################################################\n", 1646 | " predictions1 = np.concatenate(preds1, axis = 0)\n", 1647 | " predictions2 = np.concatenate(preds2, axis = 0)\n", 1648 | "\n", 1649 | " loss_train_1 = np.mean(losses1)\n", 1650 | " loss_train_2 = np.mean(losses2)\n", 1651 | " loss_train = np.mean(losses)\n", 1652 | " \n", 1653 | " all_losses_train_1.append(loss_train_1)\n", 1654 | " all_losses_train_2.append(loss_train_2)\n", 1655 | " all_losses_train.append(loss_train)\n", 1656 | " \n", 1657 | " confmatrix1 = confmatrix(predictions1, labels_train_in1_all, mask_train_in_all, 8)\n", 1658 | " confmatrix2 = confmatrix(predictions2, labels_train_in2_all, mask_train_in_all, 4)\n", 1659 | "\n", 1660 | " all_accuracy_train_1.append(confmatrix1.accuracy())\n", 1661 | " all_accuracy_train_2.append(confmatrix2.accuracy())\n", 1662 | " all_confusion_matrix_train_1.append(confmatrix1)\n", 1663 | " all_confusion_matrix_train_2.append(confmatrix2)\n", 1664 | " \n", 1665 | " print \"One epoch training cost time %fs\" %(time.time()- train_one_epoch_start)\n", 1666 | " print \"%dth training epoch totally loss is %f\" %(i, all_losses_train[i])\n", 1667 | " print \"%dth training epoch secondary structure Q8 mean accuracy is %f\" %(i, all_accuracy_train_1[-1])\n", 1668 | " print \"%dth training epoch solvent accessibility Q4 mean accuracy is %f\" %(i, all_accuracy_train_2[-1])\n", 1669 | " \n", 1670 | " # every m epoch do validation\n", 1671 | " if (i+1)%valid_epoch == 0:\n", 1672 | " #print X_valid.shape[0]\n", 1673 | " seq_index = np.arange(X_valid.shape[0])\n", 1674 | " np.random.shuffle(seq_index)\n", 1675 | " \n", 1676 | " 
X_valid_in1_all = X_valid[seq_index, :, 0:21]\n", 1677 | " X_valid_in2_all = X_valid[seq_index, :, 21:42]\n", 1678 | " labels_valid_in1_all = labels_1_valid[seq_index]\n", 1679 | " labels_valid_in2_all = labels_2_valid[seq_index]\n", 1680 | " mask_valid_in_all = mask_valid[seq_index] \n", 1681 | " \n", 1682 | " losses1 = []\n", 1683 | " losses2 = []\n", 1684 | " losses = []\n", 1685 | " preds1 = []\n", 1686 | " preds2 = []\n", 1687 | " \n", 1688 | " valid_one_epoch_start = time.time()\n", 1689 | " for j in xrange(X_valid.shape[0]//valid_batch_size):\n", 1690 | " idx = range(j*valid_batch_size, (j+1)*valid_batch_size)\n", 1691 | " \n", 1692 | " X_valid_in1 = X_valid_in1_all[idx]\n", 1693 | " X_valid_in2 = X_valid_in2_all[idx]\n", 1694 | " labels_valid_in1 = labels_valid_in1_all[idx]\n", 1695 | " labels_valid_in2 = labels_valid_in2_all[idx]\n", 1696 | " mask_valid_in = mask_valid_in_all[idx]\n", 1697 | "\n", 1698 | " loss1, loss2, loss, out = eval(X_valid_in1, X_valid_in2, labels_valid_in1, labels_valid_in2, mask_valid_in)\n", 1699 | "\n", 1700 | " #print \"%dth mini-batch loss is %f\" %(j, loss)\n", 1701 | " preds1.append(out[:,:,0:8]) \n", 1702 | " preds2.append(out[:,:,8:12])\n", 1703 | " losses1.append(loss1)\n", 1704 | " losses2.append(loss2)\n", 1705 | " losses.append(loss)\n", 1706 | "\n", 1707 | " #######################################################\n", 1708 | " ### just for validation process monitor\n", 1709 | " ######################################################\n", 1710 | " predictions1 = np.concatenate(preds1, axis = 0)\n", 1711 | " predictions2 = np.concatenate(preds2, axis = 0)\n", 1712 | "\n", 1713 | " loss_valid_1 = np.mean(losses1)\n", 1714 | " loss_valid_2 = np.mean(losses2)\n", 1715 | " loss_valid = np.mean(losses)\n", 1716 | "\n", 1717 | " all_losses_valid_1.append(loss_valid_1)\n", 1718 | " all_losses_valid_2.append(loss_valid_2)\n", 1719 | " all_losses_valid.append(loss_valid)\n", 1720 | "\n", 1721 | " confmatrix1 = confmatrix(predictions1, labels_valid_in1_all, mask_valid_in_all, 8)\n", 1722 | " confmatrix2 = confmatrix(predictions2, labels_valid_in2_all, mask_valid_in_all, 4)\n", 1723 | "\n", 1724 | " all_accuracy_valid_1.append(confmatrix1.accuracy())\n", 1725 | " all_accuracy_valid_2.append(confmatrix2.accuracy())\n", 1726 | " all_confusion_matrix_valid_1.append(confmatrix1)\n", 1727 | " all_confusion_matrix_valid_2.append(confmatrix2)\n", 1728 | "\n", 1729 | " print \"One epoch validation cost time %fs\" %(time.time()- valid_one_epoch_start)\n", 1730 | " print \"%dth validation epoch secondary structure Q8 mean accuracy is %f\" %(i, all_accuracy_valid_1[-1])\n", 1731 | " print \"%dth validation epoch solvent accessibility Q4 mean accuracy is %f\" %(i, all_accuracy_valid_2[-1]), \"\\n\"\n", 1732 | " \n", 1733 | " # every test_epoch epoch do cb513 test\n", 1734 | " if (i+1)%test_epoch == 0:\n", 1735 | " cb513_batches_index = np.arange(test_cb513_batch)\n", 1736 | " \n", 1737 | " losses1 = []\n", 1738 | " losses2 = []\n", 1739 | " losses = []\n", 1740 | " preds1 = []\n", 1741 | " preds2 = []\n", 1742 | " \n", 1743 | " cb513_one_epoch_start = time.time()\n", 1744 | " for j in xrange(X_test_cb513.shape[0]):\n", 1745 | " X_cb513_in1 = X_test_cb513[j:j+1, :, 0:21]\n", 1746 | " X_cb513_in2 = X_test_cb513[j:j+1, :, 21:42]\n", 1747 | " labels_cb513_in1 = labels_1_test_cb513[j:j+1, :]\n", 1748 | " labels_cb513_in2 = labels_2_test_cb513[j:j+1, :]\n", 1749 | " mask_cb513_in = mask_test_cb513[j:j+1, :]\n", 1750 | "\n", 1751 | " loss1, loss2, loss, out = eval(X_cb513_in1, 
X_cb513_in2, labels_cb513_in1, labels_cb513_in2, mask_cb513_in)\n", 1752 | "\n", 1753 | " #print \"%dth mini-batch loss is %f\" %(j, loss)\n", 1754 | " preds1.append(out[:,:,0:8]) \n", 1755 | " preds2.append(out[:,:,8:12])\n", 1756 | " losses1.append(loss1)\n", 1757 | " losses2.append(loss2)\n", 1758 | " losses.append(loss)\n", 1759 | "\n", 1760 | " #######################################################\n", 1761 | " ### just for test cb513 process monitor\n", 1762 | " ######################################################\n", 1763 | " predictions1 = np.concatenate(preds1, axis = 0)\n", 1764 | " predictions2 = np.concatenate(preds2, axis = 0)\n", 1765 | "\n", 1766 | " loss_cb513_1 = np.mean(losses1)\n", 1767 | " loss_cb513_2 = np.mean(losses2)\n", 1768 | " loss_cb513 = np.mean(losses)\n", 1769 | "\n", 1770 | " all_losses_cb513_1.append(loss_cb513_1)\n", 1771 | " all_losses_cb513_2.append(loss_cb513_2)\n", 1772 | " all_losses_cb513.append(loss_cb513)\n", 1773 | "\n", 1774 | " confmatrix1 = confmatrix(predictions1, labels_1_test_cb513, mask_test_cb513, 8)\n", 1775 | " confmatrix2 = confmatrix(predictions2, labels_2_test_cb513, mask_test_cb513, 4)\n", 1776 | "\n", 1777 | " all_accuracy_cb513_1.append(confmatrix1.accuracy())\n", 1778 | " all_accuracy_cb513_2.append(confmatrix2.accuracy())\n", 1779 | " all_confusion_matrix_cb513_1.append(confmatrix1)\n", 1780 | " all_confusion_matrix_cb513_2.append(confmatrix2)\n", 1781 | " all_prediction_cb513.append(predictions1)\n", 1782 | "\n", 1783 | " print \"One epoch cb513 test cost time %fs\" %(time.time()- cb513_one_epoch_start)\n", 1784 | " print \"%dth cb513 test epoch secondary structure Q8 mean accuracy is %f\" %(i, all_accuracy_cb513_1[-1])\n", 1785 | " print \"%dth cb513 test epoch solvent accessibility Q4 mean accuracy is %f\" %(i, all_accuracy_cb513_2[-1])\n", 1786 | " \n", 1787 | " ################################################################################################################\n", 1788 | " ############## CASP10 dataset ###############################\n", 1789 | " ##############################################################\n", 1790 | " casp10_batches_index = np.arange(test_casp10_batch)\n", 1791 | "\n", 1792 | " losses1 = []\n", 1793 | " preds1 = []\n", 1794 | "\n", 1795 | " casp10_one_epoch_start = time.time()\n", 1796 | " for j in xrange(casp10_feature.shape[0]):\n", 1797 | "\n", 1798 | " X_casp10_in1 = casp10_feature[j:j+1, :, 0:21]\n", 1799 | " X_casp10_in2 = casp10_feature[j:j+1, :, 21:42]\n", 1800 | " labels_casp10_in1 = casp10_labels_1[j:j+1, :]\n", 1801 | " labels_casp10_in2 = np.zeros_like(labels_casp10_in1)\n", 1802 | " labels_casp10_in2 = labels_casp10_in2.astype(\"uint8\")\n", 1803 | " mask_casp10_in = casp10_mask[j:j+1, :]\n", 1804 | "\n", 1805 | " loss1, loss2, loss, out = eval(X_casp10_in1, X_casp10_in2, labels_casp10_in1, labels_casp10_in2, mask_casp10_in)\n", 1806 | "\n", 1807 | " #print \"%dth mini-batch loss is %f\" %(j, loss)\n", 1808 | " preds1.append(out[:,:,0:8]) \n", 1809 | " losses1.append(loss1)\n", 1810 | "\n", 1811 | " #######################################################\n", 1812 | " ### just for testing process monitor\n", 1813 | " ######################################################\n", 1814 | " predictions1 = np.concatenate(preds1, axis = 0)\n", 1815 | " loss_casp10_1 = np.mean(losses1)\n", 1816 | " all_losses_casp10_1.append(loss_casp10_1)\n", 1817 | " confmatrix1 = confmatrix(predictions1, casp10_labels_1, casp10_mask, 8)\n", 1818 | " 
all_accuracy_casp10_1.append(confmatrix1.accuracy())\n", 1819 | " all_confusion_matrix_casp10_1.append(confmatrix1)\n", 1820 | " all_prediction_casp10.append(predictions1)\n", 1821 | "\n", 1822 | " print \"One epoch casp10 test cost time %fs\" %(time.time()- casp10_one_epoch_start)\n", 1823 | " print \"%dth casp10 test epoch secondary structure Q8 mean accuracy is %f\" %(i, all_accuracy_casp10_1[-1])\n", 1824 | " \n", 1825 | " #######################################################\n", 1826 | " ### save itermediate parameters\n", 1827 | " ######################################################\n", 1828 | " \n", 1829 | " with open((\"./train_models/\" + \"train-%d\" %(i) + \".pkl\"), 'w') as f:\n", 1830 | " pickle.dump({\n", 1831 | "\n", 1832 | " 'param_values': lasagne.layers.get_all_param_values(l_out),\n", 1833 | "\n", 1834 | " 'losses_train1': all_losses_train_1,\n", 1835 | " 'losses_train2': all_losses_train_2,\n", 1836 | " 'losses_train': all_losses_train,\n", 1837 | " 'accuracy_train1': all_accuracy_train_1,\n", 1838 | " 'accuracy_train2': all_accuracy_train_2,\n", 1839 | " 'confusion_matrix_train_1': all_confusion_matrix_train_1,\n", 1840 | " 'confusion_matrix_train_2': all_confusion_matrix_train_2,\n", 1841 | "\n", 1842 | " 'losses_eval_valid1': all_losses_valid_1,\n", 1843 | " 'losses_eval_valid2': all_losses_valid_2,\n", 1844 | " 'losses_eval_valid': all_losses_valid,\n", 1845 | " 'accuracy_eval_valid1': all_accuracy_valid_1,\n", 1846 | " 'accuracy_eval_valid2': all_accuracy_valid_2,\n", 1847 | " 'confusion_matrix_valid_1': all_confusion_matrix_valid_1,\n", 1848 | " 'confusion_matrix_valid_2': all_confusion_matrix_valid_2,\n", 1849 | " \n", 1850 | " \n", 1851 | " 'losses_eval_test_cb513_1': all_losses_cb513_1,\n", 1852 | " 'losses_eval_test_cb513_2': all_losses_cb513_2,\n", 1853 | " 'losses_eval_test_cb513': all_losses_cb513,\n", 1854 | " 'accuracy_eval_test_cb513_1': all_accuracy_cb513_1,\n", 1855 | " 'accuracy_eval_test_cb513_2': all_accuracy_cb513_2,\n", 1856 | " 'confusion_matrix_test_cb513_1': all_confusion_matrix_cb513_1,\n", 1857 | " 'confusion_matrix_test_cb513_2': all_confusion_matrix_cb513_2,\n", 1858 | " \n", 1859 | " \n", 1860 | " 'losses_eval_test_casp10_1': all_losses_casp10_1,\n", 1861 | " 'accuracy_eval_test_casp10_1': all_accuracy_casp10_1,\n", 1862 | " 'confusion_matrix_test_casp_1': all_confusion_matrix_casp10_1,\n", 1863 | " \n", 1864 | " 'prediction_cb513': all_prediction_cb513,\n", 1865 | " 'prediction_casp10': all_prediction_casp10,\n", 1866 | "\n", 1867 | " 'i': i,\n", 1868 | " }, f, pickle.HIGHEST_PROTOCOL)" 1869 | ] 1870 | }, 1871 | { 1872 | "cell_type": "code", 1873 | "execution_count": null, 1874 | "metadata": { 1875 | "collapsed": true 1876 | }, 1877 | "outputs": [], 1878 | "source": [] 1879 | }, 1880 | { 1881 | "cell_type": "code", 1882 | "execution_count": 12, 1883 | "metadata": { 1884 | "collapsed": true 1885 | }, 1886 | "outputs": [], 1887 | "source": [] 1888 | }, 1889 | { 1890 | "cell_type": "code", 1891 | "execution_count": 12, 1892 | "metadata": { 1893 | "collapsed": false 1894 | }, 1895 | "outputs": [ 1896 | { 1897 | "data": { 1898 | "text/plain": [ 1899 | "" 1900 | ] 1901 | }, 1902 | "execution_count": 12, 1903 | "metadata": {}, 1904 | "output_type": "execute_result" 1905 | }, 1906 | { 1907 | "data": { 1908 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAW8AAAEACAYAAAB8nvebAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmYHPV95/H3t6qvOXqkkTTS6BpGiESS7cWwAUwskGeJ\nUUCPjyXGBB7f68eQjW0c77OBxUuQTNjFrI3X69hZJ2svNrbxQRL8OMQYm2OQ1iEGYsBcshHoPmd0\njWamp6/67R99qGc0l2aqew59Xs9TT1dXVf/qNzXd3/rVt46fOecQEZGZxZvqCoiIyOlT8BYRmYEU\nvEVEZiAFbxGRGUjBW0RkBlLwFhGZgcYM3ma2ysyerRiOm9mNtaiciIgMz07nOm8z84C9wEXOud1V\nq5WIiIzqdNMmbwdeU+AWEZlapxu8rwXuq0ZFRERk/MadNjGzGIWUyRucc11VrZWIiIwqchrLXgn8\n69DAbWZ6OIqIyAQ452yinz2dtMl1wPdGqICGkIaNGzdOeR1m06Dtqe05XYfJGlfwNrMGCicr/2HS\naxQRkUkbV9rEOdcHLKhyXUREZJx0h+U009HRMdVVmFW0PcOl7Tl9nNZNOsMWYObyQR7PtB8QERkv\nM8NN4oTl6VxtMqI7t9zJf133X8MoSkTGyWzCv3upsTBOUA4VSvBO5VJhFCMip6kaQUHCVa2dbCi5\njlRWwVtEpJbCCd5qeYuI1JSCt4jIDKS0iYjIDKSWt4iEqr29nUcffXSqq3FatmzZwurVq6e6GqdF\nLW8RCZWZ1fQyxk2bNvGBD3xgUmVceumlbN26dVzLZrNZrr76alasWIHneTzxxBOTWvdEhRK8+7P9\nYRQjIhK6sB4EVWndunV85zvfobW1dcqut9d13iKzkHV2hlaWm+At8ZlMhptuuon7778fgGuuuYa7\n7rqLWCxGd3c3H/7wh/nFL36B53m88Y1vZPPmzQDcdddd/NVf/RU9PT0sWbKEv/7rv+ayyy4bdh0/\n/elPufPOO3HO8aMf/YhzzjmHZ599lo6ODi655BIef/xxnn32WV544QU2b97M5z//efbs2UNLSws3\n33wz119/PQCdnZ184AMfYPfuQidh7e3tfPKTn+Tee+9l586dXHHFFXzrW98iHo8TjUa58cZCN76+\n709o24QhnOCttImIVHDOcccdd/DUU0/x/PPPA/Dud7+bO+64g9tvv527776b5cuX093dDcC//Mu/\nAPCb3/yGr371qzzzzDO0traya9cucrnciOu54oor+MxnPsNrr73GvffeO2jed77zHR566CFWrVpF\nEAQsWrSIf/qnf2LFihVs3ryZK6+8kgsvvJDzzz//lHLNjPvvv5+HH36YeDzO2rVr+eY3v8kNN9wQ\n1iaaNLW8RWahibaWw3Tffffxla98hQULCg8k3bhxIzfccAO33347sViM/fv3s2PHDlauXMnatWuB\nQks2nU7z0ksvMX/+fNra2sZcz3BpETPjwx/+MGvWrAHA8zw2bNhQnr9u3TrWr1/Pli1bhg3eADfe\neCOtra0AvPOd7+S55547/Y1QRTphKSJVsW/fPs4666zy+7a2Nvbt2wfAn//5n3POOeewfv16Vq5c\nyV133QXAOeecw5e+9CU2bdrEokWLuO6669i/f/+E1r98+fJB7x966CEuvvhi5s+fT3NzMz/5yU84\nfPjwiJ8vBW6Auro6ent7J1SPatGlgiJSFUuWLGHHjh3l97t27WLJkiUANDY28oUvfIHXXnuNH//4\nx3zxi1/kscceA+C6665jy5Yt7Ny5EzPj5ptvHnU9njd8GKs8kZhOp3nPe97DTTfdxKFDhzh69Cgb\nNmwY94nM6fgQMLW8RaQqrrvuOu644w66u7vp7u7m9ttvL1/S9+CDD7Jt2zacczQ1NeH7Pr7v89vf\n/pbHHnuMdDpNPB4nkUiMeVJw0aJF7Nix45RAXPk+k8mQyWRYsGABnufx0EMP8bOf/Wzcf8vQstPp\nNAMDA6eM11IowTsbZMkH+TCKEpFZwMy49dZbueCCCzj33HM599xzueCCC7j11lsB2LZtG5dffjnJ\nZJK3vvWtfPzjH+dtb3sb6XSaW265hZaWFhYvXkx3dzd33nnnqOt673vfC8D8+fO54IILBtWhJJlM\n8uUvf5lrrrmGefPm8b3vfY93v/vdp9R5tL+ncv6qVauor69n3759/OEf/iENDQ3s2rVr/BsoBKF0\nxsAmOHHLCRpjjeHUSkTGVHyY/1RXQ8Yw0v9psp0xhNb9jVInIiK1E1rw1l2WIlItV155Jclk8pTh\nc5/73FRXbcqEcp036IoTEamehx56aKqrMO0obSIiMgONGbzNbK6Z/Z2ZvWJmL5vZxcMtp5a3iEjt\njCdt8r+AnzjnrjazCNAw3EJqeYuI1M6owdvM5gCXOuc+BOCcywHHh1tWLW8RkdoZK22yAugys3vM\n7Fdm9n/MrH64BdXyFhGpnbHSJhHg3wKfcM49bWZfAv4LcNugpR6H+/bfx0utL9HR0UHHNHiimYhM\njfb2dr7xjW/wB3/wB1NdlXHbsmULH/vYx8bdm85EdHZ20hnic9bLj1McbgBage0V7y8BHhyyjGMT\n7mtPf82JSO0Ufr7TT3t7u3v00Udrtr6NGze697///TVb35NPPune/va3u3nz5rmWlhb33ve+1+3f\nv3/E5Uf6PxWnjxqDRxtGTZs45w4Au83sd4uT3g68NNyyynmLyHTkXLjdoB07dow/+ZM/YefOnezc\nuZNkMslHPvKR0Mofr/Fc5/1J4Ltm9jxwLvDfh1tId1iKTCNm4Q0TlMlk+LM/+zOWLl3K0qVL+fSn\nP00mkwGgu7ubd7zjHTQ3NzN//nzWrVtX/txdd93FsmXLaGpqYvXq1eVHxQ6n1A3aD37wA5LJZLlj\nhY6ODm699VbWrl1LQ0MDr7/+Ovfccw9veMMbaGpqYuXKlfzt3/5tuZzOzs5Bz/9ub2/n7rvv5s1v\nfjNz587l2muvJZ1OA4Xee97znvfQ2NhIXV0dH//4x/nFL34x4e00UWNeKuicex64cKzldMJSRErc\nGdQN2ubNm3nTm9404W01UeHdYam0icj04Vx4wwTdd9993HbbbSxYsIAFCxawceNGvv3tbwMM6gbN\n9/1hu0HLZrO0tbVx9tlnj/Gnjt4Nmud5RCIRNmzYwIoVK4DB3aCNpNQNWnNz84jdoP3617/mL//y\nL/n85z9/WtsmDLo9XkSqYrZ3g7Zt2zY2bNjAl7/85fLOp5bU8haRqpjN3aDt3LmTyy+/nNtuu433\nve994yojbAreIlIVs7UbtL1793LZZZfxiU98guuvv37cZYRNaRMRCd1s7gbt61//Otu3b2fTpk3l\n54o3NTWd3gYKQWjdoK1fuZ6H3/9wOLUSkTGpG7SZQd2giYhImXLeIjLtqRu0U4XWDZrusBSRalE3\naKdS2kREZAZS2kREZAZSy1tEZAZSy1tEZAYKJXgbRiaf
IR/kwyhORETGEErwTkQSAAzkBsIoTkRm\nsPb2dh599NGprsZp2bJlC6tXr57qapyWUIJ3fbTQJ7FSJyJSeSt5LWzatKn8zJSJuvTSS6vaf2U1\nhBK866J1gE5aisj0E3Y3aNNFOME7UgjeulFHZHqwz1pow0TN1m7QpotQ7rBsiDUA0JftC6M4EZnh\nzqRu0KZKKMF7TnwOAMcHjodRnIhMkts49WmC++67j6985SssWLAAgI0bN3LDDTdw++23D+oGbeXK\nlcN2gzZ//nza2trGXM9Y3aBBocOGDRs2lOdXdoM2XPCGk92gASN2gzaVQkmbzEkUg3dawVtECmZ7\nN2hTLZTg3RQvPIhcLW8RKZnN3aBNB+G0vItpk550TxjFicgsMFu7QZsuxhW8zWyHmf3azJ41s6eG\nzi/nvJU2ERFmdzdo08W4ukEzs+3A7znnjgwzz931/+7i5kdu5j///n/m8+s/X416isgQ6gZtZpgO\n3aCNuJJyzlstbxGRmhhv8HbAI2b2jJl9bOhM5bxFpJrUDdqpxnud91rn3H4zawF+bmZbnXNbSjMf\n/NsH4QV45vln6FzQSUdHR1UqKyJnptnQDVpnZyednZ2hlTeunPegD5htBHqdc3cX37stO7dw6T2X\n8vvLfp9//ug/h1Y5ERmZct4zw5TlvM2s3sySxfEGYD3wQuUySpuIiNTWeNImi4AHipfJRIDvOucG\nXSCpOyxFpsZ0u3xNamfM4O2c2w6cN9oyusNSpPaUMjmzhXKHZTKWBOBE5oS6QhMRqYFQgrfv+eUA\n3puZXg9vERGZjULrPV55bxGR2gkteCvvLSJSO+G1vPVwKhGRmgk9baJrvUVEqk9pExGRGUhpExGR\nGSj84K2Wt4hI1SnnLSIyA4Wf81baRESk6pTzFhGZgZQ2ERGZgXSpoIjIDKS0iYjIDBRa8G6uawbg\nSOpIWEWKiMgIQgveCxsWAtDV16WHxIuIVFlowbsh2kBdpI5ULkVfti+sYkVEZBihBW8zK7e+D/Ud\nCqtYEREZRmjBG1DwFhGpEQVvEZEZKNTg3dLQAih4i4hUW7gt73q1vEVEamFcwdvMfDN71sz+cbTl\nlDYREamN8ba8PwW8DIx6AbeCt4hIbYwZvM1sGbAB+Dpgoy1bvlGnvyuMuomIyAjG0/L+n8CfA8FY\nC6rlLSJSG5HRZprZO4BDzrlnzaxjpOU2bdoEFB8Hux8ONSp4i4hU6uzspLOzM7TybLTnkJjZfwc+\nAOSABNAE/L1z7oMVy7hSGZl8hvgdcXzzyfxFBs9CvZhFRGTWMDOcc6OmokczanR1zn3GObfcObcC\nuBZ4rDJwDxXzY8xNzCXv8hxNHZ1onUREZAyn2zQe83GBynuLiFTfuIO3c+4J59y7xlpOwVtEpPpC\nT0oreIuIVF/4wVu3yIuIVF3VWt4H+w6GXbSIiBSFHryXNi0FYE/PnrCLFhGRotCDd9ucNgB2Hd8V\ndtEiIlKk4C0iMgOFHryXNy0HCsFbvciLiFRH6ME7GU/SnGgmnU/r6YIiIlVSlYePnDX3LECpExGR\naqlK8FbeW0SkuqoTvJsKwXvnsZ3VKF5E5IynlreIyAxU3eDdo+AtIlINanmLiMxACt4iIjNQVYL3\n4uRiol6UQ32HSGVT1ViFiMgZrSrB2zOPZU3LANjds7saqxAROaNVrYfgs5vPBuDVw69WaxUiImes\nqgXv1QtWA7C1e2u1ViEicsaqWvBes2ANAK90v1KtVYiInLGqF7xbCsFbLW8RkfBVPW3ySvcrejSs\niEjIqha8FzcupinexJHUET0aVkQkZGMGbzNLmNkvzew5M3vZzO4cT8FmppOWIiJVMmbwds4NAP/O\nOXcecC7w78zskvEUXj5p2aWTliIiYRpX2sQ5118cjQE+cGQ8n6vMe4uISHjGFbzNzDOz54CDwOPO\nuZfH87lSy1tpExGRcEXGs5BzLgDOM7M5wMNm1uGc6yzN37RpU3nZjo4OOjo6gJOXC7546MWw6isi\nMiN1dnbS2dkZWnl2upfxmdlfACnn3BeK791IZQQuYO7n5nIic4J9/2kfi5OLJ11hEZHZwMxwztlE\nPz+eq00WmNnc4ngdcDnw7LgKN48LllwAwNP7np5oHUVEZIjx5LwXA48Vc96/BP7ROffoeFdw4ZIL\nAXh6r4K3iEhYxsx5O+deAP7tRFdw0dKLAHhq31MTLUJERIao2h2WJRcuPdny1m3yIiLhqHrwXt60\nnEUNizg6cJTXjr5W7dWJiJwRqh68zWxQ61tERCav6sEb4KIlhbz3P+/+51qsTkRk1qtJ8O5o7wDg\nke2P1GJ1IiKzXk2C98XLLqYx1sjW7q3s6dlTi1WKiMxqNQneUT9abn3//LWf12KVIiKzWk2CN8Dl\nZ18OwM9fV/AWEZmsmgXv9SvXA/DI648QuKBWqxURmZVqFrxXzV/FsqZldPV38ez+cT0aRURERlCz\n4G1mvOt33wXAD1/6Ya1WKyIyK9UseANc+6ZrAfjBSz/QrfIiIpNQ0+C9tm0tS5NL2Xl8J7/c+8ta\nrlpEZFapafD2zOOaN14DwPdf/H4tVy0iMqvUNHjDydTJ91/8Ppl8ptarFxGZFWoevC9cciFvbHkj\nB/sO8qOtP6r16kVEZoWaB28z408v/FMAvvr0V2u9ehGRWaHmwRvg/ee+n8ZYI5t3blbP8iIiEzAl\nwbsp3sQHz/0gAHc/efdUVEFEZEazyV5vbWZuImVsO7KNNV9dQ+ACXvyPL7KmZc2k6iEiMpOYGc45\nm+jnp6TlDXDOvHP46PkfJXABf/H4X0xVNUREZqQpa3kD7O3Zyzl/dQ4DuQGe+PATrDtr3aTqIiIy\nU8zYljfA0qal3PTWmwD46I8/Sn+2fyqrIyIyY4wZvM1suZk9bmYvmdmLZnZjmBX4zKWf4U0L38S2\nI9u49bFbwyxaRGTWGjNtYmatQKtz7jkzawT+Ffj3zrlXivMnnDYpeWbfM1z89YvJuzwPv//h8rO/\nRURmq6qnTZxzB5xzzxXHe4FXgCUTXeFwLlhyAZs6NgHwwQc+yMHeg2EWLyIy65xWztvM2oHzgdAf\nCXjLJbfQ0d7Bwb6DXHbvZWw/uj3sVYiIzBqR8S5YTJn8HfCpYgu8bNOmTeXxjo4OOjo6Trsivudz\n3x/dx9u//XZe7nqZt3z9LWz+yGZWL1h92mWJiEw3nZ2ddHZ2hlbeuC4VNLMo8CDwkHPuS0PmTTrn\nXen4wHGuvv9qHnn9EVbMXcGTH32SRY2LQitfRGQ6mGzOezwnLA34FnDYOffpYeaHGrwB+rP9dHyz\ng6f3Pc15refx4HUPsrRpaajrEBGZSrUI3pcAm4FfA6WFb3HO/bQ4P/TgDXCw9yCX3HMJ245sY3Hj\nYr5/9fd1E4+IzBpVD97jqEBVgjdAd383V//wap7Y+QQA/+G8/8AX//CLzEnMqcr6RERqZVYHb4BM\nPsN/2/zf+Nw
vPkcmn2HF3BV8+6pvs7ZtbdXWKSJSbbM+eJds7d7K+/7hffxq/68A+NCbP8TGt21k\nRfOKqq9bRCRsZ0zwhkIr/LOdn+ULT36BTD6DYaxfuZ4bfu8G3vG77yDqR2tSDxGRyZoWwbvz6FHe\nNnfupMo5HduObOOzT3yW+1+6n3Q+DcDS5FI+9ZZP8aHzPsTChoU1q4uIyERMi+B92+uv89kVtU9f\nHO4/zL3P38vf/Ovf8JvDvwHAM4+1y9dy1eqruGrNVbTPba95vURExjItgvcHX36Zb62Zup5wAhfw\n020/5atPf5Wfv/ZzskG2PO/81vO5avVVvHPVO1k1fxV10bopq6eISMm0CN5v+9Wv6Dz//EmVE5bj\nA8f5yas/4YGtD/DQtofozQy6k595dfNIxpKsX7meG99yI29seSOF+5BERGpnWgTv9iefZPvFF0+q\nnGoYyA3wyOuP8MArD/DEzifYeXwnuSA3aJlkLMnqBatZ07KG9jntLEkuYXFyMSvmrmDVglXE/NgU\n1V5EZrNpEbz9xx9nYN06It6UdswzplyQ49jAMfb07OFrz3yNf3jlH+jq7xpx+agXpW1OG0uSS1ja\ntJQljYXXpcml5WlLk0uJR+I1/CtEZDaYFsGbxx9n58UX05ZITKqsqdDV18Ur3a+wtXsre3r2sO/E\nPvad2MerR15l25FtY37eMJY1LaMp3oTv+bTUt7CocRGtDa20NrYyJzGHfSf2cTR1lJgfY2HDQtrn\ntnPW3LNon9tOS30LULgMUjsBkTPHtAneT5x3HutqeLlgLfRmessBfW/P3nJg33tib/l1b89e8i4/\n4XXUReoIXEA6n2ZRwyJaG1vpy/bhm09TvImlTUuZE5/D4dRhPPNoTjQzNzGXuYm55fE5iTlEvAhG\n4XtgZnjm0RBtIBlP0hRvIuJFyAU54n6cumgd9dF64n5c+X6RKTJtgve9q1fzgdbWSZU1E2XzWXYd\n30UqlyKbz9LV38WB3gMc7D3Igd4DHBs4xuLkYubXzSeTz3Cg9wA7ju9g57Gd7Di2g6MDRwHwzZ/U\nTmAiDKMuWodhBC4gcAHJeJLlTctpjDXicBzuP0x3fzdHUkfKRw2eeQQuwOFoqW9hYcNCDqcOM5Ab\noD5aT0O0gZgf40TmBL75tM1pIxlLEvWjRLwIES+Cbz6+5+OZVx4faZpnHmZGT7qHdC5NPBInEUmQ\niCSI+3HyLs+hvkM0xho5u/lsoHAFUjJW2HE1xBrIBTlS2RQDuQF8z6ch2kBDrOGUcxrZfJa8y5OI\nVO8o0jlHNsjqfMoZbrLBe9ydMYxlx8BAWEXNKFE/ysp5Kyf8+RPpE/ieTyKSYE/PHrr6ukjGk+SD\nfDk/fzx9nAX1CwA4mjrKsYFjHB0ovJaGUjAt7YwDF9CX7aMn3UNPuod8kMczj0w+Q3+2n1QuVR6v\nlO5P093fPWxd9/fuZ3/v/gn/rdNRxItQH63HOUcqlyqf0E5EEnjmkcqmmJOYw5z4HAZyA6RyKdK5\nNM11zcyvm49nw5/ncQzfKMoHefae2EtPuoflTctZ1rSsvMMyCkdMlUPpKGrQtOJyuSBHb6aXeXXz\naJvTRn+2n4HcAHWRuvLRVWm89P169cirNCeaaYg20NXfRSKSoLWxlYHcALkgR0t9C42xRqJ+lHl1\n85gTn0PgAvIuTz7ID3qNeBESkQQDuQGy+SxzEnOI+TGccwQuIJVLsbdnLxEvwtnNZxOPxMkH+VPK\n88xjXt08GmON5aNH4JSjwvHOGzq/Kd5Ec6KZwAWcyJygq6+LxlgjS5JLOJI6QjbIsrhxMb7nD/o/\nlS459s0vHNlO8Ch1b89eDvQeYPWC1TTEGiZUxnBCC947z9DgPVnJeLI83janjbY5bTVbdz7Ik8ql\nAMpB4djAMXb37CaVTeFwzK+bT0tDC3MTcznQe4Ddx3eXA4pzjoN9BznUd4iW+hbqonX0Z/vpy/SR\nyWdIxpOkc2l29+ymP9tPNp8lF+TIBTnyrvgjLgWE4o956A+7tIzDkYwliUfiZPIZBnIDpHNpBnID\nmBmLGhZxbOAYO47twPd8DKM308uJzAl6M71EvSh10TrifpzABfRmeunL9pELcvSke8rbpNTqH8id\n/D6XdpCVDvQe4EDvgQlve8PY3bOb3T27J1yGhCfmx0hEEmTyGTL5DIELTlnGM6981Fg6gox4EXzP\nH3REWRovXSBxsK/QJ69h5aPwq1ZfNek6q+V9BvM9n8ZY46BpddE6FicXD7t8+9z2WXfHaiafoS/T\nh2ceddE6ol7h+Tj92X4CF1AXreP4wHGOp4+XW7ExP8aR1BGOpI6MWvbQ1mBJa2MrzXXNvH70dbr6\nunC4ctqqcii1YE+ZXly+dF6jq7+L3cd3k4wnyy3h/mw/qWyKVC5VHl/YsJDVC1ZzPH2c/mw/CxsW\nksqmONB7gLpoHREvwqG+Q/Rn+8nkMxxOHaYn3XNKWqsUpLJBllQ2Vf5sT7qHTD5TbgjE/BhLkkvI\nBTm2H9tOLsidkg7zPZ98kOdw6vCgo8Ch6dzKI5nR5g2dH7iAnnQPRweO4ptPfbSehQ0L6Un3cKD3\nAPPq5uF7Pof6DpHJZwb976J+FMMGNTYqlxmvpngTy5qW8dvDvy2fN7t42eQvrQ4t531OXR2vvuUt\nk66QiEgtOOfKqZDSkWHMjxHzY4NSKKVlAxeUjxxLAb3yfS7IkQ8K07JBlqgXpSHWwLKmZXjmkc6l\nOdh3kCOpIyRjSc6Zf870yHnvGhggcA5PVy+IyAxQmcOuj9bDKA8lNbPyUUOciV3SG4/EQ02NhnJX\nTUs0SsY59mdO/5BCREROXyjBe019PQBP9fSMsaSIiIQhlOC9ft48AH56ZPQTOCIiEo5QgvcVFcG7\nVr3qiIicyUIJ3uc3NtISjbIrnWZrf//YHxARkUkJJXh7ZqxvbgbgYaVORESqbszgbWb/18wOmtkL\noy1XSp080N2t1ImISJWNp+V9D3DFWAtdOX8+jb7P5uPH+et9+yZfMxERGdGYwds5twU4OtZy86NR\nvr5qFQCf3raNJ44dG+MTIiIyUaF2ffPHCxfy8SVLyDrH5c8/z9f27iVQCkVEJHSh3B6/6Zpr4A1v\nAOCqdeuItrXxpT17+I+vvsr/2ruXP12yhD9qaWFpXD3FiMiZqbOzk87OztDKG9eDqcysHfhH59y/\nGWaec9dfD3/zN4Om33fwILe8/jq70unytIubmnjX/Plc1NTERckkyUhoj1YREZlRatKTzpjB++yz\n4bXXTvlcNgj4+64uftDVxU+PHGEgOPmM3IgZb21q4veSSX63ro6FsRjtiQRvqK8n7nk40EOuRGTW\nqnrwNrPvAW8D5gOHgNucc/dUzC+U8NprcPbZI5bTl8/z0OHDPHbsGM+cOMG/njjBqY87PynheVyU\nTPLmxkZaYzHqPI8G32d5PM5ZiQRt8TgNvq8+GEVkRpoWfVg6KKRNrr9+
3J87ms2y5fhxXuzr4/VU\nikPZLK+mUvy2v3/UoD5o3RSCfMLzmBuJsDgWI2KFR+A3RSI0+T5NkQjL43GWxuNknWMgCEgHAcvi\ncX6nro46zyPmeUTNCkNxPFF81c5BRKph+gTv974XfvjDSZUFEDiHAUdyOX5x/DivpVIcyGRIBwEn\n8nl2pdPsHBhgdzo9KA1TDT7Q6PvU+z51nlcYfJ9EadzzqPd95vg+vhlBsf6+WXnHkfA8DmQyHMsV\n+kZM+j4LYzFaolEWRqO0xGLknKMnl2NeNEq953E4myVixrxolHmRCNHitNLnG32fBt9XWklkBps+\nwXvePDh0CHx/rI+EJu8c6SAgFQQcyWbZn8kQOEcA9Obz9ORyHM3l2DkwwP5MhnixlR41Y/vAANsH\nBkgHAVnnyJZei0Mqn6e2fbmfvgbPIxmJUO95ZJyjL5+nP5/Hq9h5NPg+ETMiZvgUzjX4pffFV4/C\n+QW/YrzyNRUEHM3lWBiN0p5IYECewo4q6fskIxFO5HJknCtvYx/oyefxzVgWj5PwPALniJqVj3RK\ndfCLdfPNCvUojvsVdbJieakgIFYsI1Y8WnLA8VyORt+nLZEgcI6cc9QVd7raycl0ND2C94oVsH07\nXHYZfPPW5NtfAAAJi0lEQVSbsHz5pMqcLjJBQH8+T18QkCoGjoHiziJVnNabz3MinyeAcsDLFo8S\nevJ5Uvk8i2Ix5kUL3XT05HIcymY5lMnQVXyNeh5Nvs+RXI6+fJ4F0Sh55ziSy3E4myUTBMyPRvHM\nOJHL0Vusk4xPvLSTqtgZ+MWgD4UurhyUh8ppBiyKxVgYi5UbCgNBQIPn0RSJlP/nVlzWiuOlGyhO\nFL8j8eKRWtzzCv+/fJ7Fxe/F0M9WvvdGmVcanxOJ0ByJ0JvPk3WOOZEIueLOvCUapSUaJep5dGUy\n7MtkmFPc4R/OZkl4HgtjMVJBQM45WmMxGrxC7cvbYsirx8l0ZcLzSAdBecdd+hv78nmO53L05PPE\nzWhLJIgXd+BBcfuWjlQ9s3Kdyv8H5watt/S/KB31JjyvFPxOKS8oLl8ab/A8IqW/yTl683nqKqZV\nW+AcqSCgYUjDdnoE78cfh2uuga4uiEbh2mthwwZ4y1ugvR3U8gld3jn6izuOvmJwaPB96j2PgMJO\noqcYOPLFlmi+OOScIw/kiuOu+D4Y8gMovcbMaI5E2JfJsDudLgc/j0JruCeXoykSIV78IaeLgaAp\nEiEbBOxJp8kW02E558gUj5jyxfWW6hVUjOcr6pQv/pCTvk+d75MtBotM8WgJCgHsWC7HnnS63Kov\n7WTlzFbawRnQn8+Tdg4PWBCNkgoCAudoicWIF1Of+WF+B8MdEQ73vrQjLx21Zp3jxb4+TuTznJ1I\n0J5IkPA8Lm9u5tNtbdMgeDsHBw7Apz4Ff/d3UPmDWbgQzj8fVq+G1tZCeqW5ufBaOTQ2KshL6Fzx\nJPXQnUGek61YOLU1W5oWAPvSaQ7ncoNafaW0nCsuM7T17ip2OI2+T7pYj1Krvd732ZtO05PPD/ps\nMEI5I70PnON4LseRXI6k7xM143g+T8SMOs+jK5vlcDZL1jnmRSIsjcfpyeVIFY/mUkHAoUymcA4F\nOJDJlM8lVZ6sr9wm+Yq/ZSAICqnI4o67/DcWzwU1RSL05/PsTqfJFYNg6WiiNJ4HjuVypPL5EY8w\nKP69A8Uj3nRF3Cod6VSm+6zitTefH9S/fJ3nMRAETC7ynZ6IGbmKOn+ktZV71qyZJsG7ZPt2+O53\n4ckn4Ze/hMOHx1dQJFII4nPmQH8/ZLOwbBksWAANDVBff3JoaIC6usH59UgEEonC9MohkYBYrDA/\nEimUffhwody2NojHC+WUyir9LWaFo4hIpPDqedq5iEwTpQsbxnM1WC4IOJ7PY1A+Qs0EAd3ZLPXF\ncyJd2SzZICjvAErnX0o7mkFHhZx6hFgar9wBl5qwq+rqWBCN8kp/PweLO8cl8Ti/19Q0zYJ3Jefg\n9dfhxRfhN78pBM0jR4YfZkInDqUdwEiD7xeCfOVgNvy00vTSuN5P3/fOFY4m88VT2JXLDB18v9Bg\nAOjtLXyucj4UvuupVKHhkEgUBt8/tayh9Rk65PPQ11cYj8UKg1mh7FLjo3IofSYITjZShjPSd9bz\nTm6L0t813HIwuPzK5QAGBgr1GO63M54GUmmZym06dBuPZ3yKTTbnXd37081g5crCMJZ0Go4ehWPH\nCi1rz4M9ewrT+vsLX9L+/sFDZXommy18KVKpU4dc7uQQj8P8+YUrY/bsKXwunz/1hxkEheWz2cLg\n3MkyRGR2GCvIh7GjGG7edddNuurT5+Ei8XghJ97aenLa0qVTV5+hSi2vyh3BcENly6Q0jDStNL00\nrvfT831lyxIGzx865POFRgQUzuOUWquVQ319IaWXzRYaF6WW6HDlDa1P5eD7hYYOQCZTGIKgUHZp\nWqnxUbxPAM872cofztB1Dx2GtrAr5+WHXFxb2QqvnF860hj6exrPyeVSi75Ux6F1Hmle5WeH+3tr\nradn0kVUN20iIjLdjCfIT2beeJZLJLC5c5m+aRMRkemmMo0xg9XmKnUREQmVgreIyAyk4C0iMgMp\neIuIzEAK3iIiM5CCt4jIDKTgLSIyAyl4i4jMQAreIiIzkIK3iMgMpOAtIjIDjRm8zewKM9tqZq+a\n2c21qJSIiIxu1OBtZj7wFeAK4A3AdWa2phYVO1N1dnZOdRVmFW3PcGl7Th9jtbwvArY553Y457LA\n94F3V79aZy79OMKl7Rkubc/pY6zgvRTYXfF+T3GaiIhMobGCt3pZEBGZhkbtScfMLgY2OeeuKL6/\nBQicc3dVLKMALyIyAVXrPd7MIsBvgD8A9gFPAdc5516Z6ApFRGTyRu0GzTmXM7NPAA8DPvANBW4R\nkak36Q6IRUSk9iZ1h6Vu4Jk8M9thZr82s2fN7KnitHlm9nMz+62Z/czM5k51PacjM/u/ZnbQzF6o\nmDbitjOzW4rf1a1mtn5qaj19jbA9N5nZnuL381kzu7JinrbnCMxsuZk9bmYvmdmLZnZjcXp430/n\n3IQGCmmUbUA7EAWeA9ZMtLwzdQC2A/OGTPsfwE3F8ZuBz011PafjAFwKnA+8MNa2o3CT2XPF72p7\n8bvrTfXfMJ2GEbbnRuA/DbOstufo27IVOK843kjh3OGaML+fk2l56wae8Aw94/wu4FvF8W8B/762\n1ZkZnHNbgKNDJo+07d4NfM85l3XO7aDw47ioFvWcKUbYnnDq9xO0PUflnDvgnHuuON4LvELhHpnQ\nvp+TCd66gSccDnjEzJ4xs48Vpy1yzh0sjh8EFk1N1WakkbbdEgrf0RJ9X8fvk2b2vJl9o+IwX9tz\nnMysncIRzS8J8fs5meCtM53hWOucOx+4Evi4mV1aOdMVjqm0rSdgHNtO23Vs/xtYAZwH7AfuHmVZ\nbc8hzKwR+HvgU865E5XzJvv
9nEzw3gssr3i/nMF7DhkH59z+4msX8ACFQ6WDZtYKYGaLgUNTV8MZ\nZ6RtN/T7uqw4TUbhnDvkioCvc/JQXttzDGYWpRC4v+2c+1Fxcmjfz8kE72eA3zGzdjOLAX8M/HgS\n5Z1xzKzezJLF8QZgPfAChe34oeJiHwJ+NHwJMoyRtt2PgWvNLGZmK4DfoXDTmYyiGGBKrqLw/QRt\nz1GZmQHfAF52zn2pYlZo389Rb9IZjdMNPGFYBDxQ+D8TAb7rnPuZmT0D/NDMPgrsAK6ZuipOX2b2\nPeBtwAIz2w3cBnyOYbadc+5lM/sh8DKQA/602JqUomG250agw8zOo3AIvx24AbQ9x2Et8H7g12b2\nbHHaLYT4/dRNOiIiM5C6QRMRmYEUvEVEZiAFbxGRGUjBW0RkBlLwFhGZgRS8RURmIAVvEZEZSMFb\nRGQG+v9DYfJydgPrMgAAAABJRU5ErkJggg==\n", 1909 | "text/plain": [ 1910 | "" 1911 | ] 1912 | }, 1913 | "metadata": {}, 1914 | "output_type": "display_data" 1915 | } 1916 | ], 1917 | "source": [ 1918 | "plt.plot(all_losses_train_1, 'c', all_losses_train_2, 'r', all_losses_train, 'g', linewidth=2.0)\n", 1919 | "plt.legend((\"loss_train1\", \"loss_train2\", \"loss_train\"))" 1920 | ] 1921 | }, 1922 | { 1923 | "cell_type": "code", 1924 | "execution_count": 13, 1925 | "metadata": { 1926 | "collapsed": false 1927 | }, 1928 | "outputs": [ 1929 | { 1930 | "data": { 1931 | "text/plain": [ 1932 | "" 1933 | ] 1934 | }, 1935 | "execution_count": 13, 1936 | "metadata": {}, 1937 | "output_type": "execute_result" 1938 | }, 1939 | { 1940 | "data": { 1941 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAEACAYAAABMEua6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmUXGWd//H3t6p6TW9Jd2iSdBZIQDCAAQIHRaFBDYuI\nI4yaBDjjqIDrCC5EM0gymKMTwcgo6PDjhw4iEUdQfogwKoQGZDAsSYCQGEiA7AnpTtL7WvX8/rh1\nK9XV1Wuqu5Z8Xuc85y51+95vP1X1vc997q17zTmHiIjkhkC6AxARkdRRUhcRySFK6iIiOURJXUQk\nhyipi4jkECV1EZEcMmBSN7NCM1ttZuvMbIOZfT/JMrVm1mhma6PlxtELV0REBhIa6EXnXIeZneec\nazOzEPBXM3u/c+6vCYs+5Zy7dPTCFBGRoRi0+8U51xYdzQeCwP4ki1kqgxIRkZEZNKmbWcDM1gF7\ngSedcxsSFnHA+8zsZTN71MzePRqBiojI4IbSUo845+YANcA5ZlabsMgaYKpz7j3AT4CHUh6liIgM\niQ3n3i9m9h2g3Tl36wDLvAWc7pzbnzBfN5kRERkB59yQu7gHu/qlyswqouNFwIeBtQnLVJuZRcfP\nxNtRJOt3xzmX8WXJkiVpjyEXYlScijPTS7bEOVwDXv0CTALuMbMA3g7gXufcE2Z2bTRJ3wn8I/AF\nM+sB2oD5w45CRERSYrBLGl8FTksy/8648TuAO1IfmoiIDJd+UZqgtrY23SEMKhtiBMWZaooztbIl\nzuEa1onSw9qQmRurbYmI5Aozww3jROlgfeoiksWi1zBIlkhFw1dJXSTH6Qg5O6RqB6w+dRGRHKKk\nLiKSQ5TURURyiJK6iEgOUVIXkTE3Y8YMnnjiiXSHMSQzZsxg1apVAHzve9/j6quvHnDZdP9fuvpF\nRMacmWXN5ZbxcS5evHjQZf3l169fz9e//nXWrFlDQ0MDkUhkVOP0qaUuIjIK8vPzmT9/PnffffeY\nblctdZEjVF1d6lrKtbUjuxa+q6uLG264gd/+9rcAfPKTn2T58uXk5+dTX1/Ppz/9aZ599lkCgQCz\nZ8/m6aefBmD58uX85Cc/oampicmTJ/PTn/6U888/P+k2du3axaxZs9i5cyfjx48HYO3atcybN489\ne/bw9ttvc/XVV/PKK69gZlxwwQXccccdlJeX91nX0qVL2bJlC/feey8A9957LzfeeCOtra187Wtf\n67Xs8ccfz/HHH8/mzZtHVDcjpZa6iKSFc45ly5bx/PPP8/LLL/Pyyy/z/PPPs2zZMgB++MMfMnXq\nVOrr63nnnXf4/ve9595v2rSJO+64gxdffJGmpib+/Oc/M2PGjH63M3nyZN773vfy4IMPxuatXLmS\nT3ziEwSDQQD+9V//ld27d7Nx40a2b9/O0qVLk64rvitmw4YNfPGLX+S+++5j165dNDQ0sGPHjsOs\nlcOnlrrIEWqkretUWrlyJbfffjtVVVUALFmyhGuvvZabb76Z/Px8du/ezdtvv83MmTM5++yzAQgG\ng3R2dvLaa69RWVnJtGnTBt3OwoULWblyJZ/73OdwzvGb3/yGlStXAjBz5kxmzpwJQFVVFddffz03\n33xz0vXE/zr3gQce4KMf/Sjvf//7Afjud7/L7bffPvLKSBG11EUkbXbt2sX06dNj09OmTWPXrl0A\nfPOb32TWrFnMmzePmTNnsnz5cgBmzZrFbbfdxtKlS6murmbBggXs3r17wO1cdtllPPfcc+zZs4en\nn36aQCAQS8Z79+5l/vz51NTUUF5ezlVXXUVDQ8OQYq+pqYlNFxcXU1lZOew6SDUldRFJm8mTJ/P2\n22/Hprdt28bkyZMBKCkp4dZbb2XLli08/PDDrFixInZp4YIFC3jmmWfYunUrZsaiRYsG3M748eOZ\nN29erIW+YMGC2GuLFy8mGAyyfv16Ghsbuffee4d0pcrkyZPZvn17bLqtrW1IO4PRpqQuImmzYMEC\nli1bRn19PfX19dx8881cddVVADzyyCNs3rwZ5xxlZWUEg0GCwSCvv/46q1atorOzk4KCAgoLC2N9\n4wNZuHAh99xzDw8++CALFy6MzW9paWHcuHGUlZWxc+dObrnlliHFfvnll/PII4/w7LPP0tXVxU03\n3dRnZ9DR0UFXVxcAnZ2ddHZ2DrVqRkxJXUTSwsy48cYbmTt3LqeccgqnnHIKc+fO5cYbbwRg8+bN\nfPjDH6a0tJT3ve99fOlLX+Lcc8+ls7OTb3/720ycOJFJkyZRX18fO4k6kEsvvZTNmzczadIkTj75\n5Nj8JUuW
sGbNGsrLy/noRz/K5Zdf3u819PHXoc+ePZs77riDhQsXMnnyZCZMmMDUqVNjy7799tsU\nFxdz0kknYWYUFRVx4oknHk6VDYkekiGSw6IPWEh3GDIE/b1Xw31IhlrqIiI5REldRHLCRRddRGlp\naZ/y7//+7+kObUyp+0Ukh6n7JXuo+0VERPpQUhcRySFK6iIiOURJXUQkh4xpUu/u7h7LzYmIHHHG\nNKnv3LlzLDcnIhkqEx77NlTZ9ji7AZO6mRWa2WozW2dmG8ws6W9xzezHZvaGmb1sZqf2t76tW7ce\nbrwikgOy+XF2d91114DL+svfc889zJ07l/LycqZOncqiRYsIh8OjHu+ASd051wGc55ybA5wCnGdm\n749fxswuBmY5544DrgF+1t/64u/GJiKSy9rb2/mP//gPGhoaWL16NU888QS33nrrqG930O4X51xb\ndDQfCAL7Exa5FLgnuuxqoMLMqpOtSy11kcxhlroyUl1dXVx33XVMmTKFKVOmcP3118fualhfX88l\nl1zC+PHjqays5Jxzzon93fLly6mpqaGsrIwTTjgh1j2SzK5duyguLubAgQOxeWvXrmXixImEw2G2\nbNnC+eefT1VVFRMnTuTKK6+ksbEx6bqWLl0au4skeI+zmz59OlVVVXzve9/rteznP/95zj77bEKh\nEJMnT+aKK67g2WefHVE9DcegSd3MAma2DtgLPOmc25CwyBRge9z0DqCGJJTURcR3pD3O7qmnnuKk\nk04aavWM2FBa6pFo90sNcI6Z1SZZLHFfnfR3yep+EckczqWujNTKlSu56aabqKqqoqqqiiVLlsQe\n6hz/OLtgMJj0cXbd3d1MmzaNY489dsDtLFy4kF//+tfR/9t7nJ1/T/WZM2fywQ9+kLy8vNjj7J56\n6ql+6iz54+zy8/P57ne/SyCQPKX+/Oc/Z82aNXzjG98YXgWNwJCfUeqcazSzPwJzgbq4l3YCU+Om\na6Lz+li7dm1sD1hbW0ttbe3wohWRnDLY4+yWLl3KvHnzALjmmmtYtGhRr8fZvfbaa1xwwQWsWLGC\nSZMm9budyy67jK985Svs2bOHTZs29Xmc3Ve/+lX++te/0tzcTCQSYcKECUOKfSiPs3vooYdYvHgx\nTzzxxJDWW1dXR11d3aDL9cs5128BqoCK6HgR8DTwwYRlLgYejY6fBfytn3W5vLw8Fw6HnYiMDe8r\nnnlmzJjhHn/8cTdz5kz36KOPxub/6U9/cjNmzOiz/Pr1691RRx3lnnjiiV7zm5qa3IIFC9xVV101\n6DY/9rGPudtuu81dc8017lvf+lZs/mc+8xm3cOFCd+DAAeecc7///e9dTU1Nr1j97S5ZssRdeeWV\nzjnn/u3f/s3Nnz8/tlxra6vLz8/vFeNjjz3mJk6c6F544YVB4+vvvYrOHzBXx5fBul8mAauifeqr\ngT84554ws2vN7NroTuFR4E0z2wzcCXyxv5V1d3cP+oBYETly5PLj7FatWsUVV1zB7373O+bOnTvM\nmhm5wS5pfNU5d5pzbo5z7hTn3C3R+Xc65+6MW+7LzrlZzrn3OOfWDLROnSwVEcj9x9ktW7aM5ubm\nXvd5/8hHPnI4VTYkY3o/daDPk7xFZPTofurZI2vvp64rYERERs+YJ3V1v4jIaNDj7DxDvqQxVZTU\nRWQ0PPbYY+kOISOo+0VEJIeM+YnSoqIiWltbs+YObSLZTCdKs0dWniitqKigvb2d+vr6sdysiMgR\nY0yTuv9zYPWri4iMjrQkdfWri4iMjjFN6v7tMdVSFzmyZcJj34Yq2x5nN6aXNKr7RUQgux9nN9iy\n6f6/1P0iIpJD1P0icoTyW5WpKCOVy4+zSxdd/SIiaeGOsMfZjZUxTeqVlZUUFxfT2NjIwYMHx3LT\nIpJgOA9eGKyM1JHyOLuxNKYRmJla6yISM9jj7GbNmsW8efOYOXMmy5cvB+j1OLvq6moWLFgw6MN3\nLrvsMp577jn27NnD008/3edxdvPnz6empoby8nKuuuoqGhoahhT7UB5nN9bGfLeifnUR8U2ePLnX\nhRPbtm1j8uTJAJSUlHDrrbeyZcsWHn74YVasWBHrO1+wYAHPPPMMW7duxcxYtGjRgNsZP3488+bN\n4ze/+U2fZzosXryYYDDI+vXraWxs5N577+31BKOBYt++fXtsuq2tbUg7g9E25kldV8CIiC+XH2eX\nLmlL6mqpixzZcv1xdukypndpdM5x//33s2DBAi6//HIeeOCBMdm2yJFKd2nMHll5l0ZQS11EZDSp\nT11EcoIeZ+cZ8+6XSCRCUVERXV1dsRMUIjI61P2SPbK2+yUQCDBt2jTAu3xJRERSJy0/f1IXjIjI\n6BjTW+/6dLJUZOyk+1awMrbSktT1q1KRsaH+9COPul9ERHJIWpO6WuoiIqk1YFI3s6lm9qSZvWZm\n683sX5IsU2tmjWa2NlpuHGyj6n4RERkdA16nbmZHA0c759aZWQnwEvAPzrmNccvUAl9zzl064Iai\n16kD9PT0UFhYSDgcpqOjg4KCghT8KyIiuSel16k75/Y459ZFx1uAjcDkZNsdTpChUIgpU6YA9Lp1\npYiIHJ4h96mb2QzgVGB1wksOeJ+ZvWxmj5rZu4eyPnXBiIik3pAuaYx2vTwAfDXaYo+3BpjqnGsz\ns4uAh4Djk60n/rl/fpeLkrqIyCF1dXXU1dWN+O8HvfeLmeUBjwCPOeduG3SFZm8Bpzvn9ifMd/Hb\n+s53vsOyZcv4zne+w8033zyi4EVEcl1K+9TN+yna3cCG/hK6mVVHl8PMzsTbUexPtmw8db+IiKTe\nYN0vZwNXAq+Y2drovMXANADn3J3APwJfMLMeoA2YP5QN61p1EZHUG/Nb7/reeOMNjj/+eKZPn65f\nloqI9GO43S9pS+odHR0UFRURDAbp6OggFErLbWhERDJaxt9P3VdYWMikSZMIh8Ps2rUrXWGIiOSU\ntCV10I29RERSLSOSuk6WioikRlqTui5rFBFJrYxoqav7RUQkNTIiqaulLiKSGup+ERHJIWm7Th2g\ntbWVkpIS8vPzaW9vJxBI6z5GRCTjZM116gDjxo2jsrKSrq4u9u7dm85QRERyQtqbxupXFxFJnbQn\ndfWri4ikTtqTui5rFBFJnYxJ6mqpi4gcvrQndXW/iIikTtqTurpfRERSJ2OS+tatWxmra+ZFRHJV\n2pN6RUUFZWVltLa2sn379nSHIyKS1dKe1M2MD33oQwA88MADaY5GRCS7pT2pAyxcuBCA++67L82R\niIhkt7Te+8XX0dFBdXU1TU1NbNy4kRNOOGFMYhIRyXRZde8XX2FhIZdffjkAK1euTHM0IiLZKyOS\nOhzqglm5cqWughERGaGM6H4BCIfD1NTUsGfPHlavXs2ZZ545JnGJiGSyrOx+AQgGg8yfPx/QCVMR\nkZHKmJY6wIsvvsgZZ5xBdXU1O3bsIBQKjUlsIiKZKmtb6gCnn346x
x13HHv37mXVqlXpDkdEJOtk\nVFI3M6644gpAV8GIiIzEgN0vZjYV+CVwFOCA/+Oc+3GS5X4MXAS0AZ92zq1Nssyg3S8Ar7/+Ou96\n17soLS1l7969FBUVDfmfERHJNanufukGrnfOzQbOAr5kZicmbPBiYJZz7jjgGuBnw4y5l+OPP565\nc+fS3NzMI488cjirEhE54gyY1J1ze5xz66LjLcBGYHLCYpcC90SXWQ1UmFn14QTld8HoKhgRkeEZ\ncp+6mc0ATgVWJ7w0BYi/veIOoOZwgvrUpz5FIBDg0Ucf5cCBA4ezKhGRI8qQrhk0sxLgAeCr0RZ7\nn0USppN2ni9dujQ2XltbS21tbdLtTZo0ifPPP5/HH3+cBx54gKuvvnooYYqIZL26ujrq6upG/PeD\nXqduZnnAI8Bjzrnbkrz+n0Cdc+7+6PTfgXOdc3sTlhvSiVLfL37xCz7zmc9QW1vLk08+OeS/ExHJ\nJcM9UTrY1S+G11/e4Jy7vp9lLga+7Jy72MzOAm5zzp2VZLlhJfXGxkaqq6vp6upi27Zt1NQcVo+O\niEhWSvXVL2cDVwLnmdnaaLnIzK41s2sBnHOPAm+a2WbgTuCLIw0+Xnl5OZdccgnOOe6///5UrFJE\nJOdl1G0CEv3+97/nsssu49RTT2XNmjWjFJmISOZKafdLKo0kqXd0dHD00UfT2NjIhg0bOPHEEwf/\nIxGRHJLV935JpIdniIgMT0YndaDXvWD08AwRkYFlfFI/99xzmTRpEm+++SbPPfdcusMREcloGZ/U\ng8FgrLV+3XXX0dXVleaIREQyV8YndYDFixczbdo0XnjhBW666aZ0hyMikrEy+uqXeM8++yznnHMO\nkUiEv/zlL3zoQx9KYXQiIpkpp65+iXf22WezZMkSAK666ir27duX5ohERDJP1rTUAcLhMOeffz5P\nP/00H/nIR/jDH/6AdycDEZHclLMtdfBOmv7qV79i/Pjx/PGPf+QnP/lJukMSEckoWdVS9/m3D8jP\nz2f16tXMmTMnJesVEck0Od1S93384x/n85//PF1dXcyfP5/W1tZ0hyQikhGyMqkDrFixgtmzZ7Np\n0yauu+66dIcjIpIRsrL7xffqq69yxhln0NnZyX//93/ziU98IqXrFxFJtyOi+8V38skns2LFCgCu\nvvpqtm7dmuaIRETSK6uTOsAXvvAFPvaxj9HY2MiFF17Iq6++mu6QRETSJuuTuplx99138+53v5u/\n//3vnHHGGdx+++26o6OIHJGyPqkDVFZW8vzzz3PNNdfQ2dnJV77yFS699FL96lREjjhZfaI0mQcf\nfJDPfe5zHDx4kEmTJvHLX/5S94kRkax1RJ0oTebyyy/nlVde4ZxzzmH37t3MmzePRYsW6Za9InJE\nyLmkDjB16lRWrVrFzTffTCAQ4Ac/+AFnn302b7zxRrpDExEZVTnX/ZLof//3f1m4cCFbt26lsLCQ\nz372s3z961/nmGOOGfNYRESGa7jdLzmf1AEOHjzIl7/8Ze677z7AuzHYJz/5SW644QbdN0ZEMpqS\n+gDWr1/PLbfcwsqVK+np6QGI9bmfd955uo2viGQcJfUh2LZtGz/60Y+46667YjcDmzt3LjfccAOX\nXHIJRUVFaY5QRMSjpD4M+/fv56c//Sk//vGPY9e0h0Ih5syZw1lnncV73/tezjrrLI455hi14kUk\nLZTUR6C9vZ3/+q//4q677uLll18mEon0ev2oo46KJfgzzzyT97znPVRWVqYpWhE5kiipH6aWlhZe\neOEFnnvuOZ577jn+9re/UV9f32e5qVOnMmfOHObMmcOpp57KnDlzmDFjRqxF75zjwIED7Nmzh717\n97J371727NlDQ0MD06ZN47TTTuOkk06ioKBgrP9FEckiKU/qZvZz4CPAO865k5O8Xgv8P+DN6KwH\nnXPLkiyXFUk9kXOOLVu2xJL82rVreeWVV2hra+uzbFlZGTNmzKChoYF33nmH7u7uAdedl5fHSSed\nxGmnncbpp5/OaaedximnnKI+fRGJGY2k/gGgBfjlAEn9a865SwdZT1Ym9WTC4TBvvPEG69ati5W1\na9fyzjvv9FquvLyc6upqqqurOfroo6murmbChAls3ryZNWvWsGnTpj43HgsGg8ycOZPp06czbdo0\npk+f3mt8ypQp5Ofnj+W/KyJpNCrdL2Y2A/jDAEn96865jw6yjpxJ6v3Zs2cPO3bsYOLEiVRXV1NY\nWDjg8i0tLaxbt441a9bw0ksvsWbNGjZs2NCnTz+emXHUUUcRCoUws1h3jz9uZgQCASZMmEBNTU2v\nMmXKlNhQRwMi2SEdSf1c4HfADmAn8A3n3IYky+V8Uk+FtrY23nrrLbZu3crWrVvZtm1br+GuXbsG\nTPpDVVZWRmlpKSUlJbESP11aWsr48eMZP348EyZMiBV/uqKigkAgQHd3Nx0dHb1Ke3s7HR0ddHZ2\n0tPTQ09PD93d3bFxfzocDlNcXEx5eTllZWWxUl5ezrhx4wgEvLtYOOdoa2ujubm5T2lrayMcDhOJ\nRGLDxPHi4uJe648vxcXFAHR2dtLY2EhTU1Os+NMtLS0Eg0Hy8vJ6lVAoFBsvLS1lwoQJVFZWUlFR\nQTAYHPF745zj4MGDsfMw/jkZM6OqqoqJEycyceLE2Hgqjtz8Ou7p6aG0tDRW97nKOUdXV1fsc5ps\nmJ+fHzvSHqyBNpqGm9RDKdjmGmCqc67NzC4CHgKOT7bg0qVLY+O1tbXU1tamYPO5pbi4mNmzZzN7\n9uykr3d3d7Nv3z7C4TDOuVj3jT/unCMSiVBfX8+OHTvYsWMHO3fujI37037iOhzRD9thrWOgdZeW\nlmJmNDc3p2RHlkwgECAYDA56/mO4KioqYjvCyspKSkpKAIhEIr3eK//9CofD1NfXxxL4cG5AV1ZW\nxsSJE6msrGTcuHEUFxfHhvHjRUVFNDc309DQQENDA/X19b3GOzs7Y3VSXl4e26nHl7KyMiKRSK8d\ndGIxM/Ly8sjPz++zI8zLyyMSidDc3ExTU1NsGD/e2toa2xGXlpb22RGXlpYSiURiO91kw7a2tqR1\nPdLPq9+V6nejVldXc9RRR1FSUtKnvvur/8LCwiHtLOvq6qirqxtRnJCClnqSZd8CTnfO7U+Yr5Z6\nhohEIrEWaEtLC83NzbFxf7q5uZkDBw6wf/9+9u/f32t8//79HDx4EPBO9hYWFiYt8V/qUCgUK/50\nIBCgra2tT8vY/2LHKywspLS0tE8ZN25cbF1+go4fNzPa29uTbqOpqYn29nYA8vPzkx4xlJWVMW7c\nOCKRCN3d3f2W5ubmXnVzuJ/10tLSPkkEYN++fdTX17Nv377YeDgcPqxt+QoLCwmFQrS0tKRkfZku\n/rNbUFDQZ9jR0RHbyfq/QD9cRUVFfZL/b3/7W4499th+/2bMW+pmVo13ZYwzszPxdhT7B/s7SZ9A\nIEBFRQUVFRUjXoefSA6n
m2EgPT09NDc3A1BSUkJeXt6obKe7u5tIJJLSS0vD4TAHDx7stRNsbm4m\nEAj0Ovfhn//wh5WVlbEE7ncLDcZvse7bt4/9+/fT1tZGa2trr2H8eElJCVVVVVRWVlJZWRkbr6qq\nim2zu7ubxsZGDhw40KscPHiQpqYmgsFgr510fAkGgzjnBtwBAr1a3YnjxcXFtLe392rFJ5ZgMNhr\nx5s49LvvEus7vgxVJBLpdXmyP9y3b1+f+o0vra2ttLe39xr3S0NDw/A/WEM0lKtffg2cC1QBe4El\nQB6Ac+5OM/sS8AWgB2jDuxLmb0nWo5a6iByxIpEIHR0dfZL/7NmzB2xU6MdHIiI55Ih/8pGIyJFM\nSV1EJIcoqYuI5BAldRGRHKKkLiKSQ5TURURyiJK6iEgOUVIXEckhSuoiIjlESV1EJIcoqYuI5BAl\ndRGRHKKkLiKSQ5TURURyiJK6iEgOUVIXEckhSuoiIjlESV1EJIcoqYuI5BAldRGRHKKkLiKSQ5TU\nRURyiJK6iEgOUVIXEckhSuoiIjlESV1EJIcoqYuI5BAldRGRHDJoUjezn5vZXjN7dYBlfmxmb5jZ\ny2Z2ampDFBGRoRpKS/0XwIX9vWhmFwOznHPHAdcAP0tRbCIiMkyDJnXn3DPAgQEWuRS4J7rsaqDC\nzKpTE56IiAxHKvrUpwDb46Z3ADUpWK+IiAxTqk6UWsK0S9F6RURkGEIpWMdOYGrcdE10Xh9Lly6N\njdfW1lJbW5uCzYuI5I66ujrq6upG/Pfm3OCNajObAfzBOXdyktcuBr7snLvYzM4CbnPOnZVkOTeU\nbYmIyCFmhnMusTekX4O21M3s18C5QJWZbQeWAHkAzrk7nXOPmtnFZrYZaAX+eWShi4jI4RpSSz0l\nGzJzW7Z8m2nTvk0oVDom2xQRyXbDbamP6S9Kt237PqtXH8fu3XfjXHgsNy0ickQY06ReVnYW3d17\n2bTpc7z00lwOHKgby82LiOS8Me1+iUQivPPO/bz55iI6O71L26uq/oFjj72F4uJZYxKHiEg2GW73\ny5gmdX9b4XAb27evYNu27xOJtGGWx6RJn2XChIupqDiHUKh8TGISEcl0WZHUfZ2du3jzzcXs3XtP\n3NwApaWnU1FxHuPHn095+fsJBseNSYwiIpkmq5K6r6XlZfbte5CDB5+kqelvONcT93chSkvPpKLi\nPMrKzqSk5DQKCqZgNuT/UUQka2VlUo8XDrfS2PgsBw6s4uDBJ2lufhGI9FomL+8oSktPo6TkdEpL\nT6O09HQKCqYp0YtIzsn6pJ6op6eRgwefobHxGZqbX6KlZQ09PX1vGhkKVVJUdAyhUCV5eZXk5VX1\nGvrzQ6GKaCnDLJiKf21QkUgnbW1v0Nb2dzo6thAKjaew8FiKimZSUDCVQCAVd2sQkVyUc0k9kXOO\njo63Ywm+ufklmptfoqenYdjrCgZLowm+nFCogmCwPJroDbMA3n3KLHoEYECAQKAwunwZwWB5dLyc\nYLAseoLX0da2iba2v9PWtpG2to20t79J4tGGzyxEQcF0ioqOjSX6wsLp5OdPpqBgCvn5kwgGC0dc\nXyKS3XI+qSfjnKOzcwddXbvo7q6nu7shWrzxnh5/fD89PY309BwkHG5i7G4mGaCo6FiKi0+gqOg4\nenoO0N7+Ju3tW+jqSnrvs15CoQkUFEyOJfq8vCqc6yYcbicS8UsHkUh7dF4HgUBBwo7HH/d2PoHA\nOAKBPMxC/ZQ8gsFxBIMlBIMl0eWHd0ThnFOXmMhhOiKT+kg4FyEcbqan52C0NNLT0xj9pasDHM5F\nYuP+dCT0jbtoAAALUElEQVTSTk9PE+FwY+xvwuGmXn9fXHwcxcUnRouXyPtrbYfDHXR0vEVHx5ux\nRN/ZuZ3Ozp10de2iq2t3rxPH6RQIFMaSfDBYAgSIRDpxrotIpCtu2Ekk0oV3dGLRo58gZoeKPx0I\n5BMIFGJWQCBQSCAQPywALLrD6oxuqzM63hGd7okeVcWvP5SwzQAQiDv6CkR3Nv50JMn646e7GKwB\nEAyWRLv6JvYa5ud7w2Aw/jJdFzf0xr3Plr9jbovuqNuiO2lv2n8PAoGihKE3HgwWxb0/pb3eq2Cw\nlEAgD+fChMMt9PQ0Ew43Rb8Dh8adc7Edf2KDYCRHjN53Pv7/9f5X73sWxrn40hOd5wgGi6ONifwh\nbicS/b8ORhttrUCk13f4UCwRnHM419Prs+tcd6/PcSBQSH7+0eTnT4oOj07LUbOSeo5xLkJ3dz2d\nnbvo6tpFZ6d3NOIlw6LYl/nQl9wbj0Q643Y8TX3Gw+HW2Bepb/E+3F5SaYkV3SY/u5nl4Vz3Yfx9\nfnRnTjQp+0kzHJeo/SSaGv42/Z1UKOQNneuJa4x5w/66OFMpFBofl+Srow2S/Oj3sSA27g0LovOS\nNVgKY+NFRe8acGehpC6jwjkXbUU2x1p6QPQDeuhDHP+h9lrLkV6tsb6ts664lnhHXOvYGwcX15Iv\nSPiiFGCWF91GT7/b8BONl4Ai0RbboXlmgX7X7/9PXiu/39qhp6c52sW3L27ojXd17Yt29/nnZojr\nljo079AOuohAoDg6Xhybf+ioxe9u84rf5ebthFtj75FX4t8v735LXpIsi55TKo0bLwOIawQ0RY9E\n/aPQke8QDj1Hxz9PlXjUFoqbNiKRtl4xD4X3f/nnx0rizo/5dexvOxAd5sV9dvMSPrt5RCLtdHXt\njpY9dHXtGZWj5jPO2Mi4cSf0+3rKb70rAt4HyzskLgaG8wha/4ua2/LzC8jPrwL6/3Kmk9fd0IVZ\n3iA7qP7/3jv6a4l1XXnvayC6Uzw07t9S6nDPp8Rv89COqplwuBmzvNgFDv5FDqN9FZl31NwQTfC7\n6e7eF22MxHc9dsV1SXZGj3i9ne+hxkpHr4aMf/STKmqpi4hksIy+9a6IiIwuJXURkRyipC4ikkPG\nNKlv2TKWWxMROfKM6YlScMycCRde6JXaWihJ7YlfEZGcktHXqY8f7zgQdy+uvDz4wAfgggvgve+F\nqiqorITx473XRESOdBmd1Ht6HC+8AP/zP/CnP8Hzz0Oknx+BlZXBhAlekp8wwSslJVBcfKiMG9d3\nfNy45KWgAHQbEhHJNhmd1BO3tX8/PP64l+Q3bvSmGxrgwIH+k/1IBQLeTqG83NthxA/98bIyyM/3\njhJCIW8YPx4KQTDo7RwGKwMJBr11+SVx2gx6eiAc9obx4+GwV+LjS1YKCw/t6EL6iZlI1sqqpN6f\nSAQaGw8l+f37vdLW5pXW1t5Df3yg0n04v3DOcnl5vY9wiouhqMg7eiko8HZkycYH2mnk5Xk7H+e8\n9ysS6Tve0wOdnf2Xrq5DO6BkpaDA2xn7O7b+Sne3V5KN9/R4/2tp6aEdd/y4vyPv6uq/9PR4cebn\nHyp+XfklkHDJQbKPeiDg1Vkg0Lv4DYHWVmhpgeZmrySOmx1qhMQ3Rvxx//yU/z441
7tEIocaBYkN\nBb8unfMaGX4JBPpOD9ZoCYUOfYYSP1MFBd4yXV3Q0QHt7X2HnZ3eNuK36xe/EeTH4scXP4wf76++\n/ffLf58Sh/H6a7gNpS5SISeS+mjo7va+GI2N0NTU/zBZUogfhsN9vyyJZSD+lytZcvK/XJGIl0Ti\nP8TxQz/RJUtmfunoOLRDS/VRj8jh8BsDuSAQ6HtEn3h0P9jwP/8Tamr634bu/dKPvDzvBOz48emO\nZGw55yX5xCObtjavteS3mONbz/544o4isTjXt/UTPx4M9m6hJZb8fG+H1NHRf/F3cPHdU4ldV/FH\nD4lfrFDIW09Tk1eamw+N+9OdnX1b3vElGPTi9Fvufj3FT/fXwot/H+KPZOKLP7+42DuKSCwlJd7Q\nP4KNb4jEl9bWwVuV/TUU4rsW/dZ8fMs+fnqwz1viEVri58s57/0pKvKOyBKHBQXeMvHbTjzC8Osu\nPq74eX7jK/EIMr74709/w/j3rb8SiRz630aqo2Pkf5vMEZPUj1Rmh5JTRUW6o5EjnZ8Igzlwjzd/\nx9Nf119/R/yJ8yZNSm1cR0z3i4hINkr5Db3M7EIz+7uZvWFmi5K8XmtmjWa2NlpuHG7QIiKSGgMm\ndfNuknw7cCHwbmCBmZ2YZNGnnHOnRsuyUYhzzNTV1aU7hEFlQ4ygOFNNcaZWtsQ5XIO11M8ENjvn\n3nbeY0/uBz6WZLmc+VlPNrzR2RAjKM5UU5yplS1xDtdgSX0KsD1uekd0XjwHvM/MXjazR83s3akM\nUEREhm6wq1+GcmZzDTDVOddmZhcBDwHHH3ZkIiIybANe/WJmZwFLnXMXRqe/DUScc8sH+Ju3gNOd\nc/sT5uvSFxGREUjlj49eBI4zsxnALuBTwIL4BcysGnjHOefM7Ey8HcX+xBUNJygRERmZAZO6c67H\nzL4M/AkIAnc75zaa2bXR1+8E/hH4gpn1AG3A/FGOWURE+jFmPz4SEZHRN+qPsxvsx0uZwszeNrNX\noj+gej7d8fjM7OdmttfMXo2bN8HM/mJmr5vZn80s7TcA6CfOpWa2I+6HaRemOcapZvakmb1mZuvN\n7F+i8zOqPgeIM9Pqs9DMVpvZOjPbYGbfj87PtPrsL86Mqk+fmQWj8fwhOj2s+hzVlnr0x0ubgA8B\nO4EXgAXOuY2jttER6u8Eb7qZ2QeAFuCXzrmTo/N+ANQ7534Q3VGOd859KwPjXAI0O+dWpDM2n5kd\nDRztnFtnZiXAS8A/AP9MBtXnAHF+kgyqTwAzK45e+RYC/gp8A7iUDKrPAeL8IBlWnwBm9jXgdKDU\nOXfpcL/vo91SH+qPlzJFxp3Mdc49AxxImH0pcE90/B68L3xa9RMnZFCdOuf2OOfWRcdbgI14v7vI\nqPocIE7IoPoEcM61RUfz8c67HSDD6hP6jRMyrD7NrAa4GPi/HIptWPU52kl9KD9eyhQOeNzMXjSz\nq9MdzCCqnXN7o+N7gep0BjOIr0R/mHZ3ug/D40Wv6DoVWE0G12dcnH+Lzsqo+jSzgJmtw6u3J51z\nr5GB9dlPnJBh9Qn8CPgmEP8UhGHV52gn9Ww6C3u2c+5U4CLgS9HuhIwXvfVlptbzz4BjgDnAbuCH\n6Q3HE+3SeBD4qnOuOf61TKrPaJwP4MXZQgbWp3Mu4pybA9QA55jZeQmvZ0R9JomzlgyrTzO7BO/y\n8LX0cwQxlPoc7aS+E5gaNz0Vr7WecZxzu6PDfcDv8bqOMtXeaL8rZjYJeCfN8STlnHvHReEdTqa9\nTs0sDy+h3+uceyg6O+PqMy7OX/lxZmJ9+pxzjcAf8fqCM64+fXFxzs3A+nwfcGn0/N6vgfPN7F6G\nWZ+jndRjP14ys3y8Hy89PMrbHDYzKzaz0uj4OGAe8OrAf5VWDwP/FB3/J7xbM2Sc6AfQ93HSXKdm\nZsDdwAbn3G1xL2VUffYXZwbWZ5XfZWFmRcCHgbVkXn0mjdNPlFFpr0/n3GLn3FTn3DF4v/dZ5Zy7\niuHWp3NuVAted8YmYDPw7dHe3ghjPAZYFy3rMylOvD32LqAL7/zEPwMTgMeB14E/AxUZGOdngF8C\nrwAvRz+I1WmO8f14fZXr8JLPWrzbSmdUffYT50UZWJ8n4937aV00rm9G52daffYXZ0bVZ0LM5wIP\nj6Q+9eMjEZEcMuo/PhIRkbGjpC4ikkOU1EVEcoiSuohIDlFSFxHJIUrqIiI5REldRCSHKKmLiOSQ\n/w8CRoKlSC3vIgAAAABJRU5ErkJggg==\n", 1942 | "text/plain": [ 1943 | "" 1944 | ] 1945 | }, 1946 | "metadata": {}, 1947 | "output_type": "display_data" 1948 | } 1949 | ], 1950 | "source": [ 1951 | "plt.plot(all_losses_valid_1, 'y', all_losses_valid_2, 'b', all_losses_valid, 'k', linewidth=2.0)\n", 1952 | "plt.legend((\"loss_valid1\", \"loss_valid2\", \"loss_valid\"))" 1953 | ] 1954 | }, 1955 | { 1956 | "cell_type": "code", 1957 | "execution_count": 14, 1958 | "metadata": { 1959 | "collapsed": false 1960 | }, 1961 | "outputs": [ 1962 | { 1963 | "data": { 1964 | "text/plain": [ 1965 | "" 1966 | ] 1967 | }, 1968 | "execution_count": 14, 1969 | "metadata": {}, 1970 | "output_type": "execute_result" 1971 | }, 1972 | { 1973 | "data": { 1974 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX8AAAEACAYAAABbMHZzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xl8VNX5+PHPM5NtskKIEFYDiIJIBVQURI2KLG6g4IaK\nS13QuqDty+Vbf4Uuora2LnWtUkAUUWoRrFrESqhYJGyKQIIEjAQIkBAICdlmMs/vj5nEJCSZAGEy\nkOf9es3Lueeec+fc6/DMybnnniOqijHGmNbF0dIVMMYYE3wW/I0xphWy4G+MMa2QBX9jjGmFLPgb\nY0wrZMHfGGNaoYDBX0RGikimiGwSkUfr2f8rEVnjf30nIh4RaePfly0ia/370o/GCRhjjDl00tg4\nfxFxAhuBYcB2YAVwg6pmNJD/cmCSqg7zb/8AnKGqBc1dcWOMMYcvUMt/EJClqtmq6gbmAKMbyT8e\neLdOmhxB/YwxxhwFgYJ/ZyCnxvY2f9pBRCQaGAF8UCNZgc9FZKWI3HkkFTXGGNN8wgLsP5S5H64A\nlqrqvhpp56pqroicACwSkUxV/fKQa2mMMaZZBQr+24GuNba74mv91+d66nT5qGqu/795IjIPXzdS\nreAvIja5kDHGHAZVPexu9UDdPiuBXiKSIiIRwHXAgrqZRCQBOB+YXyMtWkTi/O9jgOHAd/V9iKra\nq5lekydPbvE6HE8vu552LUP1daQabfmrqkdE7gMWAk5gmqpmiMjd/v2v+7OOARaqammN4h2AeSJS\n9TnvqOpnR1xjY4wxRyxQtw+q+inwaZ201+tszwRm1kn7AejfDHU0xpjjhtu9l/LyHFyuk3E6o1D1\nUlqaRXn5dsLC4iku/pa9e78gPDyRuLizaN/+BkSEvLx/UFi4jOLiNfTq9dIR1yNg8DfHltTU1Jau\nwnHFrmfzCYVrqVqJ11vm7zbxouqmsvIAJSWZlJZuISamD9HRvQEH4eGJiDgpKvqGoqIVeL3lxMUN\nID7+HHbteofi4rXExZ1JRUUuhYVf4vHsB7w4nfGIOFGtwOutQMRJVNSJeL3lHDiwnqKilYAXcBIW\nFkdlZQmqFQ3WOS/vfVS9FBR8Up1WVLTiiK9Fow95BYOIaEvXIdhWrIB16+DqqyEhoaVrY0xo8Ho9\nVFTspLJyPxERnQgLS0BE8HrdlJZuIiKiAyIRlJdvp6JiOx7PflyuHpSW/kB+/jxEwggLa0Nx8TeA\nkpg4kvDw9ni9B3A4otm/fzm7d79DZWVxk+rjdMYTGdmVkpL1tdLDwtrg8exroFRgIuFERnajrOwH\nfD8CEBHRGZerOx5PEZGRHUlMvAyv9wBbt/4Rj6fA/7nt6Nr1IWJjBxIfP4iIiCT0CG74WvBvRqtW\nwaJFkJ0NV10FI0bA7t2wZw/06ePLs2ULnH46FBdDdDT07Anl5TBpEtxzT4tW3zTCf+/KmBaxYkV/\n+vb9By5Xz+o0ETmi4G/dPg347jtf4E5Kgp/9DFTho498rfa9e2HyZGjfHiorYflyePFFeO+9n8rP\nmAEvvwyPPQb5+TBsGNx+O7z0ki/wd+wIubm+zwHfZ5nQdrw0UsyxRUQ444zVzd4AsZZ/Harwm9/A\nH/7wU9rVV4PHAwtqDHIdNgymToWxYyHH/wx0VBTceivk5cEHNZ5zFvEdt0qnTr6gX1wM+/ZBRASc\ncAK0a3dUT80cAX8rq6WrYVqhhr57R9ryP+6D//79kJUFAwbAzp3wl7/ApZfChRfWzqcK69fDc8/B\n3/8OTicMHQqrV0NRkS9Pmza+rpk33vC15p1OX8s/JcXXzTNpEnTrBm43jBoF//kPXHYZvPkmvP02\nLF7s6/Z57TW44IKjdsrmKLDgb1qKBf9D5PH4umKefBIKCuDiiyEzE7Zv97XEJ0yAjRth2zZwOHzd\nLmVlvrLh4fDuu75WfXY23HcflJTAtGnQvTt8/DFcfrkv7zXX+AJ7RETtzy8rg//9D847z3c8E1p8\nD8pU4HBEVqft3ZtGYeESYmP7U1LyPXv2fIRIOE5nLD/72QIL/qZFWPBvgspKXxB3u+G22+CLL3zp\nERFQ4R9Jdcop8P33tbthqrRr5wv4Eyf6/lJozFtv+frsf/lLCLM7J0FTUbELr9dNWFgcqkp5+Y8U\nFi5DJIzIyC5UVORSVraF0tLNlJZuprw8B9VKwsISiI8fQlTUiXg8e9mz52PKyrbgcLiIiemHy9WL\n3bvfafBzL7zw2OvznzFjBtOmTePLL30zqjgcDrKysujRo0cL1+z4snXrVvr27cv+/fuPysCAoxX8\nj/mwtWSJryW+bh189ZWvm6dKhw6+1vqgQb6/AFwu+O1v4euv4d//hrPPhv79fT8E7dpBXFzTP3fC\nhOY/l9ZGtZL8/AW43btp1+5KIiM74nbv48CBtVRWlhAdfQoFBZ+Sn78A1XLKyrZSVrblsD7L7d5N\naemmOqmC11tKUVE6RUXpgJPk5AmUlf1AWFgiHTrciNMZ5x8aePWRnq4JQXV/IA9Ht27dKKrqGz6G\nHNPBf+FCX996zR/Fdu18QycHDYKZM6FLF1/688//lOf8830v0zw8nv2oenE4otix41UKCj4lIqIj\nLldPXK6TaNv2YhyOKDZvfoT9+5fhdMbidMZRVvYjpaUb/Ue5B5HwRh92AXA643A6Y/F49iMSRnh4\nIvHxQxBxUF6+jYiITrhcPXC5ehIV1YOoqBNxOCIpL99OYeFSPJ69iISTkHA+CQmDqawsZu/ezykq\nWkVS0lXEx5919C+YaVBlZSVOp7Olq1GL1+vF4Tj+Vrw9JoL/5s0wdy6kp/vGyP/f/8GuXXDTTb7A\nP2GCrw/+nHOga9fAxzM/qa/vG8DrdVNSsgG3ey9Opwu3uwCPpwCHw8WBAxvYu/dzKip24nbn4/Hs\nAUAkot7gLRJOeHg7Kip2HrQvMrIbsbE/o6Bgob8eUcTEnIbDEc2BA+uJju5Np04TiYzsRFhYon/f\noX9tIyI6EBc38KD0sLAETjhhLCecMPaQjxkqnn76ad588012795N165defLJJxkzZsxhHevjjz/m\niSeeYMuWLSQkJPDzn/+cyZMnV+9funQpjzzyCBkZGcTFxfH73/+eW265hdLSUp544gk++OAD9u3b\nR79+/Vi0aBFff/01N998Mzk5Py0LkpKSwt///ncuuugipkyZwrp163C5XCxYsIDnnnuOfv368cAD\nD5CZmYnL5WLs2LH85S9/Idx/82z9+vVMmjSJ1atXEx4ezqRJk7j11lvp2bMnOTk5JCYmArB69WpG\njhxJbm5uvT8oGRkZ3HPPPbjdbuLi4ggPD6egoIBbb70Vl8vFjz/+yH//+18WLFhQfX71XZfs7Gx6\n9OiBx+PB4XCQmprK+eefzxdffMHatWsZPHgws2fPpl2IDecL6eC/YwdMmeIbfVNZ6UubN8833j47\n2/fw1PDhMH2676atOVhVX2HNvsjy8lwKC79i//6vyM//
kLKybKKj+xIT0xenM46SkgyKi1fj9ZY1\n6TMcDhe+LpQSYmL60bXrI3i9ZZSWZnHgwLcUFHxGRcVOYmMHcNJJzyPi9HelCG3apOJwRFBZWQYo\nDkfUMfdAVVpa89U3NfXQ7yucdNJJLF26lOTkZN5//31uuukmsrKyDuvzY2Njefvtt+nbty/fffcd\nl1xyCf3792f06NH8+OOPXHrppbzxxhuMGzeOwsLC6qD+q1/9ioyMDJYtW0aHDh1IT09vsLVc9//v\nggUL+Mc//sGsWbMoKytjw4YNvPDCC5x55pnk5OQwatQoXnnlFR588EGKiooYNmwYjzzyCB9//DEV\nFRVkZGSQnJxMamoq77//PhMnTgRg1qxZ3HDDDQ3+JdGnTx9ee+013nzzzYO6fd59910+/fRTBg8e\nTHl5OV9//XWD16U+VeW7dOnCqFGjePbZZ3nqqacO6f/F0RaSwV8V/vhH+P3v4cABX2AfPx6GDPGN\nv1+1ypdv6FDfSJvWGvhVvZSXbycyshPgYP/+/yESSVzcGezbt5idO2eSnz8fhyOCxMSROJ0xHDiw\njsLCpQcdq6Rk/UGPsbtcJxER0RGvt5SwsETCwxOprCwlIqID7dpdist1MmFhbYmI6ACAx7OPsLA2\nB/3jLivbyv796SQlXXHQXxhVnM6o5rkordC4ceOq31977bU89dRTpKenH9aP6AU1xiD369eP66+/\nniVLljB69Ghmz57NJZdcwnXXXQdAYmIiiYmJeL1epk+fzvLly+nYsSMA55xzTpM/c8iQIVx55ZUA\nREVFMXDgT3+hnXjiidx1110sWbKEBx98kH/961906tSJhx56CICIiAjOOsvXVTdhwgT++te/MnHi\nRCorK5kzZw4fffRRo5/d0I3UMWPGMHjwYAAiIyMbvS71lb/ttts46aSTAN//kwULDpoJv8WFXPBX\nhV/9yjceH3zj5596yjdKB2DMGN9Tshdd5HvQ6hhrJDaLkpJN5OT8yX+zdBcREck4nQnV/ed15x6p\nrIRdu2ZVbzscUSQkXEB8/CASE0cSGzuA4uJvKCv7AY9nHy7XScTFnUl4eOIh1Ss8vG296VFR3YiK\n6nYYZ3psOJzWenN66623eO6558jOzgaguLiY/Pz8w+o7X758OY899hjr16+noqKC8vJyrr32WgBy\ncnLqHSmUn59PWVkZPXv2PGhfU3SpujHn9/333/Pwww+zatUqSkpK8Hg8nHnmmY3WAWD06NHcc889\nZGdnk5mZSUJCQnW5Q9W1Tv9xY9elPsnJydXvXS4XxcVNm08omEIu+E+d6gv84eG+6RKuuqr2/s6d\nfT8Gx5PKytLq7g63ey95eXPJz59PSclGHI5IunV7lDZtLqC4+Bt27pxFfv48qiaEcjhi/H3pO4mI\n6IRqZfUPQseOd9Ohw3hU3ezblwY4iIjoQNu2lxAWVntoU0LCYBISBgf71M0R+vHHH7nrrrv44osv\nGDx4MCLCgAEDDntY6vjx43nggQdYuHAhERERPPTQQ+zZ47un061bN9LT0w8qk5SURFRUFFlZWfzs\nZz+rtS8mJoaSkpLq7crKSvLy8mrlqfsXyj333MMZZ5zBe++9R0xMDM8//zwf+B+Z79atG+/VnEel\nhqioKK655hrefvttMjMzmdCEIXlN/euovuuSn5/fpLKhKqSC/9q1vjlzRGDOnIMD//HA6y0nL++f\n7NuXRklJBiUlGbjd+TgcLhyOKDyevQeVycy8pda2SBgdOtxGly4PEhNzGsXFq6moyKNt24sAB6Wl\nm3C5euJw/PTkWUxM36N9aqYFHDhwABEhKSkJr9fLW2+9xbp164DDey6huLiYtm3bEhERQXp6OrNn\nz2bEiBGALwBOnTqVuXPnctVVV1FYWMi2bds4/fTTuf3223n44YeZNWsW7du3Jz09nTPOOIOTTz6Z\nsrIyPvnkEy655BKmTp1KeXl5wDrExcURHR1NZmYmr776Ku3btwfgsssu4+GHH+aFF15g4sSJ1X3+\ngwYNAnxdPxMmTCAvL69JfezJycls27YNt9tdfUO5vuvW2HWpz7HwTEjA3nIRGSkimSKySUQerWf/\nr0Rkjf/1nYh4RKRNU8rWVFkJd93l++8vfuGbT+dY5nbvZc+ef7N16x/ZtGkS339/D2vXjuJ//+tE\nRsZ4cnP/RmHhl7jd+YiE4fWW+ochRtKmzYWccsp0zjprPb17z8TlOpmIiE7Ex59L9+5PcfbZP9C7\n95vExvZDRIiLO4N27UbicETgcIQRE9OnVuA3x69TTz2VX/7ylwwePJjk5GTWrVvH0KFDEZHqV5Wm\ntHJfeeUVfvOb3xAfH8/vf//76v598LW6P/nkE/785z/Trl07BgwYwNq1awF49tln6devH2eddRbt\n2rXj8ccfR1VJSEjglVde4Y477qBLly7ExsbW6lKpW8eqY82ePZv4+Hjuuusurr/++uo8cXFxLFq0\niI8++oiOHTty8sknk5aWVl323HPPxeFwcMYZZxzUdVOfiy66iL59+5KcnFz9A1NfnRq7LvVd27rX\nPRQHMTT6hK+IOIGNwDB8i7mvAG5Q1YwG8l8OTFLVYU0tW/WE7/TpvlkvO3WCjAyIj2+O0zv6KitL\nyMuby759S/w3R0vYt29J9Zzi9YmN7U+HDjcRE/MzoqP7EBnZmcrKIrzecsLDk0Lyi9La2dw+x45h\nw4Yxfvx4br/99pauSrNoqSd8BwFZqprt/7A5wGig3uAPjAfePdSyXq9vdA/AM8+EXuD3PcTkJjy8\nnX+7iE2b7qWgYCFudwFQeVAZkQji4s4iLm4gUVEpiEQQGdmFmJhTcbl6HRTgw8JC7KSNOQatWLGC\n1atXM3/+/JauSsgLFPw7Azk1trcBZ9eXUUSigRHAvYda9pNPfJOudesGdf6aanF5eR+yceMdeDz7\nSEq6ktjY/uTlfcCBA2ur88TFnU379tfh8exDxEFCwvnEx5+D0+lqwZobU7++ffuydevWg9L/9re/\nccMNN7RAjZrHLbfcwvz583nxxReJiYmpTp84cSLvvHPwvE0333wzr7zySjCrGFICBf9D+Tv3CmCp\nqlaNMWxy2V/8YgoAvXvDV1+ltuhan5WVBygry6GkJIPc3DcoKPhp7fr8/Hn+kTbgcp1M375ziY4+\npcGx68aEovXr1wfOdAyaOXNmvemvvfYar732WpBr0/zS0tJq3d84UoH6/M8BpqjqSP/244BXVZ+p\nJ+884D1VnXMoZUVEQYmP902vfCiTqzUHr7eC4uI1bN/+Cnv2fHTQaBuHw0WPHk9zwgnjyMv7B253\nHk5nLB073tXguHZz/LE+f9NSWmRKZxEJw3fT9mJgB5BO/TdtE4AtQBdVLT3EsgrKiBG+mTaPNlUl\nP/9Ddu+ezYED6ykt3YSqp3q/wxFFZGQXIiO7kJg4ko4d76ju6zetlwV/01Ja5IavqnpE5D5gIeAE\npqlqhojc7d/
/uj/rGGBhVeBvrGxDn3XaaYd7Ck134EAGGzfewf79/6uRKkRF9SApaTSdOt2Ly9XT\nRtsYY457IbGYC/iGet56a/Mfv7x8BwUFn1JSsont21/A6y0jPPwEunV7nDZtUomOPgWnM7r5P9gc\nV6zlb1rKcb+YS3O3/L1eNzk5f+THH6fi9f70eHly8q2cdNILNrTSGNOqhcR8mCLQp0/zHc/rrWDD\nhuv44Ycn8HpLSEwcSbduj9Ov36f07j3dAr9pFWbMmMF5551Xve1wONiy5fBWQjMHS01NZdq0aQC8\n8847jU73UDNvqAiJln+PHlBjWO5hy8ubx86dMygtzaKkZANhYW049dT3SUy85MgPbowxNdSctuHG\nG2/kxhtvbFLeUBESwb+hLh9VZenSpezatYuoqCguvvhiXC7fg1MlJSUsWrSIK6+8Eq+3nOzsyeTk\n/LG6bFhYO04/fRFxcQFWYjfGhIxQXMbxeBUS3T4NBf8XXniB888/n2uuuYYrrriCDh06MHz4cK64\n4grat2/PmDFj+Ne/Hubrr1P8gd9J9+5TOf30Lzj77CwL/KZVePrppznppJOIj4+nb9++fPjhh4d9\nrI8//pgBAwaQkJBAt27d+O1vf1tr/9KlSxkyZAht27alW7du1Q9WlZaW8stf/pKUlBTatGnDeeed\nR1lZGWlpaQdNsJaSksIXX3wBwJQpUxg3bhw333wzCQkJzJw5kxUrVjB48GDatm1Lp06duP/++3G7\n3dXl169fzyWXXEK7du1ITk7m6aefZufOncTExFBQUFCdb/Xq1bRv357KyoOnXwEoLy+nTZs2tR56\ny8vLIzo6mvz8fPbu3cvll19O+/btSUxM5IorrmD79u31HqtuF9uiRYvo3bs3bdq04f777/cvlxpa\nAwZCJvirKkuWLGHixImMGzeOzMxMfve73wEwatQozjzzTIqKili0aBH/+te/OHDgAP36dWDz5udx\nu3cRG9uf/v3/w4knPk7bthcSHt6mhc/KtBYizfc6HFXLOO7fv5/Jkydz0003sXPnweslN0XVMo6F\nhYV8/PHHvPrqq9Xz5FQt4/jggw+Sn5/PN998Q//+/QHfMo5r1qxh2bJlFBQU8Kc//emQlnG85ppr\nKCwsZPz48TidTl544QX27NnDsmXL+M9//lM9DUPVMo6XXnopubm5ZGVlcfHFF9daxrFKoGUcIyMj\nGTt2LO+++2512vvvv09qaipJSUmoKj//+c/ZunUrW7duxeVycd999wW8hvn5+YwdO5apU6eyZ88e\nevbsyVdffRVy3T7Vv0gt9QL0vfdW6dChQxXflBAKaEREhAJ6wQUXqNfrVVXVrKws/eSTT/Ttt9/W\nzz+/RRcvRtPSInTXrjnVeYw5Gnz/VBra13yv5tC/f3+dP3++zpgxQ4cOHVqdLiK6efPmQzrWgw8+\nqA899JCqqk6dOlWvvvrqg/JUVlaqy+XStWvXHrRv8eLF2qVLl1ppKSkp+p///EdVVSdPnqwXXHBB\no3V47rnn9KqrrlJV1dmzZ+vAgQPrzTdnzhw999xzVVXV4/FocnKyrlixotFjf/7559qzZ8/q7SFD\nhuisWbPqzbtmzRpt27Zt9XZqaqpOmzZNVVWnT59efa1nzpypgwcPrlW2S5cu1XkPVUPfPX/6Ycfe\nkGj5T5gwhKVLl5KUlMRjjz3G6aefTkVFBQDPPPNM9S9mz549GTVqFOedtwOncyYiEZx22oe0b39d\n6P2qmlajOcP/4XjrrbcYMGAAbdu2pW3btqxbt+6wV5lavnw5F154Ie3bt6dNmza8/vrr1St5BXMZ\nx8svv5yOHTuSkJDAr3/964B1AN8yjhs2bCA7O5tFixY1aRnH1NRUSkpKSE9PJzs7m2+//Zar/KtI\nlZSUcPfdd5OSkkJCQgIXXHABhYWFAbtvduzYcdA5NWVtgWALieBfXl7OmDFjyMrK4qmnnuKLL77g\npptu4g9/+ANnn117ItCdO2eyZcsjAPTp8xbt2o1qiSobExKqlnF8+eWXKSgoYO/evZx22mlHtIzj\nmDFj2LZtG/v27WPixInVx+rWrRubN28+qEzNZRzrOtxlHE899VSysrIoLCzkySefxOv1VtehoeGq\nNZdxfPvtt5u0jKPT6eTaa6/l3Xff5d133+WKK66onhH0z3/+M99//z3p6ekUFhayZMmSJvXdd+rU\niZycnyY0VtVa26EiJIJ/ZGQkf/3rX0lISAAgMTGRWbNm8etf/7pWvp07Z5GZeRsAPXs+S/v2ITb/\nszFBVncZx+nTpzf7Mo5Vxo8fz+eff87cuXPxeDzs2bOHb7/9FofDUb2MY25uLpWVlSxbtoyKiopa\nyzi63W7+8Ic/HNYyjlUuu+wycnNzeeGFFygvL6eoqKjWusITJkxg+vTpLFiwgJtvvrlJ5zx+/Hjm\nzJnD7NmzGT9+fK16uFwuEhISKCgoOOjmd0MuvfRS1q9fz7x58/B4PLz44ouHfQ/maAqJ4H///fcf\n9GdSXfv3L/cHfqV79yfp2vWXwamcMSHMlnE8smUcAQYNGkRsbCy5ubmMGvVTT8KkSZMoLS0lKSmJ\nIUOGMGrUqAavYc3zSEpKYu7cuTz22GMkJSWRlZXF0KFDm1SXYAqJuX3y8/Np167hmTMrKw+wcuUA\nSks30bnzg/Tq9XwQa2iMze1zLLFlHJt43Jb+Qlet4duYTZvuZ/v2l4iJOY2BA1fgdEYFqXbG+Fjw\nPzasWLGCESNGkJOTU2s1r2PZ0Qr+IdHt05j9+1eyffvLiITRu/csC/zGHKG+ffsSFxd30KvmePdj\n0S233MIll1zC888/f9AyjvWd77333tvI0Y5/Id3yV61k9erBFBWtoGvXX9Gz55+CXDtjfKzlb1pK\nq2z579w5i6KiFUREdObEE3/T0tUxxpjjRsDgLyIjRSRTRDaJyKMN5EkVkTUisk5E0mqkZ4vIWv++\n9PrKNkS1kq1bnwKgR4+phIUFeXFfY4w5jjU6q6eIOIGXgGHAdmCFiCzQGssxikgb4GVghKpuE5Gk\nGodQIFVVCzhEeXn/pLT0e6KiUmjffnzgAsYYY5osUMt/EJClqtmq6gbmAKPr5BkPfKCq2wBUte5z\n5YfcJ6WqbN06FYCuXR/F4QiJmaeNMea4ESj4dwZqPpe8zZ9WUy8gUUQWi8hKEan5WJ0Cn/vT72xq\npQ4cWE9x8TeEh59AcvKtTS1mjDGmiQI1qZsyvCEcGAhcDEQDy0Tka1XdBAxV1R0icgKwSEQyVfXL\nugeYMmVK9fvU1FR69doAQNu2l9jQTmMO04wZM5g2bRpffun7J+dwOMjKympwYjRTW0pKCtOmTePi\niy9u6aoAkJaWVutp5iMVKPhvB2o+I90VX+u/phwgX1VLgVIR+S9wOrBJVXcAqGqeiMzD143UaPAH\nWL/+NQDatDm/qedhjDHNqrGlF7Ozs+nRo0et5wkee+yx6vnIFi9ezO9+
9zvWrFlD27Zt+eGHH2qV\nv/DCC1m/fj1lZWV07tyZhx9+mDvvbLxzJDU1ldTU1Ortps411JBAwX8l0EtEUoAdwHXADXXyzAde\n8t8cjgTOBv4iItGAU1WLRCQGGA4ErK2qUlj4XwASEiz4G9OaHGvLOO7fv7/eH4jY2FjuuOMOSkpK\nmDp16kH7X3zxRXr37k14eDjp6emcf/75nH/++ZxyyinBqDYQoM9fVT3AfcBCYAPwnqpmiMjdInK3\nP08m8G9gLbAceENVNwDJwJci8o0//V+q+lmgCpWWZlFRkUt4+AlER/c+knMzplWwZRwPbxnHKm+8\n8Qannnpq9fX75ptvqvelp6fTt29fEhMTuf322w+akbRqqum6zjrrLG688Ua6d+9e7/5+/foRHh5e\nvR0bG0t8fHyj9Wx2R7ISTHO8qLNKzY4db+rixeh3342td/UaY1pC3e9p3X3N9Tocc+fO1dzcXFVV\nfe+99zQmJkZzc3NrrS6l2rSVvNLS0nTdunWqqrp27Vrt0KGDfvjhh6qqmp2drXFxcTpnzhz1eDy6\nZ88e/eabb1RV9d5779ULL7xQd+zYoZWVlbps2TItLy9v0kpe4eHhOn/+fFVVLS0t1VWrVuny5cu1\nsrJSs7MFYPdmAAAUUUlEQVSztU+fPvr888+rqur+/fs1OTlZ//KXv2h5ebkWFRVpenq6qqpeeuml\n+uqrr1Z/zqRJk/SBBx5o9Hzff/997dy5s65cuVJVfasF/vjjj6qqeuKJJ2q/fv1027ZtWlBQoOee\ne64+8cQTqqr6ww8/qIho586dtUuXLnrbbbdpfn7+QcdftGiRpqSk1PvZl112mUZFRanL5ao+//o0\n9L3gCFfyCrngv2HDBF28GM3JeaHBi2FMsIVy8K/LlnFs+jKOw4cP1xdffLHefSkpKfr6669Xb3/y\nySfVSz4WFxfrqlWrtLKyUnft2qXjxo3TESNGHHSMxoJ/VT3nzp2rbdu2rf7RqetoBf+Qm96hqGgV\nAPHxg1u4JsY0zZH8A6z7Ohy2jKPP4SzjuG3btkbrXbPLqlu3buzYsQPwrVA2cOBAHA4H7du356WX\nXuKzzz7jwIEDTTrnKk6nk3HjxnH22Wczb968Qyp7pEIq+KsqZWW+Jdpcrl4tXBtjQp8t4/iTw1nG\nsWvXrvXWu8rWrVtrve/UqVOjx2voHkAgbrc76FNQh1Twr6jYiddbSlhYIuHhbVq6OsaEPFvG8ciW\ncbzjjjt49tlnWb16NapKVlZWdcBXVV5++WW2b99OQUEBTz75JNdffz3guxG8ceNGvF4ve/bs4YEH\nHuDCCy8kLi6uumxZWRlutxtVpby8nIqKCgA2btzIp59+SmlpKW63m7fffpuVK1cyfPjwJv5fah4h\nFfxLS32tCpfr8P58NKa1sWUcj2wZx3HjxvHrX/+a8ePHEx8fz9VXX83evXur63bjjTcyfPhwevbs\nSa9evXjiiScA2LJlC6NGjSI+Pp5+/frhcrlqrYewZMkSoqOjueyyy8jJycHlcjFy5EjA98Pw29/+\nlg4dOpCcnMybb77Jxx9/TLdu3QLWtzmF1Hz+O3e+RWbmLbRvfz2nnnpsLyxhji82n/+xw5ZxbJqQ\nmjGtquUfFWWPnxtjDt2KFStYvXo18+fPb+mqhDzr9jGmlbFlHG0ZRwixbp/Vq4ewf/8y+vdPo02b\nC1q0XsbUZN0+pqW0imUcrdvHGGOCI2SCv8dThNu9G5FIIiPrLhlgjDGmOYVM8C8r80156nJ1RyRk\nqmWMMcelkBntY10+JtQ1ZZy8MceKkAn+ZWU/AhAVVf8UqMa0JLvZa443IdO/Ulm5H4Dw8LYtXBNj\njDn+hVDw982G53AEd3IjY4xpjQIGfxEZKSKZIrJJRB5tIE+qiKwRkXUiknYoZatUBX+n04K/McYc\nbY32+fvX5X0JGIZvMfcVIrJAVTNq5GkDvAyMUNVtIpLU1LI1eb0W/I0xJlgCtfwHAVmqmq2qbmAO\nMLpOnvHAB6q6DUBV8w+hbDVr+RtjTPAECv6dgZwa29v8aTX1AhJFZLGIrBSRmw+hbDXr8zfGmOAJ\nNNSzKePbwoGBwMVANLBMRL5uYlkApkyZQm7uesrLYezYTVxxRVNLGmNM65CWllZr7YIj1ejEbiJy\nDjBFVUf6tx8HvKr6TI08jwIuVZ3i334T+De+ln6jZf3pqqqsWnUWRUUrGThwOfHxg5rtBI0x5nh0\ntCd2Wwn0EpEUEYkArgMW1MkzHxgqIk4RiQbOBjY0sWy1n7p9og/rRIwxxjRdo90+quoRkfuAhYAT\nmKaqGSJyt3//66qaKSL/BtYCXuANVd0AUF/Zhj7LbvgaY0zwhMx8/kuXJuHx7GHIkF1ERLRv0ToZ\nY0yoO27m87dx/sYYEzwhEfxVK/F6ywBwOFwtXBtjjDn+hUTwr6wsAXw3e20uf2OMOfpCItJ6vb7g\nb10+xhgTHCER/G2kjzHGBFdIBX+b2sEYY4IjpIK/tfyNMSY4QiL42zBPY4wJrpAI/tbyN8aY4Aqp\n4G99/sYYExwhFfyt5W+MMcEREsH/pz5/m9HTGGOCISSCv3X7GGNMcIVU8LduH2OMCQ4L/sYY0woF\nDP4iMlJEMkVkk3/Jxrr7U0WkUETW+F//r8a+bBFZ609Pb+gzbJy/McYEV6MreYmIE3gJGAZsB1aI\nyIJ6VuRaoqpX1nMIBVJVtaCxz7E+f2OMCa5ALf9BQJaqZquqG5gDjK4nX2OryQRcaaZqSmdr+Rtj\nTHAECv6dgZwa29v8aTUpMEREvhWRT0Tk1Dr7PheRlSJyZ0MfYt0+xhgTXI12++AL3oGsBrqqaomI\njAI+BE727ztXVXNF5ARgkYhkquqXdQ9gN3yNMSa4AgX/7UDXGttd8bX+q6lqUY33n4rIKyKSqKoF\nqprrT88TkXn4upEOCv6vvroZtxsWLpzFiBFuUlNTD/N0jDHm+JSWlkZaWlqzHU9UG27ci0gYsBG4\nGNgBpAM31LzhKyIdgN2qqiIyCHhfVVNEJBpwqmqRiMQAnwG/VdXP6nyGLlvWk7KyzQwa9D3R0b2a\n7eSMMeZ4JSKoasB7qg1ptOWvqh4RuQ9YCDiBaaqaISJ3+/e/DowD7hERD1ACXO8vngz8U0SqPued\nuoG/ivX5G2NMcDXa8g9KBUT0v/+No7KyiKFD9xEWltCi9THGmGPBkbb8Q+oJX4fDJnYzxphgCIng\nD15EwnE4wlu6IsYY0yqESPC3/n5jjAmmkAn+NrWDMcYETwgF/8iWroIxxrQaIRP8Ray/3xhjgiWE\ngn+gh42NMcY0l5AJ/jbSxxhjgidkgr+1/I0xJnhCKPhby98YY4LFgr8xxrRCIRT8rdvHGGOCJYSC\nv7X8jTEmWEIo+FvL3xhjgiVkgr8N9TTGmOAJmeBvLX9jjAmegMFfREaKSKaIbBKRR+vZnyoihSKy\nxv96oqllax/HWv7GGBMsjTa
3RcQJvAQMw7eY+woRWVBzDV+/Jap65WGW9ee3lr8xxgRLoJb/ICBL\nVbNV1Q3MAUbXk6++pcSaWtZ3AGv5G2NM0AQK/p2BnBrb2/xpNSkwRES+FZFPROTUQyhbzYK/McYE\nT6C+lqas7r4a6KqqJSIyCvgQOPlQK2LdPsYYEzyBIu52oGuN7a74WvDVVLWoxvtPReQVEUn052u0\nbJUZMyA+fiWJiVNITU0lNTW16WdgjDGtQFpaGmlpac12PFFtuHEvvub4RuBiYAeQDtxQ86atiHQA\ndquqisgg4H1VTWlKWX95XbwYunb9FT17/qnZTswYY45nIoKq1ne/tUkabfmrqkdE7gMWAk5gmqpm\niMjd/v2vA+OAe0TEA5QA1zdWtuETsT5/Y4wJlkZb/kGpgL/lf+KJ/4/u3X/XonUxxphjxZG2/EPo\nCV9r+RtjTLCEUPC30T7GGBMsIRT8reVvjDHBEjLB32b1NMaY4AmZ4G/dPsYYEzwhFPyt5W+MMcES\nQsHfWv7GGBMsIRT8reVvjDHBEkLB31r+xhgTLCEU/K3lb4wxwRJCwd9a/sYYEywhE/xtnL8xxgRP\nyAR/6/YxxpjgCaHgb90+xhgTLCEU/K3lb4wxwRJCwd9a/sYYEywBg7+IjBSRTBHZJCKPNpLvLBHx\niMjYGmnZIrJWRNaISHrjn2Mtf2OMCZZGm9si4gReAobhW8x9hYgsqGcdXifwDPDvOodQIFVVCwJV\nxFr+xhgTPIFa/oOALFXNVlU3MAcYXU+++4F/AHn17GvSMmM21NMYY4InUPDvDOTU2N7mT6smIp3x\n/SC86k+quSiwAp+LyEoRubOxD7KWvzHGBE+giNuU1d2fBx5TVRURoXZL/1xVzRWRE4BFIpKpql/W\nPcCMGbBo0SuEh7clNTWV1NTUptbfGGNahbS0NNLS0prteKLacHwXkXOAKao60r/9OOBV1Wdq5NnC\nTwE/CSgB7lTVBXWONRkoVtU/10nXxYvhnHO2EhXVtTnOyRhjjnsigqo2qVu9PoG6fVYCvUQkRUQi\ngOuAWkFdVXuoandV7Y6v3/8eVV0gItEiEuevZAwwHPiu4ROxbh9jjAmWRiOuqnpE5D5gIeAEpqlq\nhojc7d//eiPFk4F/+nqCCAPeUdXPGspsQz2NMSZ4Gu32CUoF/N0+5567l/DwNi1aF2OMOVYc7W6f\noLGhnsYYEzwhE/ytz98YY4InhIK/tfyNMSZYQiT4CyIhUhVjjGkFQiLiWqvfGGOCy4K/Mca0QiES\n/O1mrzHGBFNIBH8b5mmMMcEVEsHfWv7GGBNcIRL8reVvjDHBFCLB31r+xhgTTCES/K3lb4wxwRQi\nwd9a/sYYE0whEvyt5W+MMcEUEsHfhnoaY0xwhUTwt24fY4wJroDBX0RGikimiGwSkUcbyXeWiHhE\nZOyhl7WWvzHGBFOjwV9EnMBLwEjgVOAGEenTQL5ngH8fallfXmv5G2NMMAVq+Q8CslQ1W1XdwBxg\ndD357se3eHveYZS1lr8xxgRZoODfGcipsb3Nn1ZNRDrjC+qv+pOqFgUOWPanY1jL3xhjgilQ1G3K\n6u7PA4+pqoqIAFULCjd5ZfhXX91Mhw5TAEhNTSU1NbWpRY0xplVIS0sjLS2t2Y4nqg3HaBE5B5ii\nqiP9248DXlV9pkaeLfwU8JOAEuBOYHegsv50/e67sZx22j+a7aSMMeZ4JyKoqgTOWb9ALf+VQC8R\nSQF2ANcBN9TMoKo9alRmOvCRqi4QX19Oo2Wr2Dh/Y4wJrkaDv6p6ROQ+YCHgBKapaoaI3O3f//qh\nlq0vr93wNcaY4Gq02ycoFRDRjIzb6N377y1aD2OMOZYcabdPiDzhay1/Y4wJphAJ/jbU0xhjgilE\ngr+1/I0xJphCJPhby98YY4IpJIK/DfU0xpjgCongby1/Y4wJrhAJ/tbyN8aYYLLgb4wxrVCIBH/r\n9jHGmGAKkeBvLX9jjAmmEAn+1vI3xphgCongb0M9jTEmuEIi+FvL3xhjgitEgr+1/I0xJphCJPhb\ny98YY4IpRIK/tfyNMSaYAgZ/ERkpIpkisklEHq1n/2gR+VZE1ojIKhG5qMa+bBFZ69+X3vBnWPA3\nxphgarS/RUScwEvAMGA7sEJEFtRZjvFzVZ3vz98PmAec5N+nQKqqFjT+OdbtY4wxwRSo5T8IyFLV\nbFV1A3OA0TUzqOqBGpuxQH6dYwRcZsyGehpjTHAFCv6dgZwa29v8abWIyBgRyQA+BR6osUuBz0Vk\npYjc2dCHWMvfGGOCK1DUbdLq7qr6IfChiJwHzAJO8e86V1VzReQEYJGIZKrql3XLP/PM20RF+ZJT\nU1NJTU1tav2NMaZVSEtLIy0trdmOJ6oNx3cROQeYoqoj/duPA15VfaaRMpuBQaq6p076ZKBYVf9c\nJ1337fuKhIQhR3AaxhjTuogIqhqwW70hgbp9VgK9RCRFRCKA64AFdSrQU0TE/34ggKruEZFoEYnz\np8cAw4Hv6j8J6/M3xphgarTbR1U9InIfsBBwAtNUNUNE7vbvfx0YC0wQETdQDFzvL54M/NP/uxAG\nvKOqn9X3OQ5HVHOcizHGmCZqtNsnKBUQ0ZaugzHGHGuOdrePMcaY45AFf2OMaYUs+BtjTCtkwd8Y\nY1ohC/7GGNMKWfA3xphWyIK/Mca0Qhb8jTGmFbLgb4wxrZAFf2OMaYUs+BtjTCtkwd8YY1ohC/7G\nGNMKWfA3xphWyIK/Mca0QgGDv4iMFJFMEdkkIo/Ws3+0iHwrImtEZJWIXNTUssYYY1pGo8FfRJzA\nS8BI4FTgBhHpUyfb56p6uqoOAG4F/nYIZU0za84Fno1dz+Zk1zK0BGr5DwKyVDVbVd3AHGB0zQyq\neqDGZiyQ39SypvnZP7DmZdez+di1DC2Bgn9nIKfG9jZ/Wi0iMkZEMoBPgQcOpawxxpjgCxT8m7S4\nrqp+qKp9gCuAWeJftd0YY0xoanQBdxE5B5iiqiP9248DXlV9ppEym/F1+fRqSlkRsdXbjTHmMBzJ\nAu5hAfavBHqJSAqwA7gOuKFmBhHpCWxRVRWRgf4K7RGRwkBlj7TyxhhjDk+jwV9VPSJyH7AQcALT\nVDVDRO72738dGAtMEBE3UAxc31jZo3cqxhhjmqrRbh9jjDHHpxZ9wtceAjsyIpItImv9D9il+9MS\nRWSRiHwvIp+JSJuWrmeoEpG/i8guEfmuRlqD109EHvd/VzNFZHjL1Dp0NXA9p4jINv93dI2IjKqx\nz65nA0Skq4gsFpH1IrJORB7wpzff91NVW+SFrysoC0gBwoFvgD4tVZ9j8QX8ACTWSfsj8Ij//aPA\n0y1dz1B9AecBA4DvAl0/fA8qfuP/rqb4v7uOlj6HUHo1cD0nAw/Xk9euZ+PXMhno738fC2wE+jTn\n97MlW/72EFjzqHvD/Epgpv/9TGBMcKtz7FDVL4G9dZIbun6jgXdV1a2q2fj+cQ0KRj2P
FQ1cTzj4\nOwp2PRulqjtV9Rv/+2IgA99zUs32/WzJ4G8PgR05BT4XkZUicqc/rYOq7vK/3wV0aJmqHbMaun6d\n8H1Hq9j3tenu98//Na1GN4Vdzybyj5gcACynGb+fLRn87U7zkTtXfXMqjQJ+ISLn1dypvr8H7Tof\npiZcP7u2gb0KdAf6A7nAnxvJa9ezDhGJBT4AHlTVopr7jvT72ZLBfzvQtcZ2V2r/cpkAVDXX/988\nYB6+P/N2iUgygIh0BHa3XA2PSQ1dv7rf1y7+NNMIVd2tfsCb/NQVYdczABEJxxf4Z6nqh/7kZvt+\ntmTwr36ATEQi8D0EtqAF63NMEZFoEYnzv48BhgPf4buGt/iz3QJ8WP8RTAMaun4LgOtFJEJEuuN7\ngj29Bep3TPEHqCpX4fuOgl3PRvmnyJkGbFDV52vsarbvZ6AnfI8atYfAjlQHYJ5/GqUw4B1V/UxE\nVgLvi8jPgWzg2parYmgTkXeBC4AkEckBfgM8TT3XT1U3iMj7wAbAA9zrb80av3qu52QgVUT64+uC\n+AGoekDUrmfjzgVuAtaKyBp/2uM04/fTHvIyxphWyJZxNMaYVsiCvzHGtEIW/I0xphWy4G+MMa2Q\nBX9jjGmFLPgbY0wrZMHfGGNaIQv+xhjTCv1/FHEJS4fw3QkAAAAASUVORK5CYII=\n", 1975 | "text/plain": [ 1976 | "" 1977 | ] 1978 | }, 1979 | "metadata": {}, 1980 | "output_type": "display_data" 1981 | } 1982 | ], 1983 | "source": [ 1984 | "plt.plot(all_accuracy_train_1, 'y', all_accuracy_valid_1, 'b', all_accuracy_cb513_1, 'k', linewidth=2.0)\n", 1985 | "plt.legend((\"all_accuracy_train\", \"all_accuracy_valid\", \"all_accuracy_cb513\"))" 1986 | ] 1987 | }, 1988 | { 1989 | "cell_type": "code", 1990 | "execution_count": null, 1991 | "metadata": { 1992 | "collapsed": true 1993 | }, 1994 | "outputs": [], 1995 | "source": [] 1996 | }, 1997 | { 1998 | "cell_type": "code", 1999 | "execution_count": null, 2000 | "metadata": { 2001 | "collapsed": false 2002 | }, 2003 | "outputs": [], 2004 | "source": [] 2005 | }, 2006 | { 2007 | "cell_type": "code", 2008 | "execution_count": null, 2009 | "metadata": { 2010 | "collapsed": false 2011 | }, 2012 | "outputs": [], 2013 | "source": [] 2014 | }, 2015 | { 2016 | "cell_type": "code", 2017 | "execution_count": null, 2018 | "metadata": { 2019 | "collapsed": false 2020 | }, 2021 | "outputs": [], 2022 | "source": [] 2023 | }, 2024 | { 2025 | "cell_type": "code", 2026 | "execution_count": null, 2027 | "metadata": { 2028 | "collapsed": false 2029 | }, 2030 | "outputs": [], 2031 | "source": [] 2032 | }, 2033 | { 2034 | "cell_type": "code", 2035 | "execution_count": null, 2036 | "metadata": { 2037 | "collapsed": true 2038 | }, 2039 | "outputs": [], 2040 | "source": [] 2041 | } 2042 | ], 2043 | "metadata": { 2044 | "kernelspec": { 2045 | "display_name": "Python 2", 2046 | "language": "python", 2047 | "name": "python2" 2048 | }, 2049 | "language_info": { 2050 | "codemirror_mode": { 2051 | "name": "ipython", 2052 | "version": 2 2053 | }, 2054 | "file_extension": ".py", 2055 | "mimetype": "text/x-python", 2056 | "name": "python", 2057 | "nbconvert_exporter": "python", 2058 | "pygments_lexer": "ipython2", 2059 | "version": "2.7.13" 2060 | } 2061 | }, 2062 | "nbformat": 4, 2063 | "nbformat_minor": 0 2064 | } 2065 | --------------------------------------------------------------------------------