├── __init__.py
├── DCNN
│   ├── __init__.py
│   ├── folding.py
│   ├── embeddings.py
│   ├── pooling.py
│   └── convolutions.py
├── README.md
├── dataUtils.py
├── utils.py
├── networks.py
├── data
│   └── binarySentiment
│       ├── valid_lbl.txt
│       └── test_lbl.txt
└── trainDCNN.py
/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'frederic' 2 | -------------------------------------------------------------------------------- /DCNN/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | from .convolutions import * 3 | from .embeddings import * 4 | from .folding import * 5 | from .pooling import * 6 | 7 | -------------------------------------------------------------------------------- /DCNN/folding.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | 3 | 4 | import theano.tensor as T 5 | 6 | from lasagne.layers.base import Layer 7 | 8 | class FoldingLayer(Layer): 9 | 10 | def __init__(self,incoming,**kwargs): 11 | super(FoldingLayer, self).__init__(incoming, **kwargs) 12 | 13 | def get_output_shape_for(self, input_shape): 14 | return (input_shape[0], input_shape[1],input_shape[2]/2, input_shape[3]) 15 | 16 | def get_output_for(self, input, **kwargs): 17 | # The paper specifies that every two consecutive rows are summed into one row. 18 | # For efficiency, we use a reshape, which means we actually merge row x with row x + n/2. 19 | # For a neural network, this is equivalent. 20 | 21 | # make 2 long rows 22 | long_shape = (self.input_shape[0],self.input_shape[1],2,-1) 23 | long_rows = T.reshape(input,long_shape) 24 | # sum the two rows 25 | summed = T.sum(long_rows,axis=2,keepdims=True) 26 | # reshape them back 27 | folded_output = T.reshape(summed,(self.input_shape[0], self.input_shape[1],self.input_shape[2]/2, -1)) 28 | 29 | return folded_output -------------------------------------------------------------------------------- /DCNN/embeddings.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | 3 | 4 | 5 | import theano.tensor as T 6 | from lasagne import init 7 | from lasagne.layers import EmbeddingLayer 8 | 9 | class SentenceEmbeddingLayer(EmbeddingLayer): 10 | 11 | def __init__(self, incoming, vocab_size, embedding_size, 12 | W=init.Normal(), padding='no', **kwargs): 13 | super(SentenceEmbeddingLayer, self).__init__(incoming, input_size=vocab_size, output_size=embedding_size, 14 | W=W, **kwargs) 15 | 16 | if padding=='first': 17 | self.sentence_W=T.concatenate([T.zeros((1,embedding_size)),self.W]) 18 | elif padding=='last': 19 | self.sentence_W=T.concatenate([self.W,T.zeros((1,embedding_size))]) 20 | else: 21 | self.sentence_W=self.W 22 | 23 | def get_output_shape_for(self, input_shape): 24 | return input_shape[0:-1] + (self.output_size, ) + (input_shape[-1],) 25 | 26 | def get_output_for(self, input, **kwargs): 27 | t_size = len(self.input_shape)+1 28 | t_shape = tuple(range(0,t_size-2))+(t_size-1,t_size-2) 29 | return T.transpose(self.sentence_W[input],t_shape) 30 | 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Dynamic Convolutional Neural Networks 2 | 3 | ### Introduction 4 | This is a Theano implementation of the paper "A Convolutional Neural Network for Modelling Sentences" by Kalchbrenner et al. (2014). 5 | The included example performs binary movie review sentiment classification on the Stanford Sentiment Treebank. 6 | I was able to achieve a test set accuracy of 85-86%, which is just below the reported accuracy of 86.8%. 7 | 8 | ### Using it 9 | To train the model, simply run trainDCNN.py. 10 | If you are only interested in the individual layers, such as dynamic k-max pooling or the row-wise 1D convolution, you can use the DCNN package on its own, as sketched below.
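For example, the first convolution block of the paper network can be built from the DCNN layers alone. A minimal sketch, mirroring the first block of networks.buildDCNNPaper and the default hyperparameters of trainDCNN.py:

```python
import lasagne
import DCNN

# default hyperparameters of trainDCNN.py (first convolution block only)
batch_size, vocab_size, embedding_size = 4, 15448, 48
num_filters, filter_size, ktop = 6, 7, 4

# a batch of padded word-index sequences, one sentence per row
l_in = lasagne.layers.InputLayer(shape=(batch_size, None))

# word embeddings, shaped (batch, embedding dimension, sentence length);
# padding='last' maps the padding index to an all-zero embedding
l_emb = DCNN.embeddings.SentenceEmbeddingLayer(
    l_in, vocab_size, embedding_size, padding='last')

# row-wise "full" 1D convolution, folding, and dynamic k-max pooling
l_conv = DCNN.convolutions.Conv1DLayerSplitted(
    l_emb, num_filters, filter_size,
    nonlinearity=lasagne.nonlinearities.linear, border_mode="full")
l_fold = DCNN.folding.FoldingLayer(l_conv)
l_pool = DCNN.pooling.DynamicKMaxPoolLayer(l_fold, ktop, nroflayers=2, layernr=1)
```

The second convolution block, dropout and the softmax output layer are stacked on top in the same way in networks.py.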
11 | 12 | 13 | ### Paper/implementation issues 14 | There are some discrepancies between the paper and the Matlab code provided by the authors, which made it difficult to rely on the Matlab code for details the paper does not specify. For example: 15 | (1) The number of layers and filters differs between the two. 16 | (2) The L2 regularization is not specified in the paper but is very detailed in the code (different values for different weight matrices); those values would be hard to guess. 17 | 18 | ### Implementation details 19 | The layers of the network are wrapped as Lasagne layers and can be easily reused. 20 | The paper introduces some layer types that are not trivial to implement efficiently in Theano: 21 | (1) 1D convolution layers that convolve each row separately rather than all rows at once. 22 | (2) Dynamic k-max pooling. Currently an argsort operation is used, which is executed on the CPU. 23 | However, a full argsort is heavier than necessary for selecting only the k maximum values. 24 | 25 | Because of these implementation issues, Theano automatically introduces a very costly GpuContiguous operation somewhere in the graph. 26 | If you have a solution or comments, I'm happy to accept pull requests ;) 27 | -------------------------------------------------------------------------------- /dataUtils.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | 3 | import numpy 4 | 5 | 6 | def read_and_sort_matlab_data(x_file,y_file,padding_value=15448): 7 | 8 | 9 | sorted_dict = {} 10 | x_data = [] 11 | i=0 12 | file = open(x_file,"r") 13 | for line in file: 14 | words = line.split(",") 15 | result = [] 16 | length=None 17 | for word in words: 18 | word_i = int(word) 19 | if word_i == padding_value and length==None: 20 | length = len(result) 21 | result.append(word_i) 22 | x_data.append(result) 23 | 24 | if length==None: 25 | length=len(result) 26 | 27 | if length in sorted_dict: 28 | sorted_dict[length].append(i) 29 | else: 30 | sorted_dict[length]=[i] 31 | i+=1 32 | 33 | file.close() 34 | 35 | file = open(y_file,"r") 36 | y_data = [] 37 | for line in file: 38 | words = line.split(",") 39 | y_data.append(int(words[0])-1) 40 | file.close() 41 | 42 | new_train_list = [] 43 | new_label_list = [] 44 | lengths = [] 45 | for length, indexes in sorted_dict.items(): 46 | for index in indexes: 47 | new_train_list.append(x_data[index]) 48 | new_label_list.append(y_data[index]) 49 | lengths.append(length) 50 | 51 | return numpy.asarray(new_train_list,dtype=numpy.int32),numpy.asarray(new_label_list,dtype=numpy.int32),lengths 52 | 53 | def pad_to_batch_size(array,batch_size): 54 | rows_extra = batch_size - (array.shape[0] % batch_size) 55 | if len(array.shape)==1: 56 | padding = numpy.zeros((rows_extra,),dtype=numpy.int32) 57 | return numpy.concatenate((array,padding)) 58 | else: 59 | padding = numpy.zeros((rows_extra,array.shape[1]),dtype=numpy.int32) 60 | return numpy.vstack((array,padding)) 61 | 62 | def 
extend_lenghts(length_list,batch_size): 63 | elements_extra = batch_size - (len(length_list) % batch_size) 64 | length_list.extend([length_list[-1]]*elements_extra) 65 | 66 | -------------------------------------------------------------------------------- /DCNN/pooling.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | 3 | import theano.tensor as T 4 | from lasagne.layers.base import Layer 5 | 6 | 7 | class KMaxPoolLayer(Layer): 8 | 9 | def __init__(self,incoming,k,**kwargs): 10 | super(KMaxPoolLayer, self).__init__(incoming, **kwargs) 11 | self.k = k 12 | 13 | def get_output_shape_for(self, input_shape): 14 | return (input_shape[0], input_shape[1], input_shape[2], self.k) 15 | 16 | def get_output_for(self, input, **kwargs): 17 | return self.kmaxpooling(input,self.k) 18 | 19 | 20 | def kmaxpooling(self,input,k): 21 | 22 | sorted_values = T.argsort(input,axis=3) 23 | topmax_indexes = sorted_values[:,:,:,-k:] 24 | # sort indexes so that we keep the correct order within the sentence 25 | topmax_indexes_sorted = T.sort(topmax_indexes) 26 | 27 | #given that topmax only gives the index of the third dimension, we need to generate the other 3 dimensions 28 | dim0 = T.arange(0,self.input_shape[0]).repeat(self.input_shape[1]*self.input_shape[2]*k) 29 | dim1 = T.arange(0,self.input_shape[1]).repeat(k*self.input_shape[2]).reshape((1,-1)).repeat(self.input_shape[0],axis=0).flatten() 30 | dim2 = T.arange(0,self.input_shape[2]).repeat(k).reshape((1,-1)).repeat(self.input_shape[0]*self.input_shape[1],axis=0).flatten() 31 | dim3 = topmax_indexes_sorted.flatten() 32 | return input[dim0,dim1,dim2,dim3].reshape((self.input_shape[0], self.input_shape[1], self.input_shape[2], k)) 33 | 34 | 35 | 36 | class DynamicKMaxPoolLayer(KMaxPoolLayer): 37 | 38 | def __init__(self,incoming,ktop,nroflayers,layernr,**kwargs): 39 | super(DynamicKMaxPoolLayer, self).__init__(incoming,ktop, **kwargs) 40 | self.ktop = ktop 41 | self.layernr = layernr 42 | self.nroflayers = nroflayers 43 | 44 | def get_k(self,input_shape): 45 | return T.cast(T.max([self.ktop,T.ceil((self.nroflayers-self.layernr)/float(self.nroflayers)*input_shape[3])]),'int32') 46 | 47 | def get_output_shape_for(self, input_shape): 48 | return (input_shape[0], input_shape[1], input_shape[2], None) 49 | 50 | def get_output_for(self, input, **kwargs): 51 | 52 | k = self.get_k(input.shape) 53 | 54 | return self.kmaxpooling(input,k) 55 | 56 | 57 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | 3 | from collections import OrderedDict 4 | 5 | import numpy as np 6 | 7 | import theano 8 | import theano.tensor as T 9 | import lasagne.updates 10 | 11 | 12 | # Adapted from Lasagne 13 | 14 | def adagrad(loss_or_grads, params, learning_rate=1.0, epsilon=1e-6): 15 | """Adagrad updates 16 | Scale learning rates by dividing with the square root of accumulated 17 | squared gradients. See [1]_ for further description. 
18 | Parameters 19 | ---------- 20 | loss_or_grads : symbolic expression or list of expressions 21 | A scalar loss expression, or a list of gradient expressions 22 | params : list of shared variables 23 | The variables to generate update expressions for 24 | learning_rate : float or symbolic scalar 25 | The learning rate controlling the size of update steps 26 | epsilon : float or symbolic scalar 27 | Small value added for numerical stability 28 | Returns 29 | ------- 30 | OrderedDict 31 | A dictionary mapping each parameter to its update expression 32 | Notes 33 | ----- 34 | Using step size eta Adagrad calculates the learning rate for feature i at 35 | time step t as: 36 | .. math:: \\eta_{t,i} = \\frac{\\eta} 37 | {\\sqrt{\\sum^t_{t^\\prime} g^2_{t^\\prime,i}+\\epsilon}} g_{t,i} 38 | as such the learning rate is monotonically decreasing. 39 | Epsilon is not included in the typical formula, see [2]_. 40 | References 41 | ---------- 42 | .. [1] Duchi, J., Hazan, E., & Singer, Y. (2011): 43 | Adaptive subgradient methods for online learning and stochastic 44 | optimization. JMLR, 12:2121-2159. 45 | .. [2] Chris Dyer: 46 | Notes on AdaGrad. http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf 47 | """ 48 | 49 | grads = lasagne.updates.get_or_compute_grads(loss_or_grads, params) 50 | updates = OrderedDict() 51 | accus = [] 52 | 53 | for param, grad in zip(params, grads): 54 | value = param.get_value(borrow=True) 55 | accu = theano.shared(np.zeros(value.shape, dtype=value.dtype), 56 | broadcastable=param.broadcastable) 57 | accu_new = accu + grad ** 2 58 | updates[accu] = accu_new 59 | accus.append((accu,value.shape)) 60 | updates[param] = param - (learning_rate * grad / 61 | T.sqrt(accu_new + epsilon)) 62 | 63 | return updates,accus 64 | 65 | def reset_grads(accus): 66 | for accu in accus: 67 | accu[0].set_value(np.zeros(accu[1], dtype=accu[0].dtype)) -------------------------------------------------------------------------------- /networks.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | import lasagne 3 | import DCNN 4 | import theano.tensor as T 5 | 6 | 7 | def parseActivation(str_a): 8 | if str_a=="linear": 9 | return lasagne.nonlinearities.linear 10 | elif str_a=="tanh": 11 | return lasagne.nonlinearities.tanh 12 | elif str_a=="rectify": 13 | return lasagne.nonlinearities.rectify 14 | elif str_a=="sigmoid": 15 | return lasagne.nonlinearities.sigmoid 16 | else: 17 | raise Exception("Activation function \'"+str_a+"\' is not recognized") 18 | 19 | 20 | def buildDCNNPaper(batch_size,vocab_size,embeddings_size=48,filter_sizes=[10,7],nr_of_filters=[6,12],activations=["tanh","tanh"],ktop=5,dropout=0.5,output_classes=2,padding='last'): 21 | 22 | l_in = lasagne.layers.InputLayer( 23 | shape=(batch_size, None), 24 | ) 25 | 26 | l_embedding = DCNN.embeddings.SentenceEmbeddingLayer( 27 | l_in, 28 | vocab_size, 29 | embeddings_size, 30 | padding=padding 31 | ) 32 | 33 | 34 | l_conv1 = DCNN.convolutions.Conv1DLayerSplitted( 35 | l_embedding, 36 | nr_of_filters[0], 37 | filter_sizes[0], 38 | nonlinearity=lasagne.nonlinearities.linear, 39 | border_mode="full" 40 | ) 41 | 42 | l_fold1 = DCNN.folding.FoldingLayer(l_conv1) 43 | 44 | l_pool1 = DCNN.pooling.DynamicKMaxPoolLayer(l_fold1,ktop,nroflayers=2,layernr=1) 45 | 46 | 47 | l_nonlinear1 = lasagne.layers.NonlinearityLayer(l_pool1,nonlinearity=parseActivation(activations[0])) 48 | 49 | l_conv2 = DCNN.convolutions.Conv1DLayerSplitted( 50 | 
l_nonlinear1, 51 | nr_of_filters[1], 52 | filter_sizes[1], 53 | nonlinearity=lasagne.nonlinearities.linear, 54 | border_mode="full" 55 | ) 56 | 57 | l_fold2 = DCNN.folding.FoldingLayer(l_conv2) 58 | 59 | l_pool2 = DCNN.pooling.KMaxPoolLayer(l_fold2,ktop) 60 | 61 | l_nonlinear2 = lasagne.layers.NonlinearityLayer(l_pool2,nonlinearity=parseActivation(activations[1])) 62 | 63 | l_dropout2=lasagne.layers.DropoutLayer(l_nonlinear2,p=dropout) 64 | 65 | l_out = lasagne.layers.DenseLayer( 66 | l_dropout2, 67 | num_units=output_classes, 68 | nonlinearity=lasagne.nonlinearities.softmax 69 | ) 70 | 71 | return l_out 72 | 73 | 74 | 75 | 76 | def buildMaxTDNN(batch_size,vocab_size,embeddings_size,filter_size,output_classes): 77 | 78 | l_in = lasagne.layers.InputLayer( 79 | shape=(batch_size, None), 80 | ) 81 | 82 | l_embedding = DCNN.embeddings.SentenceEmbeddingLayer(l_in, vocab_size, embeddings_size) 83 | 84 | 85 | l_conv1 = DCNN.convolutions.Conv1DLayer( 86 | l_embedding, 87 | 1, 88 | filter_size, 89 | nonlinearity=lasagne.nonlinearities.tanh, 90 | stride=1, 91 | border_mode="valid", 92 | 93 | ) 94 | 95 | l_pool1 = lasagne.layers.GlobalPoolLayer(l_conv1,pool_function=T.max) 96 | 97 | l_out = lasagne.layers.DenseLayer( 98 | l_pool1, 99 | num_units=output_classes, 100 | nonlinearity=lasagne.nonlinearities.softmax, 101 | ) 102 | 103 | return l_out 104 | 105 | 106 | -------------------------------------------------------------------------------- /DCNN/convolutions.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | 3 | from lasagne import * 4 | from lasagne.layers import Layer 5 | import lasagne.utils 6 | import theano.tensor as T 7 | 8 | # Adapted from Lasagne 9 | class Conv1DLayerSplitted(Layer): 10 | 11 | def __init__(self, incoming, num_filters, filter_size, 12 | border_mode="valid", 13 | W=lasagne.init.GlorotUniform(), b=lasagne.init.Constant(0.), 14 | nonlinearity=nonlinearities.rectify, 15 | **kwargs): 16 | super(Conv1DLayerSplitted, self).__init__(incoming, **kwargs) 17 | if nonlinearity is None: 18 | self.nonlinearity = nonlinearities.identity 19 | else: 20 | self.nonlinearity = nonlinearity 21 | 22 | self.num_filters = num_filters 23 | self.filter_size = filter_size 24 | self.stride = lasagne.utils.as_tuple(1, 1) 25 | self.border_mode = border_mode 26 | 27 | 28 | # If it is an image the input shape will be 3 29 | # If it is a stack of filter ouputs after a previous convolution, the input shape will be 4 30 | if len(self.input_shape)==3: 31 | self.num_input_channels = 1 32 | self.num_of_rows = self.input_shape[1] 33 | elif len(self.input_shape)==4: 34 | self.num_input_channels = self.input_shape[1] 35 | self.num_of_rows = self.input_shape[2] 36 | 37 | self.W = self.add_param(W, self.get_W_shape(), name="W") 38 | if b is None: 39 | self.b = None 40 | else: 41 | bias_temp_shape = self.get_output_shape_for(self.input_shape) 42 | biases_shape = (bias_temp_shape[1],bias_temp_shape[2]) 43 | self.b = self.add_param(b, biases_shape, name="b", regularizable=False) 44 | 45 | def get_W_shape(self): 46 | return (self.num_filters,self.num_input_channels, self.num_of_rows, self.filter_size) 47 | 48 | def get_output_shape_for(self, input_shape): 49 | 50 | output_length = lasagne.layers.conv.conv_output_length(input_shape[-1], 51 | self.filter_size, 52 | self.stride[0], 53 | self.border_mode) 54 | 55 | return (input_shape[0], self.num_filters, self.num_of_rows, output_length) 56 | 57 | def 
get_output_for(self, input, input_shape=None, **kwargs): 58 | 59 | if input_shape is None: 60 | input_shape = self.input_shape 61 | 62 | filter_shape = self.get_W_shape() 63 | 64 | # We split the input shape and the filters into seperate rows to be able to execute a row wise 1D convolutions 65 | # We cannot convolve over the columns 66 | # However, we do need to convolve over multiple channels=output filters previous layer 67 | # See paper of Kalchbrenner for more details 68 | if self.border_mode in ['valid', 'full']: 69 | 70 | if len(self.input_shape)==3: 71 | input_shape_row= (self.input_shape[0], 1, 1, self.input_shape[2]) 72 | new_input = input.dimshuffle(0,'x', 1, 2) 73 | elif len(self.input_shape)==4: 74 | input_shape_row= (self.input_shape[0], self.input_shape[1], 1, self.input_shape[3]) 75 | new_input = input 76 | 77 | filter_shape_row =(filter_shape[0],filter_shape[1],1,filter_shape[3]) 78 | conveds = [] 79 | 80 | #Note that this for loop is only to construct the Theano graph and will never be part of the computation 81 | for i in range(self.num_of_rows): 82 | conveds.append(T.nnet.conv.conv2d(new_input[:,:,i,:].dimshuffle(0,1,'x',2), 83 | self.W[:,:,i,:].dimshuffle(0,1,'x',2), 84 | image_shape=input_shape_row, 85 | filter_shape=filter_shape_row, 86 | border_mode=self.border_mode, 87 | )) 88 | 89 | conved = T.concatenate(conveds,axis=2) 90 | 91 | 92 | 93 | elif self.border_mode == 'same': 94 | raise NotImplementedError("Not implemented yet ") 95 | else: 96 | raise RuntimeError("Invalid border mode: '%s'" % self.border_mode) 97 | 98 | 99 | if self.b is None: 100 | activation = conved 101 | else: 102 | activation = conved + self.b.dimshuffle('x',0,1,'x') 103 | 104 | 105 | return self.nonlinearity(activation) 106 | 107 | -------------------------------------------------------------------------------- /data/binarySentiment/valid_lbl.txt: -------------------------------------------------------------------------------- 1 | 2,12 2 | 2,24 3 | 2,8 4 | 2,24 5 | 2,8 6 | 2,24 7 | 2,32 8 | 2,30 9 | 2,33 10 | 2,26 11 | 2,20 12 | 2,27 13 | 2,11 14 | 2,21 15 | 2,32 16 | 2,29 17 | 2,34 18 | 2,20 19 | 2,19 20 | 2,19 21 | 2,33 22 | 2,6 23 | 2,30 24 | 2,12 25 | 2,8 26 | 2,9 27 | 2,24 28 | 2,36 29 | 2,23 30 | 2,28 31 | 2,12 32 | 2,23 33 | 2,29 34 | 2,32 35 | 2,33 36 | 1,19 37 | 2,22 38 | 1,13 39 | 2,14 40 | 2,19 41 | 2,9 42 | 2,24 43 | 2,27 44 | 2,7 45 | 2,19 46 | 2,15 47 | 2,16 48 | 2,15 49 | 2,30 50 | 1,12 51 | 2,14 52 | 2,14 53 | 2,16 54 | 2,16 55 | 2,26 56 | 2,20 57 | 2,15 58 | 2,18 59 | 2,14 60 | 2,19 61 | 2,24 62 | 2,21 63 | 2,12 64 | 2,15 65 | 2,11 66 | 2,40 67 | 2,25 68 | 2,11 69 | 2,28 70 | 2,14 71 | 2,35 72 | 2,13 73 | 2,11 74 | 2,26 75 | 2,23 76 | 2,34 77 | 2,22 78 | 2,38 79 | 2,35 80 | 2,16 81 | 2,7 82 | 2,17 83 | 2,13 84 | 2,20 85 | 2,21 86 | 2,20 87 | 1,11 88 | 2,12 89 | 2,19 90 | 2,18 91 | 2,32 92 | 2,38 93 | 2,19 94 | 2,15 95 | 2,15 96 | 2,18 97 | 2,9 98 | 2,23 99 | 2,26 100 | 2,12 101 | 2,23 102 | 2,9 103 | 2,17 104 | 2,6 105 | 2,10 106 | 2,46 107 | 2,16 108 | 2,9 109 | 2,22 110 | 2,22 111 | 2,23 112 | 2,22 113 | 2,13 114 | 2,19 115 | 2,14 116 | 2,33 117 | 1,17 118 | 2,16 119 | 2,30 120 | 2,11 121 | 2,18 122 | 2,20 123 | 2,30 124 | 2,25 125 | 2,11 126 | 2,30 127 | 2,10 128 | 2,7 129 | 2,12 130 | 2,36 131 | 2,12 132 | 2,8 133 | 2,15 134 | 2,13 135 | 2,6 136 | 2,19 137 | 2,21 138 | 2,15 139 | 2,22 140 | 2,12 141 | 2,24 142 | 2,23 143 | 2,25 144 | 2,27 145 | 2,32 146 | 2,29 147 | 2,8 148 | 2,19 149 | 2,16 150 | 2,27 151 | 2,34 152 | 2,8 153 | 2,35 154 | 2,28 155 | 2,30 156 | 2,13 157 | 
2,28 158 | 2,21 159 | 2,28 160 | 2,16 161 | 2,20 162 | 2,25 163 | 2,23 164 | 2,30 165 | 2,16 166 | 2,12 167 | 2,12 168 | 1,14 169 | 2,23 170 | 2,27 171 | 2,7 172 | 2,5 173 | 2,16 174 | 2,26 175 | 2,10 176 | 2,12 177 | 2,22 178 | 2,21 179 | 1,26 180 | 2,7 181 | 2,27 182 | 2,20 183 | 2,21 184 | 2,29 185 | 1,20 186 | 2,8 187 | 2,9 188 | 2,14 189 | 2,32 190 | 2,11 191 | 2,34 192 | 2,21 193 | 2,23 194 | 1,17 195 | 2,23 196 | 2,32 197 | 2,19 198 | 2,19 199 | 2,30 200 | 2,7 201 | 2,20 202 | 2,14 203 | 2,22 204 | 2,18 205 | 2,15 206 | 2,15 207 | 2,9 208 | 2,15 209 | 2,23 210 | 2,22 211 | 2,9 212 | 2,16 213 | 2,25 214 | 2,30 215 | 2,24 216 | 1,16 217 | 2,34 218 | 2,25 219 | 1,10 220 | 2,38 221 | 2,16 222 | 2,10 223 | 2,13 224 | 2,8 225 | 2,22 226 | 2,16 227 | 2,30 228 | 2,41 229 | 2,32 230 | 2,7 231 | 2,27 232 | 2,9 233 | 2,23 234 | 2,5 235 | 2,11 236 | 2,33 237 | 2,24 238 | 2,12 239 | 2,16 240 | 2,24 241 | 2,26 242 | 2,21 243 | 2,16 244 | 2,15 245 | 2,34 246 | 2,17 247 | 2,9 248 | 2,7 249 | 2,35 250 | 2,23 251 | 2,19 252 | 2,18 253 | 2,9 254 | 2,15 255 | 2,22 256 | 2,11 257 | 2,18 258 | 2,22 259 | 2,7 260 | 2,21 261 | 2,18 262 | 2,8 263 | 2,25 264 | 2,18 265 | 2,14 266 | 2,13 267 | 2,14 268 | 2,26 269 | 2,26 270 | 2,41 271 | 2,18 272 | 2,27 273 | 2,32 274 | 2,15 275 | 1,3 276 | 1,13 277 | 2,7 278 | 1,15 279 | 2,21 280 | 2,8 281 | 2,14 282 | 2,35 283 | 2,24 284 | 2,32 285 | 2,26 286 | 2,9 287 | 2,9 288 | 2,27 289 | 2,27 290 | 2,9 291 | 2,16 292 | 2,18 293 | 1,9 294 | 2,21 295 | 2,15 296 | 2,17 297 | 2,15 298 | 2,15 299 | 2,26 300 | 2,24 301 | 2,26 302 | 2,24 303 | 2,4 304 | 2,13 305 | 2,20 306 | 2,19 307 | 2,21 308 | 1,15 309 | 2,7 310 | 2,4 311 | 1,32 312 | 2,9 313 | 2,33 314 | 2,10 315 | 2,27 316 | 2,26 317 | 2,18 318 | 2,32 319 | 2,29 320 | 2,28 321 | 2,31 322 | 1,32 323 | 2,17 324 | 2,27 325 | 2,17 326 | 2,18 327 | 1,14 328 | 2,18 329 | 2,35 330 | 2,18 331 | 2,15 332 | 2,9 333 | 2,20 334 | 2,16 335 | 2,27 336 | 2,5 337 | 2,21 338 | 2,14 339 | 2,28 340 | 2,9 341 | 2,18 342 | 2,17 343 | 2,20 344 | 2,20 345 | 2,10 346 | 2,22 347 | 2,6 348 | 2,15 349 | 2,11 350 | 2,13 351 | 2,13 352 | 2,8 353 | 2,35 354 | 2,31 355 | 2,17 356 | 2,22 357 | 2,17 358 | 2,28 359 | 2,14 360 | 2,14 361 | 2,28 362 | 2,18 363 | 2,27 364 | 2,18 365 | 2,2 366 | 2,11 367 | 2,11 368 | 2,29 369 | 2,16 370 | 2,9 371 | 2,19 372 | 2,19 373 | 2,23 374 | 2,19 375 | 2,14 376 | 2,18 377 | 2,26 378 | 2,19 379 | 2,22 380 | 1,39 381 | 2,18 382 | 2,42 383 | 2,30 384 | 2,17 385 | 2,31 386 | 1,12 387 | 2,31 388 | 2,20 389 | 2,13 390 | 2,7 391 | 2,15 392 | 2,7 393 | 2,17 394 | 2,10 395 | 2,12 396 | 2,13 397 | 2,14 398 | 2,9 399 | 2,9 400 | 2,10 401 | 2,28 402 | 2,26 403 | 2,7 404 | 1,6 405 | 2,10 406 | 2,17 407 | 2,16 408 | 2,25 409 | 2,16 410 | 2,20 411 | 2,27 412 | 2,19 413 | 2,24 414 | 2,29 415 | 2,10 416 | 2,24 417 | 2,21 418 | 2,26 419 | 2,22 420 | 2,16 421 | 1,4 422 | 2,16 423 | 2,26 424 | 2,14 425 | 2,18 426 | 2,27 427 | 2,34 428 | 2,13 429 | 2,10 430 | 2,21 431 | 2,12 432 | 2,16 433 | 2,16 434 | 2,15 435 | 2,12 436 | 2,44 437 | 2,40 438 | 2,30 439 | 2,27 440 | 2,36 441 | 2,8 442 | 1,18 443 | 1,27 444 | 1,29 445 | 1,15 446 | 1,15 447 | 1,15 448 | 1,10 449 | 1,13 450 | 1,19 451 | 1,6 452 | 1,39 453 | 1,24 454 | 1,38 455 | 1,6 456 | 2,12 457 | 1,20 458 | 1,27 459 | 1,14 460 | 2,6 461 | 1,18 462 | 1,18 463 | 1,25 464 | 1,35 465 | 1,24 466 | 1,8 467 | 1,8 468 | 1,6 469 | 1,11 470 | 1,31 471 | 1,10 472 | 1,14 473 | 1,10 474 | 1,9 475 | 1,14 476 | 1,27 477 | 1,13 478 | 1,21 479 | 1,20 480 | 1,12 481 | 1,35 482 | 1,5 483 | 1,7 484 | 1,27 485 
| 1,10 486 | 1,12 487 | 1,7 488 | 1,9 489 | 1,12 490 | 1,20 491 | 1,7 492 | 1,20 493 | 1,36 494 | 1,24 495 | 1,37 496 | 1,39 497 | 1,15 498 | 1,24 499 | 1,22 500 | 1,26 501 | 1,14 502 | 1,38 503 | 1,11 504 | 1,24 505 | 1,14 506 | 1,12 507 | 1,25 508 | 2,17 509 | 1,33 510 | 1,30 511 | 1,28 512 | 2,13 513 | 1,15 514 | 1,11 515 | 1,18 516 | 1,23 517 | 1,31 518 | 1,11 519 | 1,6 520 | 1,10 521 | 1,22 522 | 1,9 523 | 2,13 524 | 1,22 525 | 1,20 526 | 1,7 527 | 1,6 528 | 1,25 529 | 1,21 530 | 1,9 531 | 2,5 532 | 1,6 533 | 1,23 534 | 1,18 535 | 1,25 536 | 1,28 537 | 1,34 538 | 1,8 539 | 1,19 540 | 1,16 541 | 1,14 542 | 1,23 543 | 1,33 544 | 1,8 545 | 1,33 546 | 1,7 547 | 1,20 548 | 1,16 549 | 1,12 550 | 1,26 551 | 1,15 552 | 1,16 553 | 1,21 554 | 1,13 555 | 1,5 556 | 1,16 557 | 1,4 558 | 1,12 559 | 1,5 560 | 1,34 561 | 1,27 562 | 1,19 563 | 1,37 564 | 1,18 565 | 1,16 566 | 1,15 567 | 1,10 568 | 1,30 569 | 1,5 570 | 1,17 571 | 1,19 572 | 1,18 573 | 1,11 574 | 1,13 575 | 1,16 576 | 1,35 577 | 2,18 578 | 1,15 579 | 1,19 580 | 1,27 581 | 1,26 582 | 1,33 583 | 1,19 584 | 1,17 585 | 1,6 586 | 2,11 587 | 1,16 588 | 1,26 589 | 1,22 590 | 1,31 591 | 1,15 592 | 1,37 593 | 1,32 594 | 1,27 595 | 1,20 596 | 1,26 597 | 1,25 598 | 1,14 599 | 1,38 600 | 1,19 601 | 1,14 602 | 1,21 603 | 1,31 604 | 1,23 605 | 1,27 606 | 1,10 607 | 1,11 608 | 1,15 609 | 1,24 610 | 1,7 611 | 1,29 612 | 1,27 613 | 1,13 614 | 1,40 615 | 1,10 616 | 1,17 617 | 1,27 618 | 1,5 619 | 2,22 620 | 1,27 621 | 1,10 622 | 1,32 623 | 1,16 624 | 1,28 625 | 2,46 626 | 1,15 627 | 1,20 628 | 1,20 629 | 2,14 630 | 1,23 631 | 1,8 632 | 1,15 633 | 1,20 634 | 1,18 635 | 1,26 636 | 1,28 637 | 1,17 638 | 1,16 639 | 1,11 640 | 1,15 641 | 1,10 642 | 2,5 643 | 1,21 644 | 1,9 645 | 1,16 646 | 1,21 647 | 1,11 648 | 1,24 649 | 1,17 650 | 2,26 651 | 1,22 652 | 1,19 653 | 1,13 654 | 1,38 655 | 1,12 656 | 1,19 657 | 1,32 658 | 1,27 659 | 2,29 660 | 1,20 661 | 2,29 662 | 1,11 663 | 1,10 664 | 1,5 665 | 1,30 666 | 1,12 667 | 1,26 668 | 1,37 669 | 1,27 670 | 1,10 671 | 1,17 672 | 1,21 673 | 1,41 674 | 1,22 675 | 1,18 676 | 1,8 677 | 1,21 678 | 1,19 679 | 2,15 680 | 1,30 681 | 1,27 682 | 1,22 683 | 1,24 684 | 1,4 685 | 1,17 686 | 1,23 687 | 1,16 688 | 1,22 689 | 1,14 690 | 1,13 691 | 1,29 692 | 1,13 693 | 1,31 694 | 1,9 695 | 1,19 696 | 1,19 697 | 1,10 698 | 1,11 699 | 1,8 700 | 1,34 701 | 1,30 702 | 1,10 703 | 1,6 704 | 1,5 705 | 1,20 706 | 1,21 707 | 1,7 708 | 2,14 709 | 1,19 710 | 1,12 711 | 1,14 712 | 1,27 713 | 1,11 714 | 1,25 715 | 1,8 716 | 1,19 717 | 1,22 718 | 1,10 719 | 1,17 720 | 1,32 721 | 1,7 722 | 1,17 723 | 1,14 724 | 1,25 725 | 1,6 726 | 1,27 727 | 1,8 728 | 1,6 729 | 1,26 730 | 1,5 731 | 1,7 732 | 1,14 733 | 1,25 734 | 1,24 735 | 2,15 736 | 1,4 737 | 1,26 738 | 1,12 739 | 1,25 740 | 1,32 741 | 1,36 742 | 1,14 743 | 1,12 744 | 1,18 745 | 1,19 746 | 1,28 747 | 1,19 748 | 1,33 749 | 1,17 750 | 1,5 751 | 1,20 752 | 1,25 753 | 1,21 754 | 1,15 755 | 1,8 756 | 1,15 757 | 1,16 758 | 2,28 759 | 1,19 760 | 1,22 761 | 1,26 762 | 1,16 763 | 1,32 764 | 1,8 765 | 1,15 766 | 1,34 767 | 2,9 768 | 1,18 769 | 1,24 770 | 2,26 771 | 1,21 772 | 1,26 773 | 1,31 774 | 1,19 775 | 1,16 776 | 1,11 777 | 1,20 778 | 1,22 779 | 1,16 780 | 1,24 781 | 1,17 782 | 1,19 783 | 2,26 784 | 1,26 785 | 1,23 786 | 1,30 787 | 1,25 788 | 1,17 789 | 1,8 790 | 1,17 791 | 1,16 792 | 1,18 793 | 1,12 794 | 1,24 795 | 1,20 796 | 1,15 797 | 1,17 798 | 1,37 799 | 1,18 800 | 1,24 801 | 1,10 802 | 1,36 803 | 2,22 804 | 1,16 805 | 1,20 806 | 1,30 807 | 1,22 808 | 1,8 809 | 1,13 810 | 1,20 811 | 1,28 812 | 
1,2 813 | 1,3 814 | 1,7 815 | 1,21 816 | 1,17 817 | 1,20 818 | 1,17 819 | 1,20 820 | 1,28 821 | 1,21 822 | 1,12 823 | 2,13 824 | 1,12 825 | 1,30 826 | 1,33 827 | 1,9 828 | 2,18 829 | 1,7 830 | 1,20 831 | 1,9 832 | 1,13 833 | 1,33 834 | 1,13 835 | 1,13 836 | 1,23 837 | 1,8 838 | 1,28 839 | 1,20 840 | 1,12 841 | 1,5 842 | 1,13 843 | 1,18 844 | 1,5 845 | 1,13 846 | 1,45 847 | 1,20 848 | 1,18 849 | 1,11 850 | 1,17 851 | 1,19 852 | 1,16 853 | 1,10 854 | 1,19 855 | 1,5 856 | 1,4 857 | 1,10 858 | 2,14 859 | 1,24 860 | 1,19 861 | 1,26 862 | 1,23 863 | 1,23 864 | 1,7 865 | 1,25 866 | 1,25 867 | 1,18 868 | 1,23 869 | 1,24 870 | 1,34 871 | 1,19 872 | 1,27 873 | -------------------------------------------------------------------------------- /trainDCNN.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Frederic Godin (frederic.godin@ugent.be / www.fredericgodin.com)' 2 | import theano 3 | import theano.tensor as T 4 | import numpy 5 | import lasagne 6 | import argparse 7 | 8 | import DCNN 9 | import dataUtils 10 | import networks 11 | import utils 12 | 13 | parser = argparse.ArgumentParser(description='Train a DCNN on the binary Stanford Sentiment dataset as specified in the Kalchbrenner \'14 paper. All the default values are taken from the paper or the Matlab code.') 14 | # training settings 15 | parser.add_argument("--learning_rate",type=float, default=0.1, help='Learning rate') 16 | parser.add_argument("--n_epochs",type=int,default=500,help="Number of epochs") 17 | parser.add_argument("--valid_freq",type=int,default=10,help="Number of batches processed until we validate.") 18 | parser.add_argument("--adagrad_reset",type=int,default=5,help="Resets the adagrad cumulative gradient after x epochs. If the value is 0, no reset will be executed.") 19 | # input output 20 | parser.add_argument("--vocab_size",type=int, default=15448, help='Vocabulary size') 21 | parser.add_argument("--output_classes",type=int, default=2, help='Number of output classes') 22 | parser.add_argument("--batch_size",type=int, default=4, help='Batch size') 23 | # network paras 24 | parser.add_argument("--word_vector_size",type=int, default=48, help='Word vector size') 25 | parser.add_argument("--filter_size_conv_layers", nargs="+", type=int, default=[7,5],help="List of sizes of filters at layer 1 and 2, default=[10,7]") 26 | parser.add_argument("--nr_of_filters_conv_layers", nargs="+", type=int, default=[6,14],help="List of number of filters at layer 1 and 2, default=[6,12]") 27 | parser.add_argument("--activations",nargs='+', type=str,default=["tanh","tanh"],help="List of activation functions behind first and second conv layers, default [tanh, tanh]. Possible values are \"linear\", \"tanh\", \"rectify\" and \"sigmoid\". ") 28 | parser.add_argument("--L2",nargs='+',type=float,default=[0.0001/2,0.00003/2,0.000003/2,0.0001/2],help="Fine-grained L2 regularization. 
4 values are needed for 4 layers, namly for the embeddings layer, 2 conv layers and a final/output dense layer.") 29 | parser.add_argument("--ktop",type=int,default=4,help="K value of top pooling layer DCNN") 30 | parser.add_argument("--dropout_value", type=float,default=0.5,help="Dropout value after penultimate layer") 31 | 32 | args = parser.parse_args() 33 | hyperparas = vars(args) 34 | print("Hyperparameters: "+str(hyperparas)) 35 | 36 | if len(hyperparas['filter_size_conv_layers'])!= 2 or len(hyperparas['nr_of_filters_conv_layers'])!=2 or len(hyperparas['activations'])!=2 or len(hyperparas["L2"])!=4 : 37 | raise Exception('Check if the input --filter_size_conv_layers, --nr_of_filters_conv_layers and --activations are lists of size 2, and the --L2 field needs a value list of 4 values.') 38 | 39 | 40 | ####################### 41 | # LOAD TRAINING DATA # 42 | ####################### 43 | print('Loading the training data') 44 | 45 | # load data, taken from Kalchbrenner matlab files 46 | # we order the input according to length and pad all sentences until the maximum length 47 | # at training time however, we will use the "length" array to shrink that matrix following the largest sentence within a batch 48 | # in practice, this means that batches are padded with 1 or 2 zeros, or aren't even padded at all. 49 | kalchbrenner_path = "./data/binarySentiment/" 50 | train_x_indexes, train_y, train_lengths = dataUtils.read_and_sort_matlab_data(kalchbrenner_path+"train.txt",kalchbrenner_path+"train_lbl.txt") 51 | dev_x_indexes, dev_y, dev_lengths = dataUtils.read_and_sort_matlab_data(kalchbrenner_path+"valid.txt",kalchbrenner_path+"valid_lbl.txt") 52 | test_x_indexes, test_y, test_lengths = dataUtils.read_and_sort_matlab_data(kalchbrenner_path+"test.txt",kalchbrenner_path+"test_lbl.txt") 53 | 54 | # train data 55 | n_train_batches = len(train_lengths) / hyperparas['batch_size'] 56 | 57 | #dev data 58 | # to be able to do a correct evaluation, we pad a number of rows to get a multiple of the batch size 59 | dev_x_indexes_extended = dataUtils.pad_to_batch_size(dev_x_indexes,hyperparas['batch_size']) 60 | dev_y_extended = dataUtils.pad_to_batch_size(dev_y,hyperparas['batch_size']) 61 | n_dev_batches = dev_x_indexes_extended.shape[0] / hyperparas['batch_size'] 62 | n_dev_samples = len(dev_y) 63 | dataUtils.extend_lenghts(dev_lengths,hyperparas['batch_size']) 64 | 65 | # test data 66 | test_x_indexes_extended = dataUtils.pad_to_batch_size(test_x_indexes,hyperparas['batch_size']) 67 | test_y_extended = dataUtils.pad_to_batch_size(test_y,hyperparas['batch_size']) 68 | n_test_batches = test_x_indexes_extended.shape[0] / hyperparas['batch_size'] 69 | n_test_samples = len(test_y) 70 | dataUtils.extend_lenghts(test_lengths,hyperparas['batch_size']) 71 | 72 | ###################### 73 | # BUILD ACTUAL MODEL # 74 | ###################### 75 | print('Building the model') 76 | 77 | # allocate symbolic variables for the data 78 | X_batch = T.imatrix('x') 79 | y_batch = T.ivector('y') 80 | 81 | # define/load the network 82 | output_layer = networks.buildDCNNPaper(batch_size=hyperparas['batch_size'],vocab_size=hyperparas['vocab_size'],embeddings_size=hyperparas['word_vector_size'],filter_sizes=hyperparas['filter_size_conv_layers'],nr_of_filters=hyperparas['nr_of_filters_conv_layers'],activations=hyperparas['activations'],ktop=hyperparas['ktop'],dropout=hyperparas["dropout_value"],output_classes=hyperparas['output_classes'],padding='last') 83 | 84 | # Kalchbrenner uses a fine-grained L2 regularization in the Matlab 
code, default values taken from Matlab code 85 | # Training objective 86 | l2_layers = [] 87 | for layer in lasagne.layers.get_all_layers(output_layer): 88 | if isinstance(layer,(DCNN.embeddings.SentenceEmbeddingLayer,DCNN.convolutions.Conv1DLayerSplitted,lasagne.layers.DenseLayer)): 89 | l2_layers.append(layer) 90 | loss_train = lasagne.objectives.aggregate(lasagne.objectives.categorical_crossentropy(lasagne.layers.get_output(output_layer,X_batch),y_batch),mode='mean')+lasagne.regularization.regularize_layer_params_weighted(dict(zip(l2_layers,hyperparas["L2"])),lasagne.regularization.l2) 91 | 92 | # validating/testing 93 | loss_eval = lasagne.objectives.categorical_crossentropy(lasagne.layers.get_output(output_layer,X_batch,deterministic=True),y_batch) 94 | pred = T.argmax(lasagne.layers.get_output(output_layer, X_batch, deterministic=True),axis=1) 95 | correct_predictions = T.eq(pred, y_batch) 96 | 97 | # In the matlab code, Kalchbrenner works with a adagrad reset mechanism, if the para --adagrad_reset has value 0, no reset will be applied 98 | all_params = lasagne.layers.get_all_params(output_layer) 99 | updates, accumulated_grads = utils.adagrad(loss_train, all_params, hyperparas['learning_rate']) 100 | #updates = lasagne.updates.adagrad(loss_train, all_params, hyperparas['learning_rate']) 101 | 102 | 103 | train_model = theano.function(inputs=[X_batch,y_batch], outputs=loss_train,updates=updates) 104 | 105 | valid_model = theano.function(inputs=[X_batch,y_batch], outputs=correct_predictions) 106 | 107 | test_model = theano.function(inputs=[X_batch,y_batch], outputs=correct_predictions) 108 | 109 | 110 | 111 | ############### 112 | # TRAIN MODEL # 113 | ############### 114 | print('Started training') 115 | print('Because of the default high validation frequency, only improvements are printed.') 116 | 117 | best_validation_accuracy = 0 118 | epoch = 0 119 | batch_size = hyperparas["batch_size"] 120 | while (epoch < hyperparas['n_epochs']): 121 | epoch = epoch + 1 122 | permutation = numpy.random.permutation(n_train_batches) 123 | batch_counter = 0 124 | train_loss=0 125 | for minibatch_index in permutation: 126 | x_input = train_x_indexes[minibatch_index*batch_size:(minibatch_index+1)*batch_size,0:train_lengths[(minibatch_index+1)*batch_size-1]] 127 | y_input = train_y[minibatch_index*batch_size:(minibatch_index+1)*batch_size] 128 | train_loss+=train_model(x_input,y_input) 129 | 130 | if batch_counter>0 and batch_counter % hyperparas["valid_freq"] == 0: 131 | accuracy_valid=[] 132 | for minibatch_dev_index in range(n_dev_batches): 133 | x_input = dev_x_indexes_extended[minibatch_dev_index*batch_size:(minibatch_dev_index+1)*batch_size,0:dev_lengths[(minibatch_dev_index+1)*batch_size-1]] 134 | y_input = dev_y_extended[minibatch_dev_index*batch_size:(minibatch_dev_index+1)*batch_size] 135 | accuracy_valid.append(valid_model(x_input,y_input)) 136 | 137 | #dirty code to correctly asses validation accuracy, last results in the array are predictions for the padding rows and can be dumped afterwards 138 | this_validation_accuracy = numpy.concatenate(accuracy_valid)[0:n_dev_samples].sum()/float(n_dev_samples) 139 | 140 | if this_validation_accuracy > best_validation_accuracy: 141 | print("Train loss, "+str( (train_loss/hyperparas["valid_freq"]))+", validation accuracy: "+str(this_validation_accuracy*100)+"%") 142 | best_validation_accuracy = this_validation_accuracy 143 | 144 | # test it 145 | accuracy_test= [] 146 | for minibatch_test_index in range(n_test_batches): 147 | x_input = 
test_x_indexes_extended[minibatch_test_index*batch_size:(minibatch_test_index+1)*batch_size,0:test_lengths[(minibatch_test_index+1)*batch_size-1]] 148 | y_input = test_y_extended[minibatch_test_index*batch_size:(minibatch_test_index+1)*batch_size] 149 | accuracy_test.append(test_model(x_input,y_input)) 150 | this_test_accuracy = numpy.concatenate(accuracy_test)[0:n_test_samples].sum()/float(n_test_samples) 151 | print("Test accuracy: "+str(this_test_accuracy*100)+"%") 152 | 153 | train_loss=0 154 | batch_counter+=1 155 | 156 | if hyperparas["adagrad_reset"] > 0: 157 | if epoch % hyperparas["adagrad_reset"] == 0: 158 | utils.reset_grads(accumulated_grads) 159 | 160 | print("Epoch "+str(epoch)+" finished.") 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /data/binarySentiment/test_lbl.txt: -------------------------------------------------------------------------------- 1 | 2,21 2 | 2,23 3 | 2,9 4 | 2,19 5 | 2,21 6 | 2,7 7 | 2,14 8 | 2,15 9 | 2,14 10 | 2,23 11 | 2,12 12 | 2,24 13 | 2,8 14 | 2,14 15 | 2,11 16 | 2,9 17 | 2,36 18 | 2,23 19 | 2,16 20 | 2,9 21 | 2,15 22 | 2,19 23 | 1,23 24 | 2,22 25 | 2,11 26 | 2,16 27 | 2,30 28 | 2,33 29 | 1,17 30 | 2,15 31 | 2,14 32 | 2,44 33 | 1,20 34 | 2,20 35 | 1,23 36 | 2,14 37 | 2,15 38 | 2,18 39 | 2,13 40 | 2,24 41 | 2,7 42 | 2,41 43 | 2,8 44 | 2,13 45 | 2,20 46 | 2,11 47 | 2,22 48 | 2,21 49 | 2,20 50 | 2,12 51 | 2,14 52 | 2,20 53 | 2,12 54 | 1,23 55 | 2,24 56 | 2,7 57 | 2,30 58 | 2,18 59 | 2,18 60 | 2,4 61 | 2,7 62 | 2,24 63 | 2,10 64 | 1,25 65 | 2,24 66 | 2,27 67 | 2,24 68 | 2,17 69 | 2,13 70 | 2,19 71 | 2,31 72 | 2,22 73 | 2,20 74 | 2,39 75 | 2,9 76 | 2,36 77 | 2,20 78 | 1,36 79 | 2,16 80 | 1,21 81 | 2,17 82 | 2,19 83 | 2,8 84 | 2,14 85 | 2,11 86 | 2,26 87 | 2,36 88 | 2,26 89 | 2,17 90 | 2,15 91 | 2,27 92 | 2,20 93 | 2,31 94 | 2,12 95 | 2,33 96 | 2,10 97 | 2,22 98 | 2,36 99 | 2,23 100 | 2,10 101 | 1,27 102 | 2,16 103 | 2,37 104 | 2,22 105 | 2,18 106 | 2,26 107 | 2,21 108 | 2,19 109 | 2,35 110 | 2,13 111 | 2,18 112 | 1,11 113 | 2,7 114 | 2,9 115 | 2,9 116 | 2,16 117 | 2,10 118 | 2,18 119 | 2,25 120 | 2,23 121 | 2,22 122 | 2,32 123 | 1,10 124 | 2,30 125 | 2,32 126 | 2,18 127 | 2,15 128 | 2,21 129 | 2,31 130 | 2,21 131 | 2,16 132 | 2,17 133 | 2,23 134 | 2,7 135 | 1,25 136 | 2,32 137 | 2,19 138 | 2,18 139 | 1,17 140 | 2,19 141 | 2,55 142 | 2,16 143 | 2,4 144 | 2,44 145 | 2,39 146 | 2,15 147 | 2,19 148 | 2,20 149 | 2,16 150 | 2,15 151 | 2,14 152 | 2,19 153 | 2,7 154 | 2,11 155 | 2,14 156 | 2,27 157 | 2,14 158 | 2,14 159 | 2,5 160 | 1,15 161 | 2,16 162 | 2,10 163 | 2,28 164 | 2,19 165 | 2,29 166 | 2,33 167 | 2,6 168 | 2,16 169 | 2,32 170 | 2,6 171 | 2,22 172 | 2,16 173 | 2,20 174 | 2,8 175 | 2,9 176 | 2,16 177 | 2,18 178 | 2,21 179 | 2,17 180 | 2,24 181 | 2,19 182 | 2,23 183 | 2,26 184 | 2,19 185 | 2,14 186 | 2,9 187 | 2,12 188 | 2,15 189 | 2,25 190 | 2,21 191 | 2,9 192 | 2,17 193 | 2,35 194 | 2,14 195 | 2,27 196 | 2,9 197 | 2,12 198 | 2,9 199 | 2,28 200 | 1,4 201 | 2,28 202 | 2,18 203 | 2,7 204 | 2,28 205 | 2,11 206 | 2,35 207 | 2,5 208 | 1,15 209 | 2,15 210 | 2,13 211 | 2,6 212 | 2,15 213 | 2,24 214 | 2,31 215 | 2,12 216 | 2,43 217 | 1,7 218 | 2,14 219 | 2,30 220 | 2,15 221 | 2,14 222 | 2,11 223 | 2,21 224 | 2,33 225 | 2,24 226 | 2,13 227 | 2,13 228 | 2,13 229 | 2,35 230 | 1,14 231 | 2,40 232 | 2,20 233 | 2,20 234 | 2,30 235 | 2,15 236 | 2,19 237 | 2,38 238 | 2,10 239 | 2,37 240 | 2,29 241 | 2,29 242 | 2,6 243 | 2,11 244 | 2,20 245 | 2,39 246 | 2,22 247 | 2,27 248 | 2,9 249 | 2,14 250 | 2,37 251 | 
2,24 252 | 2,27 253 | 2,10 254 | 2,26 255 | 2,19 256 | 2,8 257 | 1,9 258 | 2,14 259 | 2,18 260 | 2,23 261 | 2,18 262 | 2,22 263 | 2,8 264 | 2,18 265 | 2,27 266 | 2,25 267 | 1,26 268 | 2,16 269 | 2,20 270 | 2,18 271 | 2,11 272 | 2,19 273 | 2,19 274 | 2,42 275 | 2,17 276 | 2,17 277 | 2,28 278 | 2,17 279 | 2,13 280 | 2,8 281 | 2,5 282 | 2,9 283 | 2,14 284 | 2,10 285 | 2,20 286 | 2,29 287 | 2,10 288 | 1,11 289 | 2,14 290 | 2,27 291 | 2,12 292 | 2,16 293 | 2,9 294 | 2,17 295 | 2,31 296 | 2,26 297 | 2,16 298 | 2,24 299 | 2,17 300 | 1,25 301 | 2,7 302 | 2,26 303 | 2,11 304 | 2,42 305 | 2,10 306 | 2,5 307 | 2,12 308 | 2,23 309 | 2,11 310 | 2,42 311 | 2,30 312 | 2,24 313 | 2,11 314 | 2,26 315 | 2,36 316 | 2,5 317 | 2,9 318 | 2,20 319 | 2,36 320 | 2,24 321 | 2,7 322 | 2,20 323 | 2,33 324 | 2,8 325 | 2,23 326 | 2,17 327 | 1,8 328 | 2,14 329 | 2,26 330 | 2,26 331 | 2,6 332 | 2,19 333 | 2,26 334 | 2,11 335 | 2,18 336 | 2,32 337 | 2,11 338 | 2,21 339 | 2,11 340 | 2,20 341 | 2,8 342 | 2,25 343 | 2,29 344 | 2,30 345 | 2,25 346 | 2,12 347 | 2,9 348 | 2,6 349 | 2,11 350 | 2,8 351 | 1,28 352 | 2,24 353 | 2,49 354 | 2,17 355 | 2,12 356 | 2,19 357 | 2,19 358 | 2,21 359 | 2,25 360 | 2,9 361 | 2,11 362 | 1,19 363 | 2,7 364 | 2,23 365 | 2,21 366 | 2,10 367 | 2,30 368 | 2,26 369 | 2,24 370 | 2,19 371 | 1,8 372 | 2,25 373 | 2,22 374 | 2,27 375 | 2,23 376 | 2,22 377 | 1,17 378 | 2,18 379 | 2,26 380 | 2,23 381 | 2,26 382 | 2,19 383 | 2,15 384 | 2,21 385 | 2,31 386 | 2,32 387 | 2,28 388 | 2,14 389 | 2,19 390 | 2,20 391 | 2,30 392 | 2,25 393 | 2,9 394 | 2,17 395 | 2,19 396 | 2,5 397 | 2,22 398 | 2,34 399 | 2,25 400 | 2,27 401 | 1,8 402 | 2,23 403 | 2,12 404 | 2,23 405 | 2,5 406 | 2,17 407 | 2,24 408 | 2,25 409 | 2,22 410 | 2,21 411 | 2,32 412 | 2,13 413 | 2,20 414 | 2,33 415 | 2,16 416 | 2,7 417 | 2,27 418 | 2,19 419 | 2,7 420 | 2,6 421 | 2,18 422 | 2,12 423 | 2,10 424 | 2,20 425 | 2,15 426 | 2,20 427 | 2,11 428 | 2,27 429 | 2,13 430 | 2,19 431 | 2,18 432 | 2,25 433 | 2,18 434 | 2,14 435 | 2,22 436 | 2,16 437 | 2,34 438 | 2,25 439 | 2,14 440 | 2,15 441 | 2,22 442 | 2,22 443 | 2,29 444 | 1,20 445 | 2,31 446 | 1,38 447 | 2,18 448 | 2,22 449 | 2,9 450 | 2,30 451 | 2,30 452 | 2,23 453 | 2,33 454 | 2,5 455 | 2,20 456 | 2,34 457 | 2,25 458 | 2,14 459 | 1,16 460 | 2,18 461 | 2,26 462 | 2,17 463 | 2,7 464 | 2,21 465 | 2,23 466 | 2,17 467 | 2,16 468 | 2,30 469 | 2,12 470 | 2,12 471 | 2,6 472 | 1,26 473 | 2,26 474 | 2,21 475 | 2,13 476 | 2,23 477 | 1,19 478 | 2,17 479 | 2,10 480 | 2,26 481 | 2,26 482 | 2,20 483 | 1,6 484 | 2,6 485 | 2,16 486 | 2,25 487 | 2,24 488 | 2,38 489 | 2,19 490 | 2,5 491 | 2,15 492 | 2,15 493 | 2,26 494 | 2,22 495 | 2,10 496 | 2,16 497 | 2,14 498 | 2,10 499 | 2,9 500 | 2,23 501 | 2,25 502 | 2,19 503 | 2,28 504 | 2,36 505 | 2,19 506 | 2,18 507 | 2,17 508 | 2,13 509 | 2,8 510 | 2,9 511 | 2,26 512 | 2,12 513 | 2,19 514 | 2,18 515 | 2,7 516 | 2,29 517 | 2,17 518 | 2,21 519 | 2,31 520 | 2,11 521 | 2,22 522 | 2,37 523 | 2,25 524 | 2,12 525 | 2,22 526 | 2,27 527 | 2,21 528 | 2,15 529 | 2,36 530 | 2,11 531 | 2,10 532 | 2,22 533 | 2,14 534 | 1,24 535 | 2,15 536 | 2,13 537 | 2,6 538 | 2,25 539 | 2,36 540 | 2,25 541 | 2,24 542 | 1,16 543 | 2,11 544 | 2,22 545 | 2,17 546 | 1,12 547 | 2,27 548 | 2,27 549 | 1,32 550 | 2,17 551 | 1,21 552 | 1,11 553 | 1,29 554 | 2,28 555 | 2,12 556 | 2,24 557 | 2,25 558 | 2,6 559 | 1,14 560 | 2,17 561 | 2,13 562 | 2,38 563 | 2,19 564 | 2,19 565 | 2,3 566 | 2,3 567 | 2,14 568 | 1,17 569 | 2,24 570 | 2,37 571 | 2,15 572 | 2,21 573 | 2,12 574 | 2,28 575 | 2,29 576 | 2,19 577 | 2,8 578 | 
2,27 579 | 2,20 580 | 2,14 581 | 2,4 582 | 2,18 583 | 2,17 584 | 2,18 585 | 1,9 586 | 2,11 587 | 2,3 588 | 2,14 589 | 1,25 590 | 2,7 591 | 2,24 592 | 2,24 593 | 2,32 594 | 2,16 595 | 2,27 596 | 2,12 597 | 2,22 598 | 2,12 599 | 2,13 600 | 2,13 601 | 2,8 602 | 2,30 603 | 2,14 604 | 2,24 605 | 2,27 606 | 2,22 607 | 2,27 608 | 2,24 609 | 1,12 610 | 2,12 611 | 2,27 612 | 2,22 613 | 2,35 614 | 2,24 615 | 2,8 616 | 1,29 617 | 2,13 618 | 2,21 619 | 2,28 620 | 2,9 621 | 2,10 622 | 2,18 623 | 2,12 624 | 2,24 625 | 2,30 626 | 2,22 627 | 2,8 628 | 2,18 629 | 2,8 630 | 2,5 631 | 2,6 632 | 2,19 633 | 2,19 634 | 2,15 635 | 2,19 636 | 2,30 637 | 2,33 638 | 2,7 639 | 2,16 640 | 2,18 641 | 2,15 642 | 2,18 643 | 1,19 644 | 2,23 645 | 2,10 646 | 2,35 647 | 2,7 648 | 2,14 649 | 2,8 650 | 2,15 651 | 2,27 652 | 2,18 653 | 2,27 654 | 2,30 655 | 2,22 656 | 2,14 657 | 2,24 658 | 2,10 659 | 2,16 660 | 2,10 661 | 2,24 662 | 2,14 663 | 2,25 664 | 2,18 665 | 2,34 666 | 2,17 667 | 2,29 668 | 2,27 669 | 2,7 670 | 2,29 671 | 2,34 672 | 2,12 673 | 2,18 674 | 2,18 675 | 2,10 676 | 2,12 677 | 2,3 678 | 2,21 679 | 2,7 680 | 2,18 681 | 2,15 682 | 2,13 683 | 2,27 684 | 2,23 685 | 2,16 686 | 2,10 687 | 2,25 688 | 2,11 689 | 2,19 690 | 2,15 691 | 2,6 692 | 2,13 693 | 2,27 694 | 2,33 695 | 2,23 696 | 2,20 697 | 2,23 698 | 2,13 699 | 2,19 700 | 2,6 701 | 2,7 702 | 2,22 703 | 2,17 704 | 2,18 705 | 2,20 706 | 2,16 707 | 2,27 708 | 2,29 709 | 2,14 710 | 2,12 711 | 2,20 712 | 2,30 713 | 2,22 714 | 2,15 715 | 2,13 716 | 2,10 717 | 2,17 718 | 2,18 719 | 2,28 720 | 2,19 721 | 2,22 722 | 2,32 723 | 2,8 724 | 2,15 725 | 1,13 726 | 2,16 727 | 2,14 728 | 2,14 729 | 2,13 730 | 2,34 731 | 2,11 732 | 2,15 733 | 2,14 734 | 2,17 735 | 2,9 736 | 2,15 737 | 2,12 738 | 2,19 739 | 2,33 740 | 2,10 741 | 2,19 742 | 2,15 743 | 2,30 744 | 2,7 745 | 2,35 746 | 2,18 747 | 2,23 748 | 2,14 749 | 1,22 750 | 2,8 751 | 2,19 752 | 2,14 753 | 2,9 754 | 2,9 755 | 2,16 756 | 2,35 757 | 2,20 758 | 2,7 759 | 2,24 760 | 2,38 761 | 2,20 762 | 1,2 763 | 2,8 764 | 2,28 765 | 2,19 766 | 2,28 767 | 2,23 768 | 2,7 769 | 2,21 770 | 2,21 771 | 2,25 772 | 2,7 773 | 1,8 774 | 2,5 775 | 2,6 776 | 2,28 777 | 2,35 778 | 2,32 779 | 2,25 780 | 2,16 781 | 2,6 782 | 2,18 783 | 2,23 784 | 2,10 785 | 2,19 786 | 2,12 787 | 2,18 788 | 1,25 789 | 2,15 790 | 2,45 791 | 2,25 792 | 2,29 793 | 2,8 794 | 2,6 795 | 2,25 796 | 2,23 797 | 2,20 798 | 2,20 799 | 2,10 800 | 2,20 801 | 2,28 802 | 1,10 803 | 2,35 804 | 2,20 805 | 2,28 806 | 2,8 807 | 1,9 808 | 2,8 809 | 2,8 810 | 2,26 811 | 2,41 812 | 2,10 813 | 2,19 814 | 1,13 815 | 2,19 816 | 1,22 817 | 2,37 818 | 2,16 819 | 2,7 820 | 2,14 821 | 2,4 822 | 2,25 823 | 2,16 824 | 1,10 825 | 2,17 826 | 2,36 827 | 2,13 828 | 2,7 829 | 2,28 830 | 2,19 831 | 2,16 832 | 2,15 833 | 2,30 834 | 2,20 835 | 2,44 836 | 2,17 837 | 2,28 838 | 2,23 839 | 2,20 840 | 2,12 841 | 2,34 842 | 1,11 843 | 2,16 844 | 2,25 845 | 2,12 846 | 2,10 847 | 2,12 848 | 2,26 849 | 2,17 850 | 2,8 851 | 2,47 852 | 2,12 853 | 1,18 854 | 2,23 855 | 2,17 856 | 2,20 857 | 2,8 858 | 2,23 859 | 2,17 860 | 2,43 861 | 2,24 862 | 2,18 863 | 2,22 864 | 2,19 865 | 2,37 866 | 2,5 867 | 2,20 868 | 2,18 869 | 2,8 870 | 2,22 871 | 2,25 872 | 2,26 873 | 2,25 874 | 2,17 875 | 2,16 876 | 2,10 877 | 2,18 878 | 1,20 879 | 1,14 880 | 2,22 881 | 2,29 882 | 2,19 883 | 2,25 884 | 2,44 885 | 1,14 886 | 2,8 887 | 1,8 888 | 2,12 889 | 2,15 890 | 2,23 891 | 2,18 892 | 2,12 893 | 1,27 894 | 2,6 895 | 2,23 896 | 2,11 897 | 2,27 898 | 2,21 899 | 2,22 900 | 2,13 901 | 2,16 902 | 2,11 903 | 2,11 904 | 2,12 905 | 2,28 906 
| 2,11 907 | 2,5 908 | 2,34 909 | 2,8 910 | 2,33 911 | 2,21 912 | 2,8 913 | 2,22 914 | 1,7 915 | 1,36 916 | 2,20 917 | 1,14 918 | 1,18 919 | 1,7 920 | 1,19 921 | 1,24 922 | 1,14 923 | 2,14 924 | 1,14 925 | 1,22 926 | 1,7 927 | 1,10 928 | 1,7 929 | 1,8 930 | 1,3 931 | 1,18 932 | 1,13 933 | 1,21 934 | 1,35 935 | 1,18 936 | 1,6 937 | 1,19 938 | 1,22 939 | 1,6 940 | 1,32 941 | 1,20 942 | 1,32 943 | 1,17 944 | 1,10 945 | 1,26 946 | 1,24 947 | 1,25 948 | 1,22 949 | 1,23 950 | 1,24 951 | 1,29 952 | 1,14 953 | 1,23 954 | 1,20 955 | 1,24 956 | 1,23 957 | 1,15 958 | 1,18 959 | 1,19 960 | 2,7 961 | 2,15 962 | 2,7 963 | 1,15 964 | 1,16 965 | 1,8 966 | 1,15 967 | 1,4 968 | 1,32 969 | 1,20 970 | 1,26 971 | 1,23 972 | 2,14 973 | 1,21 974 | 1,9 975 | 1,10 976 | 1,33 977 | 1,44 978 | 1,17 979 | 1,11 980 | 1,37 981 | 1,19 982 | 2,12 983 | 1,12 984 | 1,16 985 | 1,19 986 | 1,19 987 | 1,30 988 | 1,9 989 | 1,10 990 | 2,22 991 | 1,12 992 | 1,18 993 | 1,17 994 | 1,24 995 | 1,20 996 | 1,6 997 | 2,13 998 | 1,8 999 | 2,12 1000 | 1,18 1001 | 2,7 1002 | 1,25 1003 | 1,19 1004 | 2,11 1005 | 1,6 1006 | 1,7 1007 | 1,22 1008 | 1,22 1009 | 1,20 1010 | 1,14 1011 | 1,23 1012 | 1,29 1013 | 1,16 1014 | 1,6 1015 | 1,42 1016 | 1,17 1017 | 1,19 1018 | 1,26 1019 | 1,20 1020 | 1,6 1021 | 1,27 1022 | 2,17 1023 | 1,9 1024 | 1,18 1025 | 1,25 1026 | 1,9 1027 | 1,18 1028 | 1,26 1029 | 1,13 1030 | 1,7 1031 | 1,24 1032 | 1,15 1033 | 2,10 1034 | 1,34 1035 | 2,23 1036 | 1,20 1037 | 1,31 1038 | 1,14 1039 | 1,28 1040 | 1,6 1041 | 1,31 1042 | 1,14 1043 | 2,20 1044 | 1,13 1045 | 1,24 1046 | 1,24 1047 | 1,30 1048 | 1,12 1049 | 1,26 1050 | 1,23 1051 | 1,9 1052 | 1,15 1053 | 1,19 1054 | 2,23 1055 | 1,22 1056 | 1,13 1057 | 1,27 1058 | 1,28 1059 | 1,14 1060 | 1,10 1061 | 1,30 1062 | 1,11 1063 | 1,8 1064 | 1,9 1065 | 1,25 1066 | 1,29 1067 | 1,24 1068 | 1,8 1069 | 2,30 1070 | 1,34 1071 | 1,29 1072 | 1,7 1073 | 1,33 1074 | 1,34 1075 | 1,28 1076 | 1,31 1077 | 1,19 1078 | 1,14 1079 | 1,24 1080 | 1,40 1081 | 1,12 1082 | 1,34 1083 | 1,28 1084 | 1,23 1085 | 1,16 1086 | 1,21 1087 | 1,37 1088 | 1,28 1089 | 1,10 1090 | 1,22 1091 | 1,22 1092 | 1,20 1093 | 1,31 1094 | 1,21 1095 | 1,27 1096 | 1,29 1097 | 1,21 1098 | 1,24 1099 | 1,7 1100 | 1,31 1101 | 1,23 1102 | 1,6 1103 | 1,29 1104 | 1,20 1105 | 1,20 1106 | 1,27 1107 | 1,8 1108 | 1,33 1109 | 1,22 1110 | 1,19 1111 | 1,7 1112 | 1,9 1113 | 1,24 1114 | 1,28 1115 | 1,13 1116 | 1,6 1117 | 1,28 1118 | 1,16 1119 | 1,5 1120 | 1,36 1121 | 1,32 1122 | 1,36 1123 | 1,21 1124 | 1,35 1125 | 1,27 1126 | 1,25 1127 | 2,25 1128 | 1,17 1129 | 1,12 1130 | 2,24 1131 | 2,37 1132 | 1,38 1133 | 1,15 1134 | 1,31 1135 | 1,9 1136 | 1,8 1137 | 1,7 1138 | 1,6 1139 | 1,16 1140 | 1,29 1141 | 1,7 1142 | 1,17 1143 | 1,11 1144 | 1,24 1145 | 1,8 1146 | 1,21 1147 | 1,5 1148 | 1,28 1149 | 1,19 1150 | 1,11 1151 | 2,10 1152 | 1,23 1153 | 1,12 1154 | 1,8 1155 | 1,20 1156 | 1,17 1157 | 1,42 1158 | 1,24 1159 | 1,13 1160 | 1,29 1161 | 1,22 1162 | 1,18 1163 | 1,6 1164 | 1,27 1165 | 1,26 1166 | 1,27 1167 | 1,18 1168 | 1,6 1169 | 1,4 1170 | 1,21 1171 | 1,6 1172 | 1,17 1173 | 1,27 1174 | 1,27 1175 | 2,33 1176 | 1,15 1177 | 1,21 1178 | 1,13 1179 | 1,26 1180 | 1,16 1181 | 2,20 1182 | 1,13 1183 | 1,12 1184 | 1,10 1185 | 1,32 1186 | 1,12 1187 | 2,26 1188 | 1,21 1189 | 1,16 1190 | 1,41 1191 | 1,11 1192 | 1,23 1193 | 1,24 1194 | 1,8 1195 | 1,7 1196 | 1,18 1197 | 1,37 1198 | 1,12 1199 | 1,9 1200 | 1,24 1201 | 1,12 1202 | 1,9 1203 | 1,20 1204 | 1,8 1205 | 1,16 1206 | 1,17 1207 | 1,28 1208 | 1,11 1209 | 1,14 1210 | 2,34 1211 | 1,11 1212 | 1,8 1213 | 1,19 1214 | 2,28 
1215 | 1,8 1216 | 1,11 1217 | 1,19 1218 | 1,12 1219 | 1,25 1220 | 1,23 1221 | 1,9 1222 | 1,24 1223 | 1,45 1224 | 1,23 1225 | 1,9 1226 | 1,37 1227 | 1,5 1228 | 1,8 1229 | 2,6 1230 | 1,6 1231 | 1,16 1232 | 1,37 1233 | 1,31 1234 | 1,12 1235 | 1,8 1236 | 1,9 1237 | 1,18 1238 | 1,7 1239 | 1,8 1240 | 1,20 1241 | 1,9 1242 | 1,20 1243 | 1,9 1244 | 1,26 1245 | 1,12 1246 | 1,22 1247 | 1,15 1248 | 1,34 1249 | 1,36 1250 | 1,26 1251 | 1,46 1252 | 1,3 1253 | 1,3 1254 | 1,3 1255 | 1,21 1256 | 1,15 1257 | 1,33 1258 | 1,6 1259 | 1,7 1260 | 1,31 1261 | 1,11 1262 | 1,24 1263 | 1,12 1264 | 1,19 1265 | 1,20 1266 | 1,24 1267 | 1,42 1268 | 1,24 1269 | 1,7 1270 | 1,9 1271 | 1,20 1272 | 1,25 1273 | 1,23 1274 | 1,34 1275 | 1,16 1276 | 1,4 1277 | 1,18 1278 | 1,17 1279 | 1,28 1280 | 1,11 1281 | 1,46 1282 | 1,27 1283 | 1,11 1284 | 1,14 1285 | 1,10 1286 | 1,15 1287 | 1,8 1288 | 1,22 1289 | 1,13 1290 | 1,23 1291 | 1,7 1292 | 1,9 1293 | 1,24 1294 | 1,22 1295 | 1,15 1296 | 1,21 1297 | 1,20 1298 | 1,20 1299 | 1,16 1300 | 1,19 1301 | 1,10 1302 | 1,29 1303 | 1,10 1304 | 1,9 1305 | 1,13 1306 | 2,13 1307 | 1,15 1308 | 1,19 1309 | 1,11 1310 | 1,11 1311 | 1,4 1312 | 1,24 1313 | 1,21 1314 | 1,14 1315 | 1,38 1316 | 1,6 1317 | 1,15 1318 | 1,20 1319 | 1,23 1320 | 1,5 1321 | 2,5 1322 | 1,38 1323 | 1,24 1324 | 1,11 1325 | 1,22 1326 | 1,21 1327 | 1,35 1328 | 1,7 1329 | 1,8 1330 | 1,40 1331 | 1,11 1332 | 1,15 1333 | 1,12 1334 | 1,10 1335 | 1,4 1336 | 1,19 1337 | 1,22 1338 | 1,38 1339 | 1,18 1340 | 1,11 1341 | 1,38 1342 | 1,16 1343 | 1,16 1344 | 1,20 1345 | 1,29 1346 | 1,30 1347 | 1,15 1348 | 1,19 1349 | 1,9 1350 | 1,23 1351 | 1,12 1352 | 2,8 1353 | 1,13 1354 | 1,14 1355 | 1,8 1356 | 1,15 1357 | 1,20 1358 | 1,13 1359 | 1,15 1360 | 1,25 1361 | 1,23 1362 | 1,6 1363 | 1,31 1364 | 1,26 1365 | 1,26 1366 | 2,40 1367 | 1,13 1368 | 1,20 1369 | 1,20 1370 | 1,33 1371 | 1,22 1372 | 1,21 1373 | 1,21 1374 | 1,18 1375 | 1,25 1376 | 1,14 1377 | 1,10 1378 | 1,12 1379 | 1,25 1380 | 1,35 1381 | 1,7 1382 | 1,13 1383 | 1,13 1384 | 1,10 1385 | 1,13 1386 | 1,9 1387 | 1,25 1388 | 1,19 1389 | 1,24 1390 | 1,32 1391 | 1,30 1392 | 2,42 1393 | 1,9 1394 | 1,2 1395 | 2,4 1396 | 1,13 1397 | 1,15 1398 | 1,18 1399 | 1,12 1400 | 1,32 1401 | 1,27 1402 | 1,23 1403 | 1,27 1404 | 1,34 1405 | 1,32 1406 | 1,13 1407 | 1,15 1408 | 1,23 1409 | 1,13 1410 | 1,14 1411 | 1,17 1412 | 1,27 1413 | 2,26 1414 | 1,11 1415 | 1,31 1416 | 1,18 1417 | 1,17 1418 | 1,25 1419 | 1,5 1420 | 1,11 1421 | 1,12 1422 | 1,24 1423 | 1,29 1424 | 2,9 1425 | 1,6 1426 | 1,10 1427 | 1,16 1428 | 1,10 1429 | 1,6 1430 | 1,19 1431 | 1,32 1432 | 1,15 1433 | 1,18 1434 | 1,13 1435 | 1,14 1436 | 1,11 1437 | 1,27 1438 | 1,41 1439 | 1,19 1440 | 1,36 1441 | 1,7 1442 | 1,37 1443 | 1,31 1444 | 1,14 1445 | 1,7 1446 | 2,34 1447 | 1,17 1448 | 1,13 1449 | 1,14 1450 | 1,17 1451 | 1,13 1452 | 1,24 1453 | 1,15 1454 | 1,5 1455 | 1,42 1456 | 1,14 1457 | 1,25 1458 | 2,8 1459 | 1,7 1460 | 1,2 1461 | 1,15 1462 | 1,11 1463 | 1,8 1464 | 1,35 1465 | 1,32 1466 | 2,12 1467 | 1,14 1468 | 2,27 1469 | 1,17 1470 | 1,9 1471 | 1,32 1472 | 1,20 1473 | 1,36 1474 | 1,17 1475 | 1,28 1476 | 1,14 1477 | 1,12 1478 | 2,17 1479 | 1,22 1480 | 1,11 1481 | 2,8 1482 | 1,13 1483 | 1,25 1484 | 1,33 1485 | 1,20 1486 | 1,12 1487 | 1,17 1488 | 1,29 1489 | 1,25 1490 | 1,21 1491 | 1,14 1492 | 1,26 1493 | 1,25 1494 | 1,15 1495 | 1,10 1496 | 1,24 1497 | 1,7 1498 | 1,9 1499 | 1,22 1500 | 1,34 1501 | 1,15 1502 | 1,28 1503 | 1,18 1504 | 1,12 1505 | 1,14 1506 | 1,15 1507 | 1,21 1508 | 1,8 1509 | 1,12 1510 | 1,20 1511 | 1,10 1512 | 1,10 1513 | 1,24 1514 | 1,13 1515 | 1,11 
1516 | 1,35 1517 | 1,20 1518 | 1,6 1519 | 1,5 1520 | 1,23 1521 | 2,12 1522 | 1,20 1523 | 1,18 1524 | 1,19 1525 | 1,8 1526 | 1,12 1527 | 1,16 1528 | 1,29 1529 | 1,17 1530 | 1,28 1531 | 1,8 1532 | 1,32 1533 | 1,17 1534 | 1,27 1535 | 1,7 1536 | 1,17 1537 | 1,11 1538 | 1,22 1539 | 1,28 1540 | 1,14 1541 | 1,27 1542 | 1,11 1543 | 1,23 1544 | 1,11 1545 | 1,15 1546 | 1,13 1547 | 1,24 1548 | 1,26 1549 | 1,18 1550 | 1,37 1551 | 1,21 1552 | 1,29 1553 | 1,32 1554 | 1,8 1555 | 1,20 1556 | 1,29 1557 | 2,15 1558 | 1,15 1559 | 1,15 1560 | 1,8 1561 | 1,8 1562 | 1,35 1563 | 1,16 1564 | 1,41 1565 | 1,15 1566 | 1,22 1567 | 1,3 1568 | 1,11 1569 | 1,18 1570 | 1,17 1571 | 1,28 1572 | 1,14 1573 | 1,32 1574 | 1,13 1575 | 1,33 1576 | 1,19 1577 | 1,21 1578 | 1,25 1579 | 1,11 1580 | 1,6 1581 | 1,31 1582 | 1,17 1583 | 1,24 1584 | 2,8 1585 | 1,12 1586 | 1,17 1587 | 2,15 1588 | 1,29 1589 | 1,18 1590 | 1,27 1591 | 1,27 1592 | 1,28 1593 | 1,19 1594 | 1,24 1595 | 1,8 1596 | 1,17 1597 | 2,12 1598 | 1,14 1599 | 1,22 1600 | 1,13 1601 | 1,24 1602 | 1,20 1603 | 1,23 1604 | 1,20 1605 | 2,7 1606 | 1,11 1607 | 1,19 1608 | 1,13 1609 | 1,27 1610 | 1,31 1611 | 1,26 1612 | 1,16 1613 | 1,10 1614 | 1,25 1615 | 1,19 1616 | 1,39 1617 | 1,9 1618 | 1,15 1619 | 1,7 1620 | 1,15 1621 | 1,31 1622 | 1,20 1623 | 1,11 1624 | 1,7 1625 | 1,40 1626 | 1,16 1627 | 1,17 1628 | 1,8 1629 | 1,28 1630 | 1,11 1631 | 1,24 1632 | 1,26 1633 | 1,20 1634 | 1,10 1635 | 1,10 1636 | 1,27 1637 | 1,5 1638 | 1,26 1639 | 1,31 1640 | 1,22 1641 | 1,23 1642 | 1,10 1643 | 1,10 1644 | 1,29 1645 | 1,25 1646 | 1,30 1647 | 1,25 1648 | 1,33 1649 | 1,24 1650 | 1,19 1651 | 1,30 1652 | 1,18 1653 | 1,17 1654 | 1,5 1655 | 1,6 1656 | 1,24 1657 | 1,4 1658 | 1,29 1659 | 1,15 1660 | 1,19 1661 | 1,25 1662 | 1,19 1663 | 1,17 1664 | 1,6 1665 | 1,10 1666 | 2,24 1667 | 1,8 1668 | 1,25 1669 | 2,19 1670 | 1,17 1671 | 1,19 1672 | 1,16 1673 | 1,12 1674 | 1,5 1675 | 1,23 1676 | 1,18 1677 | 1,25 1678 | 1,14 1679 | 1,23 1680 | 2,23 1681 | 1,14 1682 | 1,18 1683 | 1,14 1684 | 1,12 1685 | 1,20 1686 | 1,9 1687 | 1,32 1688 | 1,5 1689 | 1,19 1690 | 1,21 1691 | 1,23 1692 | 1,20 1693 | 1,7 1694 | 1,18 1695 | 1,18 1696 | 1,16 1697 | 2,12 1698 | 1,27 1699 | 1,26 1700 | 1,20 1701 | 1,11 1702 | 1,11 1703 | 1,16 1704 | 1,33 1705 | 1,19 1706 | 2,14 1707 | 1,27 1708 | 1,18 1709 | 1,15 1710 | 1,19 1711 | 1,24 1712 | 1,30 1713 | 1,26 1714 | 1,33 1715 | 1,17 1716 | 1,21 1717 | 2,25 1718 | 1,9 1719 | 1,12 1720 | 1,35 1721 | 1,7 1722 | 1,6 1723 | 1,9 1724 | 1,28 1725 | 1,12 1726 | 1,29 1727 | 1,23 1728 | 1,26 1729 | 1,10 1730 | 1,27 1731 | 1,22 1732 | 1,23 1733 | 1,16 1734 | 1,11 1735 | 1,19 1736 | 1,24 1737 | 1,7 1738 | 1,24 1739 | 1,22 1740 | 1,17 1741 | 1,25 1742 | 1,10 1743 | 2,28 1744 | 1,30 1745 | 1,23 1746 | 1,8 1747 | 1,11 1748 | 1,26 1749 | 1,17 1750 | 1,16 1751 | 1,8 1752 | 1,34 1753 | 1,6 1754 | 1,6 1755 | 1,14 1756 | 1,15 1757 | 1,15 1758 | 1,16 1759 | 1,21 1760 | 1,24 1761 | 1,17 1762 | 1,25 1763 | 1,18 1764 | 1,7 1765 | 1,7 1766 | 1,11 1767 | 1,23 1768 | 1,20 1769 | 1,19 1770 | 1,29 1771 | 1,23 1772 | 1,19 1773 | 1,4 1774 | 1,13 1775 | 1,21 1776 | 1,27 1777 | 1,35 1778 | 1,18 1779 | 1,9 1780 | 1,12 1781 | 1,12 1782 | 1,17 1783 | 1,7 1784 | 1,8 1785 | 1,27 1786 | 1,23 1787 | 1,16 1788 | 1,22 1789 | 1,22 1790 | 1,13 1791 | 2,15 1792 | 2,31 1793 | 1,28 1794 | 1,20 1795 | 1,11 1796 | 1,19 1797 | 1,20 1798 | 1,9 1799 | 1,39 1800 | 1,13 1801 | 1,14 1802 | 1,16 1803 | 1,14 1804 | 1,13 1805 | 1,16 1806 | 1,48 1807 | 1,11 1808 | 1,32 1809 | 1,17 1810 | 1,21 1811 | 1,23 1812 | 1,9 1813 | 1,20 1814 | 1,9 1815 | 1,17 
1816 | 1,4 1817 | 2,3 1818 | 2,8 1819 | 2,7 1820 | 2,7 1821 | 1,5 1822 | --------------------------------------------------------------------------------