├── .gitignore
├── README.md
├── load_sst_data.py
├── main.py
├── models
│   ├── __init__.py
│   ├── dynamic.py
│   ├── dynamic2.py
│   ├── fakedynamic.py
│   ├── fakestatic.py
│   ├── static.py
│   └── static2.py
├── test_embeddings.txt
├── test_trees.txt
├── tests.py
└── utils.py

/.gitignore:
--------------------------------------------------------------------------------
*.pyc

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
Data Dependencies:

- SST
- 50d GloVe embeddings

Python Dependencies:

- Python 2.7
- PyTorch
- NumPy
- gflags (python-gflags==2.0)

To run:

```
python main.py
```

--------------------------------------------------------------------------------
/load_sst_data.py:
--------------------------------------------------------------------------------
LABEL_MAP = {
    "0": 0,
    "1": 1,
    "2": 2,
    "3": 3,
    "4": 4,
}


class Example(object):
    label = None
    sentence = None
    tokens = None
    transitions = None

    def __repr__(self):
        return str(self.__dict__)


def convert_unary_binary_bracketed_data(filename, binary=False):
    # Read a binary parse in which every leaf node is wrapped as a unary
    # constituent, as here:
    #   (4 (2 (2 The ) (2 actors ) ) (3 (4 (2 are ) (3 fantastic ) ) (2 . ) ) )
    # and extract the tokens plus a shift (0) / reduce (1) transition sequence.
    examples = []
    vocab = set()
    with open(filename, 'r') as f:
        for line in f:
            example = Example()
            line = line.strip()
            line = line.replace(')', ' )')
            if len(line) == 0:
                continue
            example.label = int(line[1])

            if binary:
                # Collapse the 5-way SST labels to binary sentiment and skip
                # neutral examples.
                label = example.label
                if label < 2:
                    example.label = 0
                elif label > 2:
                    example.label = 1
                else:
                    continue

            example.sentence = line
            example.tokens = []
            example.transitions = []

            words = example.sentence.split(' ')
            for index, word in enumerate(words):
                if word[0] != "(":
                    if word == ")":
                        # Ignore unary merges.
                        if words[index - 1] == ")":
                            example.transitions.append(1)
                    else:
                        # Downcase all words to match GloVe.
                        w = word.lower()
                        example.tokens.append(w)
                        vocab.add(w)
                        example.transitions.append(0)
            examples.append(example)
    return examples, vocab


def load_data(filename):
    return convert_unary_binary_bracketed_data(filename, binary=True)


if __name__ == '__main__':
    import sys

    def get(l, idx, default):
        try:
            return l[idx]
        except IndexError:
            return default

    path = get(sys.argv, 1, 'trees/dev.txt')
    binary = int(get(sys.argv, 2, '0')) == 1
    examples, vocab = convert_unary_binary_bracketed_data(path, binary)
    print(examples[0])
    print(len(vocab))
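# Worked example (traced by hand; not part of the original file). For the
# parse shown in the comment inside convert_unary_binary_bracketed_data,
#   (4 (2 (2 The ) (2 actors ) ) (3 (4 (2 are ) (3 fantastic ) ) (2 . ) ) )
# the loop yields
#   tokens      = ['the', 'actors', 'are', 'fantastic', '.']
#   transitions = [0, 0, 1, 0, 0, 1, 0, 1, 1]
# i.e. five shifts (0) and four reduces (1). The unary wrapper around each
# leaf contributes no transition, so len(transitions) == 2 * len(tokens) - 1.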
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
"""

Dynamic Batched RNNs
====================

A. Static   - Straightforward RNN.
B. Static2  - RNN using an RNNCell in a loop.
C. Dynamic  - RNN with dynamically sized input at each timestep. No padding.
D. Dynamic2 - RNN with dynamically sized input at each timestep. Uses padding.


Speed Ranking (CPU)
===================

1. Static/Static2 - Similar performance. Surprisingly faster than the dynamic alternatives.
2. Dynamic
3. Dynamic2

"""

import load_sst_data
import pprint
import utils
import time
import sys
import numpy as np
import gflags

import models
from utils import Accumulator, Args, make_batch

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


FLAGS = gflags.FLAGS
args = Args()

gflags.DEFINE_enum("style", "dynamic",
                   ["static", "static2", "dynamic", "dynamic2", "fakedynamic", "fakestatic"],
                   "Specify dynamic or static RNN loops.")
gflags.DEFINE_boolean("smart_batching", True, "Bucket batches of similar length together.")

# Parse command line flags.
FLAGS(sys.argv)

# Set default args, then override with any command line flags.
args.training_data_path = 'trees/dev.txt'
args.eval_data_path = 'trees/dev.txt'
args.embedding_data_path = 'glove.6B.50d.txt'
args.word_embedding_dim = 50
args.model_dim = 100
args.mlp_dim = 256
args.batch_size = 32
args.lr = 0.0001
args.max_training_steps = 50000
args.eval_interval_steps = 100
args.statistics_interval_steps = 100
args.num_classes = 2

args.__dict__.update(FLAGS.FlagValuesDict())

pp = pprint.PrettyPrinter(indent=4)

print("Args: {}".format(pp.pformat(args.__dict__)))

# Specify data loader.
data_manager = load_sst_data

# Load data.
training_data, training_vocab = data_manager.load_data(args.training_data_path)
eval_data, eval_vocab = data_manager.load_data(args.eval_data_path)

# Load embeddings.
vocab = set.union(training_vocab, eval_vocab)
vocab = utils.BuildVocabularyForTextEmbeddingFile(
    args.embedding_data_path, vocab, utils.CORE_VOCABULARY)
initial_embeddings = utils.LoadEmbeddingsFromText(
    vocab, args.word_embedding_dim, args.embedding_data_path)

# Tokenize data.
training_data = utils.Tokenize(training_data, vocab)
eval_data = utils.Tokenize(eval_data, vocab)

# Create iterators.
training_iter = utils.MakeDataIterator(training_data, args.batch_size, smart_batching=args.smart_batching, forever=True)()
eval_iter = utils.MakeDataIterator(eval_data, args.batch_size, smart_batching=args.smart_batching, forever=False)

# Pick model.
Net = getattr(models, args.style)

# Init model.
model = Net(
    model_dim=args.model_dim,
    mlp_dim=args.mlp_dim,
    num_classes=args.num_classes,
    word_embedding_dim=args.word_embedding_dim,
    initial_embeddings=initial_embeddings,
)

# Init optimizer.
optimizer = optim.Adam(model.parameters(), lr=args.lr, betas=(0.9, 0.999))

print(model)
print("Total Params: {}".format(sum(torch.numel(p.data) for p in model.parameters())))

A = Accumulator()

# Train loop.
for step in range(args.max_training_steps):

    start = time.time()

    data, target, lengths = make_batch(next(training_iter), args.style == "dynamic")

    model.train()
    optimizer.zero_grad()
    y = model(data, lengths)
    loss = F.nll_loss(y, Variable(target, volatile=False))
    loss.backward()
    optimizer.step()

    pred = y.data.max(1)[1]
    acc = pred.eq(target).sum() / float(args.batch_size)

    end = time.time()

    # Average time per example, not per batch.
    avg_time = (end - start) / float(args.batch_size)

    A.add('time', avg_time)
    A.add('acc', acc)
    A.add('loss', loss.data[0])

    if step % args.statistics_interval_steps == 0:
        print("Step: {} Acc: {:.5} Loss: {:.5} Time: {:.5}".format(step,
              A.get_avg('acc'),
              A.get_avg('loss'),
              A.get_avg('time'),
              ))

    if step % args.eval_interval_steps == 0:

        accum_acc = []
        accum_loss = []
        accum_time = []

        # Eval loop.
        for batch in eval_iter():
            start = time.time()

            data, target, lengths = make_batch(batch, args.style == "dynamic")

            model.eval()
            y = model(data, lengths)
            pred = y.data.max(1)[1]
            # Normalize by the actual batch size; the final eval batch may be
            # smaller than args.batch_size.
            acc = pred.eq(target).sum() / float(target.size(0))
            loss = F.nll_loss(y, Variable(target, volatile=False))

            end = time.time()

            avg_time = (end - start) / float(target.size(0))

            accum_acc.append(acc)
            accum_loss.append(loss.data[0])
            accum_time.append(avg_time)

        avg_acc = np.array(accum_acc).mean()
        avg_loss = np.array(accum_loss).mean()
        avg_time = np.array(accum_time).mean()
        print("Step: {} Eval Acc: {:.5} Loss: {:.5} Time: {:.5}".format(step,
              avg_acc,
              avg_loss,
              avg_time,
              ))
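# Example invocations (illustrative additions; the flag spellings follow
# python-gflags conventions, and the paths set above assume the SST trees/
# directory and GloVe vectors sit next to this script):
#
#   python main.py --style=static2
#   python main.py --style=dynamic --nosmart_batching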
--------------------------------------------------------------------------------
/models/__init__.py:
--------------------------------------------------------------------------------
from dynamic import Net as dynamic
from dynamic2 import Net as dynamic2
from fakedynamic import Net as fakedynamic
from fakestatic import Net as fakestatic
from static import Net as static
from static2 import Net as static2

--------------------------------------------------------------------------------
/models/dynamic.py:
--------------------------------------------------------------------------------
import numpy as np

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self,
                 model_dim=None,
                 mlp_dim=None,
                 num_classes=None,
                 word_embedding_dim=None,
                 initial_embeddings=None,
                 **kwargs):
        super(Net, self).__init__()
        self.model_dim = model_dim
        self.initial_embeddings = initial_embeddings
        self.rnn = nn.RNNCell(word_embedding_dim, model_dim)
        self.l0 = nn.Linear(model_dim, mlp_dim)
        self.l1 = nn.Linear(mlp_dim, num_classes)

    def forward(self, x, lengths):
        # Here x is a list of per-example token-id lists; the lengths argument
        # is recomputed from it.
        batch_size = len(x)
        lengths = [len(s) for s in x]

        outputs = [Variable(torch.zeros(1, self.model_dim).float(), volatile=not self.training)
                   for _ in range(batch_size)]

        for t in range(max(lengths)):
            batch = []
            h = []
            idx = []
            for i, (s, l) in enumerate(zip(x, lengths)):
                # A length-l sequence becomes active at t = max_len - l, so
                # sequences are right-aligned and all finish on the last step.
                if l >= max(lengths) - t:
                    # pop() consumes tokens from the *end* of each list; the
                    # order tokens reach the RNN therefore depends on how
                    # make_batch (in utils.py, not shown) orders the lists.
                    batch.append(s.pop())
                    h.append(outputs[i])
                    idx.append(i)

            # Flatten the collected token ids into a 1-D index array.
            batch = np.concatenate(np.array(batch).reshape(-1, 1), 0)
            emb = Variable(torch.from_numpy(self.initial_embeddings.take(batch, 0)), volatile=not self.training)
            h = torch.cat(h, 0)
            h_next = self.rnn(emb, h)
            h_next = torch.chunk(h_next, len(idx))

            for i, o in zip(idx, h_next):
                outputs[i] = o

        outputs = torch.cat(outputs, 0)
        h = F.relu(self.l0(F.dropout(outputs, 0.5, self.training)))
        h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
        y = F.log_softmax(h)
        return y
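# Scheduling example (illustrative addition, not in the original file): with
# lengths = [5, 3, 1], the condition l >= max(lengths) - t activates
#   t=0: the length-5 sequence          t=3: lengths 5 and 3
#   t=1: the length-5 sequence          t=4: lengths 5, 3, and 1
#   t=2: lengths 5 and 3
# so shorter sequences join late, every sequence finishes on the final step,
# and no padded timesteps are computed at all.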
--------------------------------------------------------------------------------
/models/dynamic2.py:
--------------------------------------------------------------------------------
import numpy as np

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self,
                 model_dim=None,
                 mlp_dim=None,
                 num_classes=None,
                 word_embedding_dim=None,
                 initial_embeddings=None,
                 **kwargs):
        super(Net, self).__init__()
        self.model_dim = model_dim
        self.initial_embeddings = initial_embeddings
        self.rnn = nn.RNNCell(word_embedding_dim, model_dim)
        self.l0 = nn.Linear(model_dim, mlp_dim)
        self.l1 = nn.Linear(mlp_dim, num_classes)

    def forward(self, x, lengths):
        batch_size = x.size(0)
        max_len = max(lengths)

        emb = Variable(torch.from_numpy(
            self.initial_embeddings.take(x.numpy(), 0)),
            volatile=not self.training)

        outputs = [Variable(torch.zeros(batch_size, self.model_dim).float(), volatile=not self.training)]

        for t in range(max_len):
            # Split the batch into rows that are active at this timestep and
            # rows that have not started yet.
            choose = torch.ByteTensor(batch_size)
            indices = []
            not_indices = []
            for i, l in enumerate(lengths):
                if l >= max_len - t:
                    indices.append(i)
                    choose[i] = 1
                else:
                    not_indices.append(i)
                    choose[i] = 0

            # Build batch.
            batch = torch.index_select(emb[:, t, :], 0, Variable(torch.LongTensor(indices), volatile=not self.training))
            h_prev = torch.index_select(outputs[-1], 0, Variable(torch.LongTensor(indices), volatile=not self.training))
            h_next = self.rnn(batch, h_prev)

            # Carry the previous hidden state through unchanged for inactive rows.
            if len(not_indices) > 0:
                not_h_prev = torch.index_select(outputs[-1], 0, Variable(torch.LongTensor(not_indices), volatile=not self.training))
                _not_h_prev = torch.chunk(not_h_prev, len(not_indices))
            _h_next = torch.chunk(h_next, len(indices))

            # Reassemble the full-batch hidden state for the next step,
            # preserving the original row order.
            _h = []
            _h_next_idx = 0
            _not_h_prev_idx = 0
            for c in choose:
                if c == 1:
                    _h.append(_h_next[_h_next_idx])
                    _h_next_idx += 1
                else:
                    _h.append(_not_h_prev[_not_h_prev_idx])
                    _not_h_prev_idx += 1
            h = torch.cat(_h, 0)

            outputs.append(h)

        hn = outputs[-1]
        h = F.relu(self.l0(F.dropout(hn, 0.5, self.training)))
        h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
        y = F.log_softmax(h)
        return y
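# Reassembly example (illustrative addition, not in the original file): with
# choose = [1, 0, 1], the loop above produces
#   _h = [_h_next[0], _not_h_prev[0], _h_next[1]]
# which restores the original batch order before the next step. Note that
# selecting emb[:, t, :] for exactly the rows with l >= max_len - t matches a
# left-padded batch, where a length-l sequence's tokens occupy positions
# max_len - l .. max_len - 1; make_batch lives in utils.py, which this dump
# does not include, so treat that layout as an assumption.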
--------------------------------------------------------------------------------
/models/fakedynamic.py:
--------------------------------------------------------------------------------
import numpy as np

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self,
                 model_dim=None,
                 mlp_dim=None,
                 num_classes=None,
                 word_embedding_dim=None,
                 initial_embeddings=None,
                 **kwargs):
        super(Net, self).__init__()
        self.word_embedding_dim = word_embedding_dim
        self.model_dim = model_dim
        self.initial_embeddings = initial_embeddings
        self.rnn = nn.RNNCell(word_embedding_dim, model_dim)
        self.l0 = nn.Linear(model_dim, mlp_dim)
        self.l1 = nn.Linear(mlp_dim, num_classes)

    def forward(self, x, lengths):
        batch_size = x.size(0)
        max_len = max(lengths)

        emb = Variable(torch.from_numpy(
            self.initial_embeddings.take(x.numpy(), 0)),
            volatile=not self.training)

        for t in range(max_len):
            indices = []
            for i, l in enumerate(lengths):
                if l >= max_len - t:
                    indices.append(i)

            # Build batch. The RNNCell is fed freshly allocated (uninitialized)
            # tensors rather than emb: this model only exists to time the
            # dynamic batching control flow itself.
            dynamic_batch_size = len(indices)
            inp = Variable(torch.FloatTensor(dynamic_batch_size, self.word_embedding_dim), volatile=not self.training)
            h = Variable(torch.FloatTensor(dynamic_batch_size, self.model_dim), volatile=not self.training)
            output = self.rnn(inp, h)

        hn = output
        h = F.relu(self.l0(F.dropout(hn.squeeze(), 0.5, self.training)))
        h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
        y = F.log_softmax(h)
        return y

--------------------------------------------------------------------------------
/models/fakestatic.py:
--------------------------------------------------------------------------------
import numpy as np

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self,
                 model_dim=None,
                 mlp_dim=None,
                 num_classes=None,
                 word_embedding_dim=None,
                 initial_embeddings=None,
                 **kwargs):
        super(Net, self).__init__()
        self.word_embedding_dim = word_embedding_dim
        self.model_dim = model_dim
        self.initial_embeddings = initial_embeddings
        self.rnn = nn.RNN(word_embedding_dim, model_dim, batch_first=True)
        self.l0 = nn.Linear(model_dim, mlp_dim)
        self.l1 = nn.Linear(mlp_dim, num_classes)

    def forward(self, x, lengths):
        batch_size = x.size(0)
        max_len = max(lengths)

        emb = Variable(torch.from_numpy(
            self.initial_embeddings.take(x.numpy(), 0)),
            volatile=not self.training)
        # Benchmark-only shortcuts: inp is allocated but never used, and h0 is
        # left uninitialized; only the speed of the nn.RNN call matters here.
        inp = Variable(torch.FloatTensor(emb.size()), volatile=not self.training)
        h0 = Variable(torch.FloatTensor(1, batch_size, self.model_dim), volatile=not self.training)

        _, hn = self.rnn(emb, h0)

        h = F.relu(self.l0(F.dropout(hn.squeeze(), 0.5, self.training)))
        h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
        y = F.log_softmax(h)
        return y
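# Minimal timing sketch (hypothetical, not part of the original repo; the
# real harness presumably lives in tests.py/utils.py, which this dump does
# not include):
#
#   import time
#   import numpy as np
#   import torch
#
#   emb_table = np.random.randn(100, 50).astype('float32')
#   net = Net(model_dim=100, mlp_dim=256, num_classes=2,
#             word_embedding_dim=50, initial_embeddings=emb_table)
#   x = torch.from_numpy(np.random.randint(0, 100, size=(32, 20)))
#   start = time.time()
#   net(x, [20] * 32)
#   print('forward: {:.4f}s'.format(time.time() - start))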
--------------------------------------------------------------------------------
/models/static.py:
--------------------------------------------------------------------------------
import numpy as np

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self,
                 model_dim=None,
                 mlp_dim=None,
                 num_classes=None,
                 word_embedding_dim=None,
                 initial_embeddings=None,
                 **kwargs):
        super(Net, self).__init__()
        self.model_dim = model_dim
        self.initial_embeddings = initial_embeddings
        self.rnn = nn.RNN(word_embedding_dim, model_dim, batch_first=True)
        self.l0 = nn.Linear(model_dim, mlp_dim)
        self.l1 = nn.Linear(mlp_dim, num_classes)

    def forward(self, x, lengths):
        batch_size = x.size(0)

        emb = Variable(torch.from_numpy(
            self.initial_embeddings.take(x.numpy(), 0)),
            volatile=not self.training)
        h0 = Variable(torch.zeros(1, batch_size, self.model_dim), volatile=not self.training)

        # hn: (1, batch_size, model_dim); squeezed to (batch_size, model_dim).
        _, hn = self.rnn(emb, h0)

        h = F.relu(self.l0(F.dropout(hn.squeeze(), 0.5, self.training)))
        h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
        y = F.log_softmax(h)
        return y

--------------------------------------------------------------------------------
/models/static2.py:
--------------------------------------------------------------------------------
import numpy as np

# PyTorch
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim


class Net(nn.Module):
    def __init__(self,
                 model_dim=None,
                 mlp_dim=None,
                 num_classes=None,
                 word_embedding_dim=None,
                 initial_embeddings=None,
                 **kwargs):
        super(Net, self).__init__()
        self.model_dim = model_dim
        self.initial_embeddings = initial_embeddings
        self.rnn = nn.RNNCell(word_embedding_dim, model_dim)
        self.l0 = nn.Linear(model_dim, mlp_dim)
        self.l1 = nn.Linear(mlp_dim, num_classes)

    def forward(self, x, lengths):
        batch_size, seq_length = x.size()[:2]

        emb = Variable(torch.from_numpy(
            self.initial_embeddings.take(x.numpy(), 0)),
            volatile=not self.training)
        h = Variable(torch.zeros(batch_size, self.model_dim), volatile=not self.training)

        # Unroll the RNNCell over the full (padded) sequence length.
        for t in range(seq_length):
            inp = emb[:, t, :]
            h = self.rnn(inp, h)

        h = F.relu(self.l0(F.dropout(h.squeeze(), 0.5, self.training)))
        h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
        y = F.log_softmax(h)
        return y

--------------------------------------------------------------------------------
/test_embeddings.txt:
--------------------------------------------------------------------------------
1 | the 0.418 0.24968 -0.41242 0.1217 0.34527 -0.044457 -0.49688 -0.17862 -0.00066023 -0.6566 0.27843 -0.14767 -0.55677 0.14658 -0.0095095 0.011658 0.10204 -0.12792 -0.8443 -0.12181 -0.016801 -0.33279 -0.1552 -0.23131 -0.19181 -1.8823 -0.76746 0.099051 -0.42125 -0.19526 4.0071 -0.18594 -0.52287 -0.31681 0.00059213 0.0074449 0.17778 -0.15897 0.012041 -0.054223 -0.29871 -0.15749 -0.34758 -0.045637 -0.44251
0.18785 0.0027849 -0.18411 -0.11514 -0.78581 2 | , 0.013441 0.23682 -0.16899 0.40951 0.63812 0.47709 -0.42852 -0.55641 -0.364 -0.23938 0.13001 -0.063734 -0.39575 -0.48162 0.23291 0.090201 -0.13324 0.078639 -0.41634 -0.15428 0.10068 0.48891 0.31226 -0.1252 -0.037512 -1.5179 0.12612 -0.02442 -0.042961 -0.28351 3.5416 -0.11956 -0.014533 -0.1499 0.21864 -0.33412 -0.13872 0.31806 0.70358 0.44858 -0.080262 0.63003 0.32111 -0.46765 0.22786 0.36034 -0.37818 -0.56657 0.044691 0.30392 3 | . 0.15164 0.30177 -0.16763 0.17684 0.31719 0.33973 -0.43478 -0.31086 -0.44999 -0.29486 0.16608 0.11963 -0.41328 -0.42353 0.59868 0.28825 -0.11547 -0.041848 -0.67989 -0.25063 0.18472 0.086876 0.46582 0.015035 0.043474 -1.4671 -0.30384 -0.023441 0.30589 -0.21785 3.746 0.0042284 -0.18436 -0.46209 0.098329 -0.11907 0.23919 0.1161 0.41705 0.056763 -6.3681e-05 0.068987 0.087939 -0.10285 -0.13931 0.22314 -0.080803 -0.35652 0.016413 0.10216 4 | of 0.70853 0.57088 -0.4716 0.18048 0.54449 0.72603 0.18157 -0.52393 0.10381 -0.17566 0.078852 -0.36216 -0.11829 -0.83336 0.11917 -0.16605 0.061555 -0.012719 -0.56623 0.013616 0.22851 -0.14396 -0.067549 -0.38157 -0.23698 -1.7037 -0.86692 -0.26704 -0.2589 0.1767 3.8676 -0.1613 -0.13273 -0.68881 0.18444 0.0052464 -0.33874 -0.078956 0.24185 0.36576 -0.34727 0.28483 0.075693 -0.062178 -0.38988 0.22902 -0.21617 -0.22562 -0.093918 -0.80375 5 | to 0.68047 -0.039263 0.30186 -0.17792 0.42962 0.032246 -0.41376 0.13228 -0.29847 -0.085253 0.17118 0.22419 -0.10046 -0.43653 0.33418 0.67846 0.057204 -0.34448 -0.42785 -0.43275 0.55963 0.10032 0.18677 -0.26854 0.037334 -2.0932 0.22171 -0.39868 0.20912 -0.55725 3.8826 0.47466 -0.95658 -0.37788 0.20869 -0.32752 0.12751 0.088359 0.16351 -0.21634 -0.094375 0.018324 0.21048 -0.03088 -0.19722 0.082279 -0.09434 -0.073297 -0.064699 -0.26044 6 | and 0.26818 0.14346 -0.27877 0.016257 0.11384 0.69923 -0.51332 -0.47368 -0.33075 -0.13834 0.2702 0.30938 -0.45012 -0.4127 -0.09932 0.038085 0.029749 0.10076 -0.25058 -0.51818 0.34558 0.44922 0.48791 -0.080866 -0.10121 -1.3777 -0.10866 -0.23201 0.012839 -0.46508 3.8463 0.31362 0.13643 -0.52244 0.3302 0.33707 -0.35601 0.32431 0.12041 0.3512 -0.069043 0.36885 0.25168 -0.24517 0.25381 0.1367 -0.31178 -0.6321 -0.25028 -0.38097 7 | in 0.33042 0.24995 -0.60874 0.10923 0.036372 0.151 -0.55083 -0.074239 -0.092307 -0.32821 0.09598 -0.82269 -0.36717 -0.67009 0.42909 0.016496 -0.23573 0.12864 -1.0953 0.43334 0.57067 -0.1036 0.20422 0.078308 -0.42795 -1.7984 -0.27865 0.11954 -0.12689 0.031744 3.8631 -0.17786 -0.082434 -0.62698 0.26497 -0.057185 -0.073521 0.46103 0.30862 0.12498 -0.48609 -0.0080272 0.031184 -0.36576 -0.42699 0.42164 -0.11666 -0.50703 -0.027273 -0.53285 8 | a 0.21705 0.46515 -0.46757 0.10082 1.0135 0.74845 -0.53104 -0.26256 0.16812 0.13182 -0.24909 -0.44185 -0.21739 0.51004 0.13448 -0.43141 -0.03123 0.20674 -0.78138 -0.20148 -0.097401 0.16088 -0.61836 -0.18504 -0.12461 -2.2526 -0.22321 0.5043 0.32257 0.15313 3.9636 -0.71365 -0.67012 0.28388 0.21738 0.14433 0.25926 0.23434 0.4274 -0.44451 0.13813 0.36973 -0.64289 0.024142 -0.039315 -0.26037 0.12017 -0.043782 0.41013 0.1796 9 | " 0.25769 0.45629 -0.76974 -0.37679 0.59272 -0.063527 0.20545 -0.57385 -0.29009 -0.13662 0.32728 1.4719 -0.73681 -0.12036 0.71354 -0.46098 0.65248 0.48887 -0.51558 0.039951 -0.34307 -0.014087 0.86488 0.3546 0.7999 -1.4995 -1.8153 0.41128 0.23921 -0.43139 3.6623 -0.79834 -0.54538 0.16943 -0.82017 -0.3461 0.69495 -1.2256 -0.17992 -0.057474 0.030498 -0.39543 -0.38515 -1.0002 0.087599 -0.31009 -0.34677 -0.31438 0.75004 0.97065 10 | 's 0.23727 
0.40478 -0.20547 0.58805 0.65533 0.32867 -0.81964 -0.23236 0.27428 0.24265 0.054992 0.16296 -1.2555 -0.086437 0.44536 0.096561 -0.16519 0.058378 -0.38598 0.086977 0.0033869 0.55095 -0.77697 -0.62096 0.092948 -2.5685 -0.67739 0.10151 -0.48643 -0.057805 3.1859 -0.017554 -0.16138 0.055486 -0.25885 -0.33938 -0.19928 0.26049 0.10478 -0.55934 -0.12342 0.65961 -0.51802 -0.82995 -0.082739 0.28155 -0.423 -0.27378 -0.007901 -0.030231 11 | for 0.15272 0.36181 -0.22168 0.066051 0.13029 0.37075 -0.75874 -0.44722 0.22563 0.10208 0.054225 0.13494 -0.43052 -0.2134 0.56139 -0.21445 0.077974 0.10137 -0.51306 -0.40295 0.40639 0.23309 0.20696 -0.12668 -0.50634 -1.7131 0.077183 -0.39138 -0.10594 -0.23743 3.9552 0.66596 -0.61841 -0.3268 0.37021 0.25764 0.38977 0.27121 0.043024 -0.34322 0.020339 0.2142 0.044097 0.14003 -0.20079 0.074794 -0.36076 0.43382 -0.084617 0.1214 12 | - -0.16768 1.2151 0.49515 0.26836 -0.4585 -0.23311 -0.52822 -1.3557 0.16098 0.37691 -0.92702 -0.43904 -1.0634 1.028 0.0053943 0.04153 -0.018638 -0.55451 0.026166 0.28066 -0.66245 0.23435 0.2451 0.025668 -1.0869 -2.844 -0.51272 0.27286 0.0071502 0.033984 3.9084 0.52766 -0.66899 1.8238 0.43436 -0.30084 -0.26996 0.4394 0.69956 0.14885 0.029453 1.4888 0.52361 0.099354 1.2515 0.099381 -0.079261 -0.30862 0.30893 0.11023 13 | that 0.88387 -0.14199 0.13566 0.098682 0.51218 0.49138 -0.47155 -0.30742 0.01963 0.12686 0.073524 0.35836 -0.60874 -0.18676 0.78935 0.54534 0.1106 -0.2923 0.059041 -0.69551 -0.18804 0.19455 0.32269 -0.49981 0.306 -2.3902 -0.60749 0.37107 0.078912 -0.23896 3.839 -0.20355 -0.35613 -0.69185 -0.17497 -0.35323 0.10598 -0.039303 0.015701 0.038279 -0.35283 0.44882 -0.16534 0.31579 0.14963 -0.071277 -0.53506 0.52711 -0.20148 0.0095952 14 | on 0.30045 0.25006 -0.16692 0.1923 0.026921 -0.079486 -0.91383 -0.1974 -0.053413 -0.40846 -0.26844 -0.28212 -0.5 0.1221 0.3903 0.17797 -0.4429 -0.40478 -0.9505 -0.16897 0.77793 0.33525 0.3346 -0.1754 -0.12017 -1.7861 0.29241 0.55933 0.029982 -0.32417 3.9297 0.1088 -0.57335 -0.17842 0.0041748 -0.16309 0.45077 -0.16123 -0.17311 -0.087889 -0.089032 0.062001 -0.19946 -0.38863 -0.18232 0.060751 0.098603 -0.07131 0.23052 -0.51939 15 | is 0.6185 0.64254 -0.46552 0.3757 0.74838 0.53739 0.0022239 -0.60577 0.26408 0.11703 0.43722 0.20092 -0.057859 -0.34589 0.21664 0.58573 0.53919 0.6949 -0.15618 0.05583 -0.60515 -0.28997 -0.025594 0.55593 0.25356 -1.9612 -0.51381 0.69096 0.066246 -0.054224 3.7871 -0.77403 -0.12689 -0.51465 0.066705 -0.32933 0.13483 0.19049 0.13812 -0.21503 -0.016573 0.312 -0.33189 -0.026001 -0.38203 0.19403 -0.12466 -0.27557 0.30899 0.48497 16 | was 0.086888 -0.19416 -0.24267 -0.33391 0.56731 0.39783 -0.97809 0.03159 -0.61469 -0.31406 0.56145 0.12886 -0.84193 -0.46992 0.47097 0.023012 -0.59609 0.22291 -1.1614 0.3865 0.067412 0.44883 0.17394 -0.53574 0.17909 -2.1647 -0.12827 0.29036 -0.15061 0.35242 3.124 -0.90085 -0.02567 -0.41709 0.40565 -0.22703 0.76829 0.60982 0.070068 -0.13271 -0.1201 0.096132 -0.43998 -0.48531 -0.5188 -0.3077 -0.75028 -0.77 0.3945 -0.16937 17 | said 0.38973 -0.2121 0.51837 0.80136 1.0336 -0.27784 -0.84525 -0.25333 0.12586 -0.90342 0.24975 0.22022 -1.2053 -0.53771 1.0446 0.62778 0.39704 -0.15812 0.38102 -0.54674 -0.44009 1.0976 0.013069 -0.89971 0.41226 -2.2309 0.28997 0.32175 -0.72738 -0.092244 3.028 -0.062599 0.038329 0.0072918 -0.35388 -0.92256 0.097932 0.10068 1.2116 0.88233 -0.46297 1.3186 0.32705 -0.73446 0.89301 -0.45324 -1.2698 0.86119 0.1415 1.2018 18 | with 0.25616 0.43694 -0.11889 0.20345 0.41959 0.85863 -0.60344 -0.31835 -0.6718 0.003984 -0.075159 0.11043 
-0.73534 0.27436 0.054015 -0.23828 -0.13767 0.011573 -0.46623 -0.55233 0.083317 0.55938 0.51903 -0.27065 -0.28211 -1.3918 0.17498 0.26586 0.061449 -0.273 3.9032 0.38169 -0.056009 -0.004425 0.24033 0.30675 -0.12638 0.33436 0.075485 -0.036218 0.13691 0.37762 -0.12159 -0.13808 0.19505 0.22793 -0.17304 -0.07573 -0.25868 -0.39339 19 | he -0.20092 -0.060271 -0.61766 -0.8444 0.5781 0.14671 -0.86098 0.6705 -0.86556 -0.18234 0.15856 0.45814 -1.0163 -0.35874 0.73869 -0.24048 -0.33893 0.25742 -0.78192 0.083528 0.1775 0.91773 0.64531 -0.19896 0.37416 -2.7525 -0.091586 0.040349 -0.064792 -0.31466 3.3944 0.044941 -0.55038 -0.65334 0.10436 0.016394 0.24388 1.0085 0.31412 -0.33806 -0.16925 0.10228 -0.62143 0.19829 -0.36147 -0.24769 -0.38989 -0.33317 -0.041659 -0.013171 20 | as 0.20782 0.12713 -0.30188 -0.23125 0.30175 0.33194 -0.52776 -0.44042 -0.48348 0.03502 0.34782 0.54574 -0.2066 -0.083713 0.2462 0.15931 -0.0031349 0.32443 -0.4527 -0.22178 0.022652 -0.041714 0.31815 0.088633 -0.03801 -1.8212 -0.50917 -0.097544 -0.08953 0.050476 3.718 -0.16503 -0.078733 -0.57101 0.20418 0.13411 0.074281 0.087502 -0.25443 -0.15011 -0.15768 0.39606 -0.23646 -0.095054 0.07859 -0.012305 -0.49879 -0.35301 0.05058 0.019495 21 | it 0.61183 -0.22072 -0.10898 -0.052967 0.50804 0.34684 -0.33558 -0.19152 -0.035865 0.1051 0.07935 0.2449 -0.4373 -0.33344 0.57479 0.69052 0.29713 0.090669 -0.54992 -0.46176 0.10113 -0.02024 0.28479 0.043512 0.45735 -2.0466 -0.58084 0.61797 0.6518 -0.58263 4.0786 -0.2542 -0.14649 -0.34321 -0.25437 -0.44677 0.12657 0.28134 0.13331 -0.36974 0.050059 -0.10058 -0.017907 0.11142 -0.71798 0.491 -0.099974 -0.043688 -0.097922 0.16806 22 | by 0.35215 -0.35603 0.25708 -0.10611 -0.20718 0.63596 -1.0129 -0.45964 -0.48749 -0.080555 0.43769 0.46046 -0.80943 -0.23336 0.46623 -0.10866 -0.1221 -0.63544 -0.73486 -0.24848 0.4317 0.092264 0.52033 -0.46784 0.016798 -1.5124 -0.19986 -0.43351 -0.59247 0.18088 3.5194 -0.7024 0.23613 -0.68514 -0.37009 -0.080451 0.10635 -0.085495 -0.18451 0.29771 0.18123 0.53627 -0.1001 -0.55165 0.098833 -0.12942 -0.82628 -0.4329 -0.10301 -0.56079 23 | at 0.27724 0.88469 -0.26247 0.084104 0.40813 -1.1697 -0.68522 0.1427 -0.57345 -0.58575 -0.50834 -0.86411 -0.52596 -0.56379 0.32862 0.43393 -0.21248 0.49365 -1.8137 -0.035741 1.3227 0.80865 0.012217 -0.087017 -0.16813 -1.5935 0.47034 0.26097 -0.41666 -0.38526 3.4413 0.34383 -0.035895 -0.5678 0.18377 -0.48647 0.42646 0.4408 1.0931 0.063915 -0.064305 -0.29231 0.086502 0.35245 0.17891 0.25941 0.37069 -0.51611 0.023163 0.05779 24 | ( -0.24978 1.0476 0.21602 0.23278 0.12371 0.2761 0.51184 -1.36 -0.6902 -0.66679 0.49105 0.51671 -0.027218 -0.52056 0.49539 -0.097307 0.12779 0.44388 -1.2612 0.66209 -0.55461 -0.43498 0.81247 0.40855 -0.094327 -0.622 0.36498 -1.0038 -0.77693 -0.22408 3.6533 -0.52004 -0.57384 0.72381 -0.24887 -0.14347 0.69169 -0.51861 1.0806 0.20382 1.1045 0.31045 0.60765 -0.64538 -0.60249 0.60803 0.34393 -0.79411 0.15177 0.45779 25 | ) -0.28314 1.0028 0.14746 0.22262 0.0070985 0.23108 0.57082 -1.2767 -0.72415 -0.7527 0.52624 0.39498 0.0018922 -0.39396 0.44859 -0.019057 0.068143 0.45082 -1.2849 0.68088 -0.48318 -0.45829 0.85504 0.47712 -0.16152 -0.74784 0.40742 -0.97385 -0.7258 -0.17232 3.8901 -0.46535 -0.61925 0.63584 -0.20339 -0.080612 0.64959 -0.51208 0.91193 0.036208 1.0099 0.18802 0.59359 -0.61313 -0.66839 0.67479 0.40625 -0.6959 0.14553 0.37339 26 | from 0.41037 0.11342 0.051524 -0.53833 -0.12913 0.22247 -0.9494 -0.18963 -0.36623 -0.067011 0.19356 -0.33044 0.11615 -0.58585 0.36106 0.12555 -0.3581 -0.023201 -1.2319 0.23383 0.71256 
0.14824 0.50874 -0.12313 -0.20353 -1.82 0.22291 0.020291 -0.081743 -0.27481 3.7343 -0.01874 -0.084522 -0.30364 0.27959 0.043328 -0.24621 0.015373 0.49751 0.15108 -0.01619 0.40132 0.23067 -0.10743 -0.36625 -0.051135 0.041474 -0.36064 -0.19616 -0.81066 27 | his -0.033537 0.47537 -0.68746 -0.72661 0.84028 0.64304 -0.75975 0.63242 -0.54176 0.11632 -0.20254 0.63321 -1.2677 -0.17674 0.35284 -0.55096 -0.65025 -0.3405 -0.31658 -0.077908 -0.11085 0.97299 -0.016844 -0.73752 0.47852 -2.7069 -0.42417 -0.053489 0.018467 -0.11892 3.3082 0.17864 -0.50702 -0.22894 0.24178 0.5698 0.097113 0.95422 0.0076093 -0.54154 0.09828 0.41533 -1.116 0.0050954 -0.14975 -0.45133 -0.081188 -0.62173 -0.022628 -0.4383 28 | '' 0.0028594 0.19457 -0.19449 -0.037583 0.9634 0.099237 -0.27993 -0.71535 -0.28148 0.073535 -0.47299 0.85916 -1.1857 0.12859 1.419 0.23505 0.77673 0.22569 0.20118 -0.62546 -0.53357 0.90877 0.14301 -0.31878 0.612 -2.1162 -1.1655 0.49382 0.87872 -0.77584 3.1332 0.021558 -0.4612 0.0059404 -0.84945 -0.38848 0.086459 -0.39445 0.83242 0.062272 -0.49093 0.68111 0.087143 -0.23992 0.22192 -0.12472 -0.28543 0.043905 -0.22286 1.6923 29 | `` 0.12817 0.15858 -0.38843 -0.39108 0.68366 0.00081259 -0.22981 -0.63358 -0.27663 0.40934 -0.65128 0.8461 -0.9904 0.20696 1.2567 0.064774 0.65813 0.39954 0.076104 -0.54083 -0.32438 0.8456 0.17273 -0.13504 0.39626 -2.3358 -1.6576 0.59957 1.0876 -1.0118 3.33 0.075853 -0.65637 -0.015799 -0.85429 -0.47358 0.082404 -0.69719 0.46647 -0.32044 -0.45517 0.30804 0.07502 -0.021783 0.10823 -0.03306 -0.2514 0.088184 -0.22215 1.4971 30 | an 0.36143 0.58615 -0.23718 0.079656 0.80192 0.49919 -0.33172 -0.19785 0.13876 0.16804 0.12557 -0.24494 -0.092315 0.35135 -0.024396 -0.31713 0.071206 0.37087 -0.82027 0.21193 -0.052153 0.29928 -0.49494 -0.12546 -0.012394 -2.2174 -0.082666 0.15184 0.050396 0.61229 3.7305 -0.93152 -0.28716 -0.48056 0.060682 0.058104 0.42065 -0.046598 0.083503 -0.23819 0.38828 0.36926 -0.44066 0.075673 -0.050556 -0.42269 -0.21577 0.39362 0.36523 0.36077 31 | be 0.91102 -0.22872 0.2077 -0.20237 0.50697 -0.057893 -0.41729 -0.075341 -0.30454 -0.003286 0.44481 0.41818 -0.33409 0.032917 0.98872 0.91984 0.40521 0.01925 -0.1052 -0.79865 -0.36403 -0.087995 0.72182 0.11114 0.2153 -1.9411 -0.26376 0.4455 0.27586 -0.21104 4.0212 -0.061943 -0.32134 -0.81922 0.2108 -0.20414 0.72625 0.47517 -0.39853 -0.39168 -0.34581 0.025928 0.13072 0.73562 -0.15199 -0.18439 -0.67128 0.16692 -0.050063 0.19241 32 | has 0.54822 0.038847 0.10127 0.31319 0.095487 0.41814 -0.79493 -0.58296 0.026643 0.12392 0.35194 -0.02163 -0.87018 -0.27178 0.65449 0.42934 0.097544 0.31779 -0.11921 -0.097106 -0.47585 0.24907 0.1223 -0.29079 -0.16866 -2.1072 0.022174 0.45277 -0.64485 0.13181 3.6594 -0.1714 0.23919 -0.42249 -0.088331 -0.32925 -0.12847 0.47055 -0.075953 -0.27747 -0.41905 0.60803 -0.24261 0.014885 -0.23204 0.020879 -0.82175 0.26588 -0.40267 -0.17111 33 | are 0.96193 0.012516 0.21733 -0.06539 0.26843 0.33586 -0.45112 -0.60547 -0.46845 -0.18412 0.060949 0.19597 0.22645 0.032802 0.42488 0.49678 0.65346 -0.0274 0.17809 -1.1979 -0.40634 -0.22659 1.1495 0.59342 -0.23759 -0.93254 -0.52502 0.05125 0.032248 -0.72774 4.2466 0.60592 0.33397 -0.85754 0.4895 0.21744 -0.13451 0.0094912 -0.54173 0.18857 -0.64506 0.012695 0.73452 1.0032 0.41874 0.16596 -0.71085 0.14032 -0.38468 -0.38712 34 | have 0.94911 -0.34968 0.48125 -0.19306 -0.0088384 0.28182 -0.9613 -0.13581 -0.43083 -0.092933 0.15689 0.059585 -0.49635 -0.17414 0.75661 0.4921 0.21773 -0.22778 -0.13686 -0.90589 -0.48781 0.19919 0.91447 -0.16203 -0.20645 -1.7312 -0.47622 -0.04854 
-0.14027 -0.45828 4.0326 0.6052 0.10448 -0.7361 0.2485 -0.033461 -0.13395 0.052782 -0.27268 0.079825 -0.80127 0.30831 0.43567 0.88747 0.29816 -0.02465 -0.95075 0.36233 -0.72512 -0.6089 35 | but 0.35934 -0.2657 -0.046477 -0.2496 0.54676 0.25924 -0.64458 0.1736 -0.53056 0.13942 0.062324 0.18459 -0.75495 -0.19569 0.70799 0.44759 0.27031 -0.32885 -0.38891 -0.61606 -0.484 0.41703 0.34794 -0.19706 0.40734 -2.1488 -0.24284 0.33809 0.43993 -0.21616 3.7635 0.19002 -0.12503 -0.38228 0.12944 -0.18272 0.076803 0.51579 0.0072516 -0.29192 -0.27523 0.40593 -0.040394 0.28353 -0.024724 0.10563 -0.32879 0.10673 -0.11503 0.074678 36 | were 0.73363 -0.74815 0.45913 -0.56041 0.091855 0.33015 -1.2034 -0.15565 -1.1205 -0.5938 0.23299 -0.46278 -0.34786 -0.47901 0.57621 -0.16053 -0.26457 -0.13732 -0.91878 -0.65339 0.05884 0.61553 1.2607 -0.39821 -0.26056 -1.0127 -0.38517 -0.096929 -0.11701 -0.48536 3.6902 0.30744 0.50713 -0.6537 0.80491 0.23672 0.61769 0.030195 -0.57645 0.60467 -0.63949 -0.11373 0.84984 0.41409 0.083774 -0.28737 -1.4735 -0.20095 -0.17246 -1.0984 37 | not 0.55025 -0.24942 -0.0009386 -0.264 0.5932 0.2795 -0.25666 0.093076 -0.36288 0.090776 0.28409 0.71337 -0.4751 -0.24413 0.88424 0.89109 0.43009 -0.2733 0.11276 -0.81665 -0.41272 0.17754 0.61942 0.10466 0.33327 -2.3125 -0.52371 -0.021898 0.53801 -0.50615 3.8683 0.16642 -0.71981 -0.74728 0.11631 -0.37585 0.5552 0.12675 -0.22642 -0.10175 -0.35455 0.12348 0.16532 0.7042 -0.080231 -0.068406 -0.67626 0.33763 0.050139 0.33465 38 | this 0.53074 0.40117 -0.40785 0.15444 0.47782 0.20754 -0.26951 -0.34023 -0.10879 0.10563 -0.10289 0.10849 -0.49681 -0.25128 0.84025 0.38949 0.32284 -0.22797 -0.44342 -0.31649 -0.12406 -0.2817 0.19467 0.055513 0.56705 -1.7419 -0.91145 0.27036 0.41927 0.020279 4.0405 -0.24943 -0.20416 -0.62762 -0.054783 -0.26883 0.18444 0.18204 -0.23536 -0.16155 -0.27655 0.035506 -0.38211 -0.00075134 -0.24822 0.28164 0.12819 0.28762 0.1444 0.23611 39 | who -0.19461 -0.051277 0.26445 -0.57399 1.0236 0.58923 -1.3399 0.31032 -0.89433 -0.13192 0.21305 0.29171 -0.66079 0.084125 0.76578 -0.42393 0.32445 0.13603 -0.29987 -0.046415 -0.74811 1.2134 0.24988 0.22846 0.23546 -2.6054 0.12491 -0.94028 -0.58308 -0.32325 2.8419 0.33474 -0.33902 -0.23434 0.37735 0.093804 -0.25969 0.68889 0.37689 -0.2186 -0.24244 1.0029 0.18607 0.27486 0.48089 -0.43533 -1.1012 -0.67103 -0.21652 -0.025891 40 | they 0.70835 -0.57361 0.15375 -0.63335 0.46879 -0.066566 -0.86826 0.35967 -0.64786 -0.22525 0.09752 0.27732 -0.35176 -0.25955 0.62368 0.60824 0.34905 -0.27195 -0.27981 -1.0183 -0.1487 0.41932 1.0342 0.17783 0.13569 -1.9999 -0.56163 0.004018 0.60839 -1.0031 3.9546 0.68698 -0.53593 -0.7427 0.18078 0.034527 0.016026 0.12467 -0.084633 -0.10375 -0.47862 -0.22314 0.25487 0.69985 0.32714 -0.15726 -0.6202 -0.23113 -0.31217 -0.3049 41 | had 0.60348 -0.52096 0.40851 -0.37217 0.36978 0.61082 -1.3228 0.24375 -0.5942 -0.35708 0.39942 0.031911 -1.0643 -0.52327 0.71453 0.063384 -0.46383 -0.34641 -0.72445 -0.13714 -0.19179 0.72225 0.6295 -0.8086 -0.037694 -2.0355 0.10566 -0.038591 -0.23201 -0.29627 3.3215 0.032443 0.085368 -0.40771 0.45341 -0.099674 0.44704 0.5422 0.18185 0.17504 -0.33833 0.31697 -0.025268 0.095795 -0.25071 -0.47564 -1.0407 -0.15138 -0.22057 -0.59633 42 | i 0.11891 0.15255 -0.082073 -0.74144 0.75917 -0.48328 -0.31009 0.51476 -0.98708 0.00061757 -0.15043 0.8377 -1.0797 -0.5146 1.3188 0.62007 0.13779 0.47108 -0.072874 -0.72675 -0.74116 0.75263 0.8818 0.29561 1.3548 -2.5701 -1.3523 0.4588 1.0068 -1.1856 3.4737 0.77898 -0.72929 0.25102 -0.26156 -0.34684 0.55841 0.75098 0.4983 
-0.26823 -0.0027443 -0.018298 -0.28096 0.55318 0.037706 0.18555 -0.15025 -0.57512 -0.26671 0.92121 43 | which 0.90558 0.054033 -0.024091 0.08111 0.08645 0.65504 -0.34224 -0.76129 0.10258 0.059494 0.30353 -0.10311 -0.28574 -0.35059 0.23319 0.27913 -0.0021905 0.16015 -0.65622 -0.13339 0.38494 -0.20867 0.26137 -0.090254 -0.34935 -1.5398 -0.46352 0.16734 -0.19253 -0.1979 4.008 -0.24514 -0.15461 -0.2889 -0.049511 -0.29696 0.2161 -0.15298 -0.12235 0.071447 -0.11104 -0.15518 -0.026936 -0.067826 -0.56607 0.20991 -0.40505 -0.12906 -0.18325 -0.58796 44 | will 0.81544 0.30171 0.5472 0.46581 0.28531 -0.56112 -0.43913 -0.0090877 0.10002 -0.17218 0.28133 0.37672 -0.40756 0.15836 0.89113 1.2997 0.51508 -0.1948 0.051856 -0.9338 0.069955 -0.24876 -0.016723 -0.2031 -0.033558 -1.8132 0.11199 -0.31961 -0.13746 -0.45499 3.8856 1.214 -1.0046 -0.056274 0.0038776 -0.40669 0.29452 0.30171 0.038848 -0.56088 -0.46582 0.17155 0.33729 -0.15247 0.023771 0.51415 -0.21759 0.31965 -0.34741 0.41672 45 | their 0.41519 0.13167 -0.0569 -0.56765 0.49924 0.21288 -0.81949 0.32257 -0.065374 -0.055513 0.11837 0.36933 -0.46424 -0.072383 0.068214 0.0014523 -0.07322 -0.65668 0.11368 -0.91816 0.029319 0.38103 0.34032 -0.21496 -0.26681 -1.6509 -0.71668 -0.41272 0.48465 -0.62432 4.1939 1.4292 -0.45902 -0.51709 0.2626 0.51086 -0.23999 -0.06962 -0.4561 -0.48333 -0.39544 -0.53831 -0.070727 0.54496 0.2351 -0.18746 -0.2242 -0.11806 -0.34499 -0.86949 46 | : -0.17587 1.3508 -0.18159 0.45197 0.37554 -0.20926 0.014956 -0.87286 -0.54443 -0.25731 -0.521 0.62242 -0.52387 -0.061782 1.1805 -0.041984 0.10582 -0.20913 -0.54508 0.027728 -0.31329 0.13439 0.55192 0.75419 0.30996 -1.3301 -0.9862 -0.33747 0.17633 -0.37547 3.4474 0.14171 -0.65033 0.10118 0.00014796 -0.074707 0.19146 -0.47977 0.39628 -0.13403 0.43043 0.45704 0.59387 -0.40308 0.067302 1.2784 0.49927 0.15617 0.5665 0.61385 47 | or 0.26358 0.18747 0.044394 -0.19119 0.45455 0.66445 0.25855 -0.64886 -0.67653 0.045254 0.071081 0.3645 0.74863 -0.17489 0.28723 0.43277 -0.39184 -0.048568 -0.21373 -0.72992 0.13902 -0.23308 0.70256 0.2176 -0.20647 -1.415 -0.32587 -0.075019 0.88536 -0.56679 4.0296 0.019803 -0.57259 -0.060878 0.14667 0.16532 0.21188 -0.38358 0.42748 -0.096921 0.19285 0.021779 0.58562 0.97633 0.20384 -0.2162 -0.021486 -0.42936 0.52879 -0.12598 48 | its 0.76719 0.1239 -0.11119 0.13355 0.18356 0.057912 -0.3341 -0.60423 0.47637 0.25451 0.19491 -0.061142 -0.45815 -0.17374 -0.32716 0.33472 -0.3218 0.090518 -0.24682 -0.35467 0.55269 -0.33177 -0.58048 -0.55391 -0.64466 -1.8028 -0.65173 0.4374 0.051813 0.22641 4.2766 0.19443 -0.13428 -0.10278 -0.062464 -0.39073 -0.29381 -0.013531 -0.58142 -0.69717 -0.068871 -0.50049 -0.013803 -0.11011 -0.64282 0.4396 -0.22455 0.4893 -0.26152 -0.46886 49 | one 0.31474 0.41662 0.1348 0.15854 0.88812 0.43317 -0.55916 0.030476 -0.14623 -0.14273 -0.17949 -0.17343 -0.49264 0.26775 0.48799 -0.29537 0.18485 0.14937 -0.75009 -0.35651 -0.23699 0.1849 0.17237 0.23611 0.14077 -1.9031 -0.65353 -0.022539 0.10383 -0.43705 3.781 -0.044077 -0.046643 0.027274 0.51883 0.13353 0.23231 0.25599 0.060888 -0.065618 -0.15556 0.30818 -0.093586 0.33296 -0.14613 0.016332 -0.24251 -0.20526 0.07009 -0.11568 50 | after 0.38315 -0.3561 -0.1283 -0.19527 0.047629 0.21468 -0.98765 0.82962 -0.42782 -0.22879 0.10712 -0.3087 -1.2069 -0.17713 0.88841 0.0056658 -0.77305 -0.66913 -1.3384 0.34676 0.5044 0.5125 0.26826 -0.65313 -0.081516 -2.1658 0.57974 0.036345 0.0090949 0.25772 3.4402 0.20732 -0.52028 0.026453 0.17895 -0.017802 0.36605 0.34539 0.41357 -0.2497 -0.49227 0.17745 -0.43764 -0.3484 -0.057061 
-0.039578 -0.13517 -0.4258 0.13681 -0.77731 51 | new 0.19511 0.50739 0.0014709 0.041914 -0.16759 0.037517 -1.397 -0.92398 -0.24296 -0.15171 -0.47829 0.054612 -0.24986 0.38398 0.016182 0.34938 -0.22627 0.086618 -0.41001 -0.18139 0.75607 -0.0262 -0.69557 0.10874 -0.47539 -1.8095 -0.1694 -0.059863 -0.16806 -0.094546 3.661 0.041462 -0.29161 -0.69772 0.30805 -0.28457 0.13217 -0.007643 -0.09239 -0.49237 -0.27055 0.060425 0.095107 -0.23679 -0.086108 1.0243 -0.22779 0.030488 -0.14272 0.45411 52 | been 0.92884 -0.72457 0.068095 -0.3816 -0.038686 0.22314 -1.1041 0.0084314 -0.26638 -0.057147 0.33383 -0.02368 -0.7689 -0.17933 0.84499 0.28781 -0.12754 0.11154 -0.34022 -0.18687 -0.28446 0.32557 0.87015 -0.21355 -0.094175 -2.0216 -0.2176 0.45054 -0.14068 0.080753 3.7849 -0.4107 0.33195 -0.87998 0.17309 0.14065 0.22707 0.485 -0.51256 -0.021742 -0.69401 0.145 0.0082681 0.38385 0.16011 -0.35487 -1.1284 0.047085 -0.32297 -0.64192 53 | also 0.352 0.25323 -0.097659 0.26108 0.12976 0.33684 -0.73076 -0.42641 -0.22795 -0.083619 0.52963 0.34644 -0.32824 -0.28667 0.24876 0.22053 0.019356 -0.015447 -0.18319 -0.29729 0.11739 -0.071214 0.41086 0.013912 -0.17424 -1.5839 -0.051961 -0.18115 -0.76375 -0.17817 3.749 -0.045559 0.10721 -0.51313 0.25279 -0.051714 0.31911 0.28 -0.19937 0.17819 0.018623 0.47641 -0.15655 -0.38287 0.26989 -0.011186 -0.7244 0.036514 -0.011489 -0.025882 54 | we 0.57387 -0.32729 0.070521 -0.4198 0.862 -0.80001 -0.40604 0.15312 -0.29788 -0.1105 -0.097119 0.59642 -0.99814 -0.28148 1.0152 0.87544 1.0282 -0.05036 0.24194 -1.1426 -0.50601 0.64976 0.74833 0.020473 0.9595 -1.9204 -0.80656 0.29247 1.0009 -0.98565 4.0094 1.0407 -0.82849 -0.4847 -0.36146 -0.39552 0.27891 0.15312 0.15848 0.018686 -0.50905 -0.22916 0.1868 0.44946 0.10229 0.21882 -0.30608 0.48759 -0.18439 0.69939 55 | would 0.7619 -0.29773 0.51396 -0.13303 0.24156 0.066799 -0.54084 0.2071 -0.28225 -0.11638 0.21666 0.54908 -0.36744 -0.10543 0.81567 1.1743 0.56055 -0.3345 0.099767 -0.87465 0.12229 -0.18532 0.086783 -0.36343 0.008002 -2.2268 -0.20079 -0.10313 0.24318 -0.39819 3.7136 0.59088 -1.1013 -0.25292 0.0057067 -0.60475 0.35965 -0.059581 -0.029059 -0.3989 -0.52631 0.12436 0.13609 0.12699 -0.23032 -0.044567 -0.6545 0.43088 -0.22768 0.4026 56 | two 0.58289 0.36258 0.34065 0.36416 0.34337 0.79387 -0.9362 0.11432 -0.63005 -0.55524 -0.28706 -0.47143 -0.75673 0.63868 0.22479 -0.64652 -0.074314 -0.34903 -0.97285 -0.53981 0.015171 0.24479 0.62661 0.070447 -0.51629 -1.2004 0.3122 -0.44053 -0.29869 -0.56326 4.022 0.38463 -0.028468 0.068716 1.0746 0.48309 0.2475 0.22802 -0.35743 0.40392 -0.54738 0.15244 0.41 0.15702 0.0077935 -0.015106 -0.28653 -0.16158 -0.35169 -0.82555 57 | more 0.87943 -0.11176 0.4338 -0.42919 0.41989 0.2183 -0.3674 -0.60889 -0.41072 0.4899 -0.4006 -0.50159 0.24187 -0.1564 0.67703 -0.021355 0.33676 0.35209 -0.24232 -1.0745 -0.13775 0.29949 0.44603 -0.14464 0.16625 -1.3699 -0.38233 -0.011387 0.38127 0.038097 4.3657 0.44172 0.34043 -0.35538 0.30073 -0.09223 -0.33221 0.37709 -0.29665 -0.30311 -0.49652 0.34285 0.77089 0.60848 0.15698 0.029356 -0.42687 0.37183 -0.71368 0.30175 58 | ' -0.039369 1.2036 0.35401 -0.55999 -0.52078 -0.66988 -0.75417 -0.6534 -0.23246 0.58686 -0.40797 1.2057 -1.11 0.51235 0.1246 0.05306 0.61041 -1.1295 -0.11834 0.26311 -0.72112 -0.079739 0.75497 -0.023356 -0.56079 -2.1037 -1.8793 -0.179 -0.14498 -0.63742 3.181 0.93412 -0.6183 0.58116 0.58956 -0.19806 0.42181 -0.85674 0.33207 0.020538 -0.60141 0.50403 -0.083316 0.20239 0.443 -0.060769 -0.42807 -0.084135 0.49164 0.085654 59 | first -0.14168 0.41108 -0.31227 0.16633 
0.26124 0.45708 -1.2001 0.014923 -0.22779 -0.16937 0.34633 -0.12419 -0.65711 0.29226 0.62407 -0.57916 -0.33947 -0.22046 -1.4832 0.28958 0.081396 -0.21696 0.0056613 -0.054199 0.098504 -1.5874 -0.22867 -0.62957 -0.39542 -0.080841 3.5949 -0.16872 -0.39024 0.026912 0.52646 -0.022844 0.63289 0.62702 -0.22171 -0.45045 -0.14998 -0.27723 -0.46658 -0.44268 -0.43691 0.38455 0.1369 -0.25424 0.017821 -0.1489 60 | about 0.89466 0.36604 0.37588 -0.41818 0.58462 0.18594 -0.41907 -0.46621 -0.54903 0.02477 -0.90816 -0.48271 -0.050742 -0.74039 1.4377 -0.01974 -0.2384 0.43154 -0.6612 -0.41275 0.25475 0.93498 0.81404 -0.17296 0.61296 -1.8475 -0.27616 0.27701 0.42347 -0.11599 3.6243 0.12306 -0.023526 -0.24843 -0.22376 -0.53941 -0.62444 -0.27711 0.49406 0.020234 -0.2346 0.44512 0.53397 0.66654 -0.093662 -0.035203 -0.064194 0.55998 -0.66593 0.12177 61 | up 0.032286 -0.27071 0.68108 -0.27942 0.5797 -0.0081097 -0.82792 -0.53342 -0.47851 -0.068256 -0.46964 -0.31717 -0.49372 0.09808 0.49961 0.27305 0.099922 -0.16148 -0.69952 -0.70435 0.59084 0.62031 0.30467 -0.41578 -0.0222 -1.6312 0.54676 0.25754 0.44541 -0.72799 3.9129 0.80075 -0.18839 0.42435 0.039207 -0.093939 -0.39516 0.20976 0.59488 -0.3907 -0.31555 0.24074 0.41694 0.10415 -0.044305 -0.09516 0.25464 -0.56699 0.033216 -0.58123 62 | when 0.27062 -0.36596 0.097193 -0.50708 0.37375 0.16736 -0.94185 0.54004 -0.66669 -0.24236 0.25876 0.28084 -0.86643 -0.068961 0.90346 0.40877 -0.39563 -0.25604 -1.0316 -0.26669 -0.080584 0.40841 0.55885 -0.18299 0.46494 -2.2671 0.14102 0.19841 0.5153 -0.27608 3.3604 0.15123 -0.36693 -0.28804 0.076042 -0.076662 0.21897 0.39001 0.38684 -0.16961 -0.33674 0.37094 -0.45911 0.00066285 -0.17797 0.12467 -0.015418 -0.75256 -0.17335 -0.22587 63 | year -0.098793 0.26983 0.35304 -0.10727 -0.015183 0.053398 -1.0824 -0.53005 0.0095416 0.070428 -0.08925 -0.62666 -0.52662 -0.56571 1.8044 0.01686 -0.44871 -0.04146 -1.1136 0.17488 0.49561 -0.38238 0.30185 -0.70675 -0.35891 -1.5164 -0.024403 -0.54107 -0.36163 0.52803 3.6553 0.71214 -0.16995 0.36368 0.3399 -0.48186 0.10936 0.61428 0.15697 -0.70716 -1.2359 -0.014258 0.095588 -0.30634 -0.49741 -0.049394 -0.16697 0.11972 -0.37511 0.098348 64 | there 0.68491 0.32385 -0.11592 -0.35925 0.49889 0.042541 -0.40153 -0.36793 -0.61441 -0.41148 -0.3482 -0.21952 -0.22393 -0.64966 0.85443 0.33582 0.2931 0.16552 -0.55082 -0.61277 -0.14768 0.47551 0.65877 -0.07103 0.56147 -1.2651 -0.74117 0.36365 0.5623 -0.27365 3.8506 0.27645 -0.1009 -0.71568 0.18511 -0.12312 0.56631 -0.22377 -0.016831 0.57539 -0.51761 0.033823 0.19643 0.63498 -0.24866 0.038716 -0.50559 0.17874 -0.1693 0.062375 65 | all 0.19253 0.10006 0.063798 -0.087664 0.52217 0.39105 -0.41975 -0.45671 -0.34053 -0.11175 0.014754 0.31734 -0.50853 -0.1156 0.74303 0.097618 0.34407 -0.1213 -0.16938 -0.84088 -0.11231 0.40602 0.76801 0.091138 0.10782 -1.2673 -0.57709 -0.36208 0.34824 -0.75458 4.0426 0.94967 -0.22668 -0.35777 0.3413 0.13072 0.23045 -0.036997 -0.25889 0.12977 -0.39031 -0.049607 0.45766 0.56782 -0.46165 0.41933 -0.5492 0.081191 -0.30485 -0.30513 66 | -- 0.49806 1.2382 0.86976 0.0025293 -0.56263 -0.38781 -0.47155 -0.95717 -0.12314 0.63262 -0.73375 -0.063457 -0.66477 0.68096 0.63595 0.81748 0.15183 -0.97582 0.23674 0.17767 -0.41731 0.12586 0.88072 -0.049402 -0.83049 -2.4283 -1.3054 0.165 -0.056037 -0.011953 3.0108 0.89362 -0.49819 0.56545 0.95535 -0.68507 -0.1452 0.14026 0.22841 0.1977 -0.92491 1.5205 0.81055 0.059992 0.83629 0.17134 -0.5262 -0.1575 0.17409 0.1079 67 | out 0.32112 -0.69306 0.47922 -0.54602 0.28352 0.20346 -0.98445 -0.14103 -0.13147 -0.085975 
-0.49509 0.00276 -1.1173 0.33729 0.61312 -0.06711 0.3538 -0.35183 -0.58191 -0.69525 -0.025032 0.61675 0.78522 -0.19594 0.26324 -1.8976 0.14645 0.48885 0.61818 -1.012 3.7285 0.66615 -0.33364 0.31896 -0.15174 0.3098 0.04967 0.27144 0.34595 -0.08185 -0.37469 0.39981 0.084925 0.31237 -0.12677 0.036322 -0.069533 -0.43547 -0.1108 -0.585 68 | she 0.060382 0.37821 -0.75142 -0.72159 0.58648 0.79126 -0.72947 0.68248 -0.12999 -0.22988 0.11595 0.22427 -0.44679 -0.11515 1.0334 -0.088019 -0.78531 0.34305 -0.11457 -0.11905 0.45883 1.6333 0.68546 0.22308 1.0099 -2.6332 -0.52128 0.25665 0.023468 -0.82616 3.1084 0.27219 -0.29227 -0.47259 -0.12297 -0.13545 0.11192 0.86438 0.33121 -0.96616 -0.044785 -0.06674 0.0030367 -0.33905 0.017784 -0.58499 -0.005014 -1.257 -0.060723 0.42247 69 | other 0.64756 0.16 0.029191 0.35118 0.089119 0.61115 -0.66362 -0.51724 -0.46521 -0.08845 0.0502 0.26329 0.12407 0.043832 0.17283 0.01317 0.14168 -0.15827 -0.10427 -0.9307 0.21646 -0.10753 0.62087 0.36761 -0.48144 -1.28 -0.55152 -0.72023 -0.17097 -0.47993 4.0165 0.47054 0.093614 -0.86341 0.50881 0.33353 -0.35962 -0.16648 -0.31803 0.49003 -0.36697 0.32051 0.70932 0.62878 0.70128 0.1302 -0.73769 0.10325 -0.30964 -0.44213 70 | people 0.95281 -0.20608 0.55618 -0.46323 0.73354 0.029137 -0.19367 -0.090066 -0.22958 -0.19058 -0.34857 -1.0231 0.743 -0.5489 0.88484 -0.14051 0.0040139 0.58448 0.10767 -0.44657 -0.43205 0.9868 0.78288 0.51513 0.85788 -1.7713 -0.88259 -0.59728 0.084934 -0.48112 3.9678 0.8893 -0.27064 -0.44094 -0.26213 0.085597 0.022099 -0.58376 0.10908 0.77973 -0.95447 0.40482 0.8941 0.65251 0.39858 0.20884 -1.3281 -0.10882 -0.22822 -0.46303 71 | n't 0.028702 -0.2163 0.27153 -0.28594 0.42404 -0.18155 -0.85966 0.30447 -0.51645 0.3559 -0.10131 0.8152 -0.77987 -0.044123 1.3768 0.96711 0.59098 -0.16521 0.094372 -1.2292 -0.59056 0.42275 0.52645 0.17536 0.62117 -2.3875 -0.90795 0.26418 1.1507 -1.4289 3.511 0.96796 -0.5905 -0.21382 -0.13049 -0.34336 0.15822 0.2306 0.55332 -0.59173 -0.4403 0.23583 0.082353 0.83847 0.26719 0.063263 -0.080607 0.018159 -0.22789 1.0025 72 | her 0.13403 0.89178 -0.76761 -0.64184 0.86204 1.3122 -0.64018 0.82067 0.32783 0.021457 -0.095194 0.40825 -0.63602 -0.018275 0.69708 -0.29531 -1.1912 -0.23897 0.34341 -0.33196 0.23702 1.8364 0.12295 -0.18624 0.86503 -2.636 -0.7791 0.203 0.18985 -0.79897 2.9882 0.44336 -0.28367 -0.19588 0.061875 0.38558 -0.027622 0.71847 0.17156 -1.2168 0.081636 0.17293 -0.31718 -0.37039 0.18977 -0.89175 0.18492 -1.6251 0.039134 -0.10279 73 | percent -0.2366 -0.40252 1.7612 0.010445 0.8568 0.84684 -0.84495 -1.4338 -0.54639 0.73395 0.63492 -1.7583 0.63655 -0.9863 0.78947 0.61309 -0.81924 -0.65641 -0.64784 -0.38374 0.94314 -0.1943 0.47471 -0.85115 -0.53061 -0.82218 0.1577 -0.37006 -0.15451 0.86707 3.8332 1.0311 0.48523 0.73698 0.49241 -2.1361 -0.73352 0.51982 1.2672 -0.48397 -0.97365 -0.041408 1.1572 -0.072509 -0.60518 -0.44327 -0.75365 0.48777 1.0828 0.073102 74 | than 0.63139 0.15527 0.78529 -0.49967 0.61277 0.38457 -0.24403 -0.65573 -0.34068 0.48923 -0.28453 -0.72291 0.30131 -0.31671 0.98247 -0.020175 0.17482 0.54319 -0.64372 -0.97383 -0.32015 0.053901 0.47347 -0.1922 -0.025826 -1.419 -0.44523 0.018065 0.34342 0.088355 4.0942 0.45804 0.32831 -0.069244 0.26257 -0.2969 -0.28702 0.31656 -0.12812 -0.33309 -0.49709 0.43707 0.67268 0.72685 -0.21323 -0.095752 -0.25779 0.13059 -0.87146 0.19353 75 | over 0.12972 0.088073 0.24375 0.078102 -0.12783 0.27831 -0.48693 0.19649 -0.39558 -0.28362 -0.47425 -0.59317 -0.58804 -0.31702 0.49593 0.0087594 0.039613 -0.42495 -0.97641 -0.46534 0.020675 0.086042 
0.39317 -0.51255 -0.17913 -1.8333 0.5622 0.41626 0.075127 0.02189 3.784 0.71067 -0.073943 0.15373 -0.3853 -0.070163 -0.35374 0.074501 -0.084228 -0.45548 -0.081068 0.39157 0.173 0.2254 -0.12836 0.40951 -0.26079 0.090912 -0.60515 -0.9827 76 | into 0.66749 -0.41321 0.065755 -0.46653 0.00027619 0.18348 -0.65269 0.093383 -0.0086802 -0.18874 -0.0063057 0.044894 -0.66801 0.48506 -0.1185 0.19968 0.1818 0.033144 -0.59108 -0.21829 0.41438 0.05674 0.42155 0.27798 -0.11322 -1.9227 0.035513 0.61928 0.62206 -0.63987 3.9115 -0.021078 -0.24685 -0.13922 -0.22545 0.59131 -0.7322 0.1162 0.4155 -0.15188 -0.14933 0.040739 -0.10415 0.23733 -0.438 0.06059 0.55073 -0.96571 -0.26875 -1.1741 77 | last 0.32269 -0.11823 0.15135 0.43472 0.0047741 -0.076197 -1.1967 0.25108 -0.33441 -0.11988 -0.34367 -0.97407 -1.0005 -0.20005 1.5067 -0.0010902 -0.58248 -0.49877 -1.2107 0.19451 0.054259 0.16623 0.26524 -0.7074 -0.47836 -1.9295 0.11138 0.16526 -0.2542 0.26121 3.4639 0.34021 -0.25121 0.31737 0.2334 -0.40882 0.32076 0.31702 0.021898 -0.56169 -0.90129 0.23734 -0.057173 -0.2459 -0.061878 0.20674 -0.48696 0.3648 -0.1861 -0.34686 78 | some 0.92871 -0.10834 0.21497 -0.50237 0.10379 0.22728 -0.54198 -0.29008 -0.64607 0.12664 -0.41487 -0.29343 0.36855 -0.41733 0.69116 0.067341 0.19715 -0.030465 -0.21723 -1.2238 0.0095469 0.19594 0.56595 -0.067473 0.059208 -1.3909 -0.89275 -0.13546 0.162 -0.4021 4.1644 0.37816 0.15797 -0.48892 0.23131 0.23258 -0.25314 -0.19977 -0.12258 0.1562 -0.31995 0.38314 0.47266 0.877 0.32223 0.0013292 -0.4986 0.5558 -0.70359 -0.52693 79 | government 0.38797 -1.0825 0.45025 -0.23341 0.086307 -0.25721 -0.18281 -0.10037 -0.50099 -0.58361 -0.052635 -0.14224 0.0090217 -0.38308 0.18503 0.42444 0.10611 -0.1487 1.0801 0.065757 0.64552 0.1908 -0.14561 -0.87237 -0.35568 -2.435 0.28428 -0.33436 -0.56139 0.91404 4.0129 0.072234 -1.2478 -0.36592 -0.50236 0.011731 -0.27409 -0.50842 -0.2584 -0.096172 -0.67109 0.40226 0.27912 -0.37317 -0.45049 -0.30662 -1.6426 1.1936 0.65343 -0.76293 80 | time 0.02648 0.33737 0.065667 -0.11609 0.41651 -0.21142 -0.69582 0.2822 -0.36077 -0.13822 0.012094 0.086227 -0.84638 0.057195 1.1582 0.14703 -0.0049197 -0.24899 -0.96014 -0.3038 0.23972 0.21058 0.40608 0.17789 0.55253 -1.6357 -0.17784 -0.45222 0.45805 0.14239 3.7087 0.40289 -0.4083 -0.29304 0.030857 -0.15361 0.10607 0.63397 0.12397 -0.25349 -0.10344 0.0069768 -0.17328 0.35536 -0.46369 0.15285 0.41475 -0.3398 -0.23043 0.19069 81 | $ 0.43889 0.90301 1.406 0.20469 0.69453 0.26449 -0.91118 -1.4847 0.20981 0.52693 -1.3998 -0.31563 0.73779 -1.0641 1.8671 -0.3536 -0.66203 0.41229 -0.87078 -0.6704 1.3467 -0.026579 -0.18787 -1.1795 -1.4423 -1.0407 0.38038 -0.40186 0.21573 -0.7167 3.2422 0.61623 -0.014502 1.4616 0.54571 -0.69571 -0.12738 0.015536 1.2232 -1.4741 0.19271 0.41512 1.1185 0.67059 -1.3985 -0.13803 -0.37563 0.074431 -0.6935 0.81354 82 | you -0.0010919 0.33324 0.35743 -0.54041 0.82032 -0.49391 -0.32588 0.0019972 -0.23829 0.35554 -0.60655 0.98932 -0.21786 0.11236 1.1494 0.73284 0.51182 0.29287 0.28388 -1.359 -0.37951 0.50943 0.7071 0.62941 1.0534 -2.1756 -1.3204 0.40001 1.5741 -1.66 3.7721 0.86949 -0.80439 0.1839 -0.34332 0.010714 0.23969 0.066748 0.70117 -0.73702 0.20877 0.11564 -0.1519 0.85908 0.2262 0.16519 0.36309 -0.45697 -0.048969 1.1316 83 | years 0.16962 0.4344 -0.042106 -0.63324 -0.1278 0.53668 -1.0662 -0.32629 -0.50079 0.10247 -0.021968 -0.35105 -0.64153 -0.42454 1.3836 -0.13543 -0.24754 0.22156 -0.65563 0.44424 0.17017 0.35816 0.56379 -0.48044 -0.14765 -1.629 -0.31308 -0.47217 0.02659 0.47603 3.4619 0.12069 -0.045344 -0.47303 
0.28569 -0.077584 -0.16447 0.7181 0.2617 -0.16841 -1.245 -0.076188 0.17493 0.24507 -0.63801 -0.21096 -0.49918 -0.50108 -0.7704 -0.32234 84 | if 0.49861 -0.12284 0.44772 -0.082727 0.78117 0.12032 -0.044677 0.47959 -0.24538 0.07315 0.13542 0.47475 -0.45838 -0.125 1.2397 1.1176 0.52392 -0.25142 0.094939 -0.87224 -0.45383 -0.0098866 0.46122 0.23339 0.3067 -2.4232 -0.21251 0.37548 0.92848 -0.47064 3.6259 0.3914 -0.94158 -0.31952 -0.12849 -0.47342 0.41335 0.041173 0.30079 -0.61249 -0.23717 0.24716 -0.0447 0.62406 -0.27345 0.0087021 -0.22906 0.26395 -0.062214 0.6292 85 | no 0.34957 0.40147 -0.012561 0.13743 0.4008 0.46682 -0.09743 -0.0024548 -0.33564 -0.004639 -0.059101 0.27532 -0.3974 -0.29267 0.97442 0.4188 0.18395 -0.20602 -0.061437 -0.61576 -0.53471 0.41536 0.34851 -0.31878 0.27404 -1.832 -0.82363 0.48816 1.1372 -0.38025 3.8114 0.2551 -0.70637 -0.2582 0.040929 -0.097378 0.79571 -0.49484 0.1087 0.14838 -0.1839 0.13312 0.21469 0.53932 -0.19338 -0.42216 -0.61411 0.70374 0.57591 0.43506 86 | world -0.41486 0.71848 -0.3045 0.87445 0.22441 -0.56488 -0.37566 -0.44801 0.61347 -0.11359 0.74556 -0.10598 -1.1882 0.50974 1.3511 0.069851 0.73314 0.26773 -1.1787 -0.148 0.039853 0.033107 -0.27406 0.25125 0.41507 -1.6188 -0.81778 -0.73892 -0.28997 0.57277 3.4719 0.73817 -0.044495 -0.15119 -0.93503 -0.13152 -0.28562 0.76327 -0.83332 -0.6793 -0.39099 -0.64466 1.0044 -0.2051 0.46799 0.99314 -0.16221 -0.46022 -0.37639 -0.67542 87 | can 0.8052 0.37121 0.55933 -0.011405 0.17319 0.195 0.057701 -0.12447 -0.011342 0.20654 0.41079 0.89578 0.31893 0.030787 0.60194 1.2023 0.68283 -0.13267 0.16984 -1.4674 -0.41844 -0.47395 0.7267 0.61088 0.44584 -1.4793 -0.50037 -0.12249 0.75994 -0.77112 3.9653 0.38077 -0.6439 -0.84899 0.07554 0.17522 0.30117 0.12964 0.27253 -0.32951 0.34211 0.15608 0.20953 0.97948 0.35927 0.19116 0.45494 -0.1895 -0.20902 0.47612 88 | three 0.40545 0.43805 0.36237 0.25683 0.38254 0.68255 -0.97853 0.12741 -0.46129 -0.54809 -0.35384 -0.56697 -0.65756 0.50184 0.53248 -0.77956 -0.089944 -0.37572 -1.1097 -0.30734 -0.022657 0.11632 0.67704 -0.051499 -0.59719 -1.02 0.24289 -0.60216 -0.35183 -0.54053 3.9844 0.41521 0.040419 0.26909 1.1193 0.52924 0.37308 0.28924 -0.14714 0.23566 -0.72709 0.053276 0.45373 0.20374 -0.13384 0.015313 -0.22037 -0.15662 -0.30289 -0.77536 89 | do 0.29605 -0.13841 0.043774 -0.38744 0.12262 -0.6518 -0.2824 0.090312 -0.55186 0.3206 0.0037422 0.93229 -0.22034 -0.21922 0.9217 0.75724 0.84892 -0.0042197 0.53626 -1.2667 -0.61028 0.167 0.82753 0.65765 0.48959 -1.9744 -1.149 -0.21461 0.80539 -1.4745 3.749 1.0141 -1.1293 -0.52661 -0.12029 -0.27931 0.065092 -0.043639 0.60426 -0.20892 -0.45739 0.010441 0.41458 0.689 0.14468 -0.031973 -0.048073 -0.00011279 0.13854 0.96954 90 | ; -0.11604 1.1429 0.026043 -0.0084921 0.31898 0.65984 -0.25055 -0.98368 -0.83334 -0.21147 -0.37307 0.41228 0.029478 -0.42162 0.26829 -0.3826 -0.26704 -0.17551 -0.69986 0.08844 -0.05785 0.14381 0.43372 0.31841 -0.28405 -0.96445 0.09619 -0.53046 0.59122 -0.42788 3.3692 0.067626 -0.10391 -0.047914 0.11318 -0.056322 0.14699 0.057051 1.0275 0.41663 0.69818 0.54682 0.91148 -0.26655 -0.35268 1.1042 0.074058 -0.86765 0.074969 0.80395 91 | president -0.11875 0.6722 0.19444 0.55269 0.53698 -0.37237 -0.73494 -0.30575 -0.92601 -0.43276 0.026956 0.66861 -0.79097 -0.015932 0.53918 0.30341 -0.67042 0.0051129 0.62272 -0.55823 -0.10887 0.57305 -0.016149 -1.1889 -0.24318 -2.6289 0.41262 -0.12904 -1.3238 0.64731 2.3595 0.34048 -1.9889 -0.79084 -0.79739 -0.87998 -0.72991 0.011697 0.090612 -0.17287 -0.83274 1.1932 -0.75211 -1.1603 -0.10074 
0.60224 -1.3739 0.33674 -0.31224 0.097583 92 | only 0.24887 0.21487 0.22899 -0.12671 0.63105 0.51149 -0.4651 0.068288 -0.30937 -0.0070085 0.19937 -0.18019 -0.18827 -0.28697 0.78777 0.067418 0.20876 -0.077169 -0.69009 -0.42171 -0.39751 -0.050562 0.50292 0.10793 0.23047 -1.5503 -0.18474 -0.18316 0.26182 -0.2971 3.9005 0.27993 -0.16575 -0.19005 0.46649 -0.034911 0.57901 0.44878 -0.29949 -0.26163 -0.16132 0.022983 0.29275 0.46456 -0.59639 0.12681 -0.35771 0.029972 -0.14917 -0.084244 93 | state -0.94222 -0.056474 0.089059 0.71375 -0.17706 -0.13514 -0.27893 0.32983 0.19097 -0.92034 -0.51169 -0.45742 0.45137 -0.52448 -0.11619 0.079781 0.19833 0.32391 0.42857 0.38807 0.091932 -0.58581 0.02845 -0.15399 -0.40969 -2.6746 0.14042 -0.13159 -0.42858 -0.35305 3.4471 -0.26412 -0.61443 -0.99791 0.060979 -0.1172 -0.23474 0.15544 0.5794 0.29043 -1.1877 0.51911 0.19409 -0.010452 -1.2179 0.67739 -1.039 0.53553 0.20895 0.10081 94 | million 1.1414 0.045188 1.8586 -0.050447 0.39759 0.31558 -0.66984 -1.6864 0.151 0.69105 -0.87842 -0.77406 0.67346 -1.6115 1.8154 -0.48631 0.09541 0.56202 -1.2105 0.60549 0.66933 -0.3355 0.30825 -1.2069 -0.58988 -0.8496 0.18481 -0.99578 -0.35671 -0.40894 3.6743 0.79897 0.31498 1.3646 0.0036529 -0.51161 0.24875 -0.21306 0.58295 -0.70832 -0.33408 0.071599 1.4238 0.21819 -1.1946 0.38572 -1.3817 0.35649 -0.46031 -0.13773 95 | could 0.90754 -0.38322 0.67648 -0.20222 0.15156 0.13627 -0.48813 0.48223 -0.095715 0.18306 0.27007 0.41415 -0.48933 -0.0076005 0.79662 1.0989 0.53802 -0.54468 -0.16063 -0.98348 -0.19188 -0.2144 0.19959 -0.31341 0.24101 -2.2662 -0.25926 -0.10898 0.66177 -0.48104 3.6298 0.45397 -0.64484 -0.52244 0.042922 -0.16605 0.097102 0.044836 0.20389 -0.46322 -0.46434 0.32394 0.25984 0.40849 0.20351 0.058722 -0.16408 0.20672 -0.1844 0.071147 96 | us 0.19086 0.24339 1.2768 -0.038207 0.6094 -0.70188 0.040862 -0.44903 0.0080416 -0.18819 -0.68578 -0.12465 -0.32855 -0.073507 0.79112 0.31981 0.081126 -0.033057 -0.6007 0.014536 0.42773 0.71318 0.13327 -0.64247 0.066402 -2.2346 0.013668 -0.45647 0.40542 -0.0042052 3.4561 0.54602 -0.3789 0.58198 -0.22852 -0.8409 -0.30465 -0.69669 -0.4232 -0.81757 0.036113 0.25739 1.745 -0.61482 0.41547 0.40002 -0.51528 0.89973 -0.54324 0.69393 97 | most 0.53248 0.030684 -0.12955 -0.15673 0.25168 0.20269 -0.71869 -0.27819 -0.47384 0.49715 -0.12525 -0.249 0.23788 0.11087 0.44788 -0.10767 0.44033 0.16702 -0.34068 -0.5413 -0.56092 -0.12457 0.23586 0.39872 0.13578 -1.4765 -1.1334 -0.23475 0.17915 0.20182 3.9566 0.0092012 0.61391 -0.75382 0.42119 0.092947 -0.2623 0.48914 -0.78757 -0.10654 -0.68392 0.34472 0.4279 0.71161 0.051375 0.33759 -0.72084 0.069335 -0.34333 -0.067937 98 | _ 0.14994 0.73181 0.42015 0.18773 0.22898 0.096767 -0.67454 -0.50735 -0.54904 0.12615 -0.79615 -0.43379 -0.38704 0.31491 1.0122 0.28632 -0.054057 -0.18844 -0.35734 -0.41306 -0.39265 0.49582 0.3837 0.27166 -0.32146 -1.8197 -0.68194 0.074313 0.85356 -0.24763 3.0711 0.38602 -0.63301 -0.023847 0.079512 -0.33825 -0.29351 0.13634 0.30527 -0.18491 -0.25944 0.52184 0.5986 0.57588 0.22136 0.61199 -0.14507 -0.15006 -0.41098 0.39281 99 | against -0.61258 -0.81097 -0.18426 0.33997 -0.22861 0.53968 0.29663 0.51186 -0.80497 0.12117 -0.34306 -0.35708 -1.1692 -0.90632 0.1854 -0.43402 0.2754 -0.92921 -1.2241 0.50564 -0.46115 -0.15904 0.32354 -0.38564 -1.0403 -2.6112 0.84161 -0.59595 0.28015 -0.24324 3.2053 0.64918 -0.45331 -0.42506 -0.19658 -0.018363 -0.045932 -0.11967 -0.64881 -0.75525 -0.14162 0.088434 0.41846 0.14613 0.39816 0.52819 -0.795 0.014036 0.17373 -0.67675 100 | u.s. 
-0.28052 -0.083189 1.0143 0.36427 0.38697 -0.2753 -0.64484 -0.23685 0.51044 -0.9375 -0.45352 -0.46154 -0.11637 0.00085619 0.3517 0.012925 -0.3284 -0.43992 -0.52025 -0.22776 0.74714 0.090108 -0.033931 -1.0446 -0.48629 -2.5933 0.43117 -0.40985 0.12545 0.39377 3.1525 -0.12479 -0.11758 -0.32871 0.29547 -0.87783 -0.47704 -0.14781 -0.79774 -0.40848 -0.5714 0.45574 1.7386 -0.82407 0.45335 0.18106 -0.66649 0.82415 -0.77676 0.66471 101 | -------------------------------------------------------------------------------- /test_trees.txt: -------------------------------------------------------------------------------- 1 | (3 (2 (2 The) (2 Rock)) (4 (3 (2 is) (4 (2 destined) (2 (2 (2 (2 (2 to) (2 (2 be) (2 (2 the) (2 (2 21st) (2 (2 (2 Century) (2 's)) (2 (3 new) (2 (2 ``) (2 Conan)))))))) (2 '')) (2 and)) (3 (2 that) (3 (2 he) (3 (2 's) (3 (2 going) (3 (2 to) (4 (3 (2 make) (3 (3 (2 a) (3 splash)) (2 (2 even) (3 greater)))) (2 (2 than) (2 (2 (2 (2 (1 (2 Arnold) (2 Schwarzenegger)) (2 ,)) (2 (2 Jean-Claud) (2 (2 Van) (2 Damme)))) (2 or)) (2 (2 Steven) (2 Segal))))))))))))) (2 .))) 2 | (4 (4 (4 (2 The) (4 (3 gorgeously) (3 (2 elaborate) (2 continuation)))) (2 (2 (2 of) (2 ``)) (2 (2 The) (2 (2 (2 Lord) (2 (2 of) (2 (2 the) (2 Rings)))) (2 (2 '') (2 trilogy)))))) (2 (3 (2 (2 is) (2 (2 so) (2 huge))) (2 (2 that) (3 (2 (2 (2 a) (2 column)) (2 (2 of) (2 words))) (2 (2 (2 (2 can) (1 not)) (3 adequately)) (2 (2 describe) (2 (3 (2 (2 co-writer\/director) (2 (2 Peter) (3 (2 Jackson) (2 's)))) (3 (2 expanded) (2 vision))) (2 (2 of) (2 (2 (2 J.R.R.) (2 (2 Tolkien) (2 's))) (2 Middle-earth))))))))) (2 .))) 3 | (3 (3 (2 (2 (2 (2 (2 Singer\/composer) (2 (2 Bryan) (2 Adams))) (2 (2 contributes) (2 (2 (2 a) (2 slew)) (2 (2 of) (2 songs))))) (2 (2 --) (2 (2 (2 (2 a) (2 (2 few) (3 potential))) (2 (2 (2 hits) (2 ,)) (2 (2 (2 a) (2 few)) (1 (1 (2 more) (1 (2 simply) (2 intrusive))) (2 (2 to) (2 (2 the) (2 story))))))) (2 --)))) (2 but)) (3 (4 (2 the) (3 (2 whole) (2 package))) (2 (3 certainly) (3 (2 captures) (2 (1 (2 the) (2 (2 (2 intended) (2 (2 ,) (2 (2 er) (2 ,)))) (3 spirit))) (2 (2 of) (2 (2 the) (2 piece)))))))) (2 .)) 4 | (2 (2 (2 You) (2 (2 'd) (2 (2 think) (2 (2 by) (2 now))))) (2 (2 America) (2 (2 (2 would) (1 (2 have) (2 (2 (2 had) (1 (2 enough) (2 (2 of) (2 (2 plucky) (2 (2 British) (1 eccentrics)))))) (4 (2 with) (4 (3 hearts) (3 (2 of) (3 gold))))))) (2 .)))) 5 | (3 (2 Yet) (3 (2 (2 the) (2 act)) (3 (4 (3 (2 is) (3 (2 still) (4 charming))) (2 here)) (2 .)))) 6 | (4 (2 (2 Whether) (2 (2 (2 (2 or) (1 not)) (3 (2 you) (2 (2 're) (3 (3 enlightened) (2 (2 by) (2 (2 any) (2 (2 of) (2 (2 Derrida) (2 's))))))))) (2 (2 lectures) (2 (2 on) (2 (2 ``) (2 (2 (2 (2 (2 (2 the) (2 other)) (2 '')) (2 and)) (2 ``)) (2 (2 the) (2 self)))))))) (3 (2 ,) (3 (2 '') (3 (2 Derrida) (3 (3 (2 is) (4 (2 an) (4 (4 (2 undeniably) (3 (4 (3 fascinating) (2 and)) (4 playful))) (2 fellow)))) (2 .)))))) 7 | (4 (3 (2 (2 Just) (2 (2 the) (2 labour))) (3 (2 involved) (3 (2 in) (4 (2 creating) (3 (3 (2 the) (3 (3 layered) (2 richness))) (3 (2 of) (3 (2 (2 the) (2 imagery)) (2 (2 in) (3 (2 (2 this) (2 chiaroscuro)) (2 (2 of) (2 (2 (2 madness) (2 and)) (2 light)))))))))))) (3 (3 (2 is) (4 astonishing)) (2 .))) 8 | (3 (3 (2 Part) (3 (2 of) (4 (2 (2 the) (3 charm)) (2 (2 of) (2 (2 Satin) (2 Rouge)))))) (3 (3 (2 is) (3 (2 that) (3 (2 it) (2 (1 (2 avoids) (2 (2 the) (1 obvious))) (3 (2 with) (3 (3 (3 humour) (2 and)) (2 lightness))))))) (2 .))) 9 | (4 (2 (2 a) (2 (2 screenplay) (2 more))) (3 (4 ingeniously) (2 (2 constructed) (2 (2 (2 (2 than) (2 ``)) (2 
Memento)) (2 ''))))) 10 | (3 (2 ``) (3 (2 (2 Extreme) (2 Ops)) (3 (2 '') (4 (4 (3 exceeds) (2 expectations)) (2 .))))) 11 | (4 (3 (3 Good) (4 fun)) (4 (2 ,) (4 (4 (4 (4 (4 (4 (4 (4 (4 (3 (3 good) (2 action)) (2 ,)) (3 (3 good) (2 acting))) (2 ,)) (3 (3 good) (2 dialogue))) (2 ,)) (4 (3 good) (2 pace))) (2 ,)) (3 (3 good) (2 cinematography))) (2 .)))) 12 | (2 (2 You) (2 (1 (2 Should) (2 (2 (1 (2 Pay) (2 (2 (2 Nine) (2 Bucks)) (2 (2 for) (2 This)))) (2 :)) (1 (2 Because) (2 (2 you) (2 (2 can) (2 (2 (2 (2 (2 hear) (2 (2 about) (1 (1 (1 suffering) (2 (2 Afghan) (2 refugees))) (2 (2 on) (2 (2 the) (2 news)))))) (2 and)) (2 still)) (1 (2 be) (2 unaffected)))))))) (2 .))) 13 | (4 (2 (2 Dramas) (2 (2 like) (2 this))) (2 (3 (2 make) (2 (2 it) (2 human))) (2 .))) 14 | (2 (2 (3 (2 (3 (3 (2 A) (3 (3 thunderous) (2 ride))) (3 (2 at) (3 (3 (2 first) (2 (2 ,) (2 (2 quiet) (2 cadences)))) (3 (2 of) (4 (4 pure) (3 finesse)))))) (2 (2 (2 are) (2 few)) (2 (2 and) (2 (2 far) (2 between))))) (2 ;)) (1 (2 (2 their) (1 shortage)) (3 (2 dilutes) (2 (2 (2 the) (2 potency)) (2 (2 of) (3 (3 (2 otherwise) (3 respectable)) (2 action))))))) (2 .)) 15 | (4 (2 Still) (4 (2 ,) (4 (2 (2 this) (2 flick)) (3 (4 (2 is) (4 (3 (3 (4 fun) (2 ,)) (2 and)) (3 (2 host) (3 (2 to) (3 (2 some) (4 (4 (3 truly) (4 excellent)) (2 sequences))))))) (2 .))))) 16 | (3 (2 (2 (2 (2 Australian) (2 (2 actor\/director) (2 (2 John) (2 Polson)))) (2 and)) (3 (4 award-winning) (2 (2 English) (2 (2 cinematographer) (2 (2 Giles) (2 Nuttgens)))))) (2 (3 (3 (2 make) (4 (2 a) (4 (4 terrific) (2 effort)))) (2 (2 at) (3 (2 (2 disguising) (2 (2 the) (1 obvious))) (3 (2 with) (3 (2 (3 energy) (2 and)) (3 innovation)))))) (2 .))) 17 | (3 (2 You) (2 (2 (1 (2 (1 (2 (2 walk) (1 out)) (2 (2 of) (2 (2 The) (2 (3 Good) (2 Girl))))) (2 (2 with) (2 (2 mixed) (2 emotions)))) (2 --)) (2 (2 (1 disapproval) (2 (2 of) (2 Justine))) (3 (2 combined) (2 (2 with) (2 (2 (2 a) (2 tinge)) (2 (2 of) (2 (2 understanding) (2 (2 for) (2 (2 her) (2 actions)))))))))) (2 .))) 18 | (2 (2 (2 (2 (2 Post) (2 9\/11)) (2 (2 the) (2 (3 philosophical) (2 message)))) (2 (2 (2 (2 of) (2 ``)) (2 (2 Personal) (2 (4 Freedom) (3 First)))) (2 ''))) (1 (1 (2 (2 might) (1 not)) (2 (2 be) (2 (2 as) (2 (2 palatable) (2 (2 as) (2 intended)))))) (2 .))) 19 | (4 (2 (3 Absorbing) (2 character)) (2 (2 (2 study) (2 (2 by) (2 (2 André) (2 Turpin)))) (2 .))) 20 | (3 (3 (2 If) (3 (2 you) (3 (4 love) (2 (2 reading) (2 (2 and\/or) (3 poetry)))))) (3 (2 ,) (3 (2 (2 then) (2 (2 by) (2 all))) (3 (2 (2 means) (4 (2 (3 check) (2 it)) (1 out))) (2 .))))) 21 | (4 (2 You) (4 (3 (2 (2 'll) (2 probably)) (4 (4 love) (2 it))) (2 .))) 22 | (3 (2 ``) (3 (2 Frailty) (4 (2 '') (3 (4 (3 (2 has) (3 (2 been) (3 (4 (3 (3 written) (3 (2 so) (3 well))) (2 ,)) (2 (2 (2 that) (2 even)) (1 (2 (2 a) (2 simple)) (1 (2 ``) (0 Goddammit))))))) (2 !)) (2 ''))))) 23 | (2 (2 (2 near) (2 (2 the) (2 end))) (3 (3 (2 takes) (2 (2 on) (2 (2 a) (2 (2 whole) (2 (2 other) (2 meaning)))))) (2 .))) 24 | (4 (2 Grenier) (4 (3 (4 (4 (2 is) (4 terrific)) (2 ,)) (3 (3 (2 bringing) (3 (2 an) (3 (3 unforced) (3 (2 ,) (3 (2 rapid-fire) (2 delivery)))))) (2 (2 to) (2 (2 (2 (2 (2 (2 Toback) (2 's)) (2 Heidegger)) (2 -)) (2 and)) (2 (2 Nietzsche-referencing) (2 dialogue)))))) (2 .))) 25 | (2 (2 (2 The) (2 (2 Sundance) (3 (2 Film) (3 Festival)))) (2 (2 (2 has) (1 (2 (2 become) (2 (2 so) (1 buzz-obsessed))) (2 (2 that) (2 (2 (2 (3 fans) (2 and)) (2 producers)) (2 (2 (2 (2 descend) (2 (2 upon) (2 (2 Utah) (2 each)))) (2 January)) (2 (2 to) (2 (2 (2 ferret) (1 out)) (3 
(2 The) (3 (2 Next) (3 (4 Great) (2 Thing))))))))))) (2 .))) 26 | (2 (2 `) (2 (2 (2 (2 (2 (2 (2 Tadpole) (2 ')) (2 (2 was) (2 (2 one) (2 (2 of) (2 (2 (2 the) (2 films)) (2 (2 so) (2 (2 declared) (2 (2 this) (2 year))))))))) (2 ,)) (2 but)) (2 (2 it) (2 (2 's) (2 (2 (2 really) (2 more)) (2 (2 of) (2 (2 The) (3 (2 Next) (3 (3 Pretty) (3 (3 Good) (2 Thing)))))))))) (2 .))) 27 | (4 (2 (2 The) (2 actors)) (3 (4 (2 are) (3 fantastic)) (2 .))) 28 | (3 (2 They) (3 (4 (2 are) (3 (2 what) (3 (2 (2 makes) (2 it)) (4 (2 worth) (2 (2 (2 the) (2 trip)) (2 (2 to) (2 (2 the) (2 theatre)))))))) (2 .))) 29 | (4 (2 (1 -LRB-) (2 (2 Taymor) (3 -RRB-))) (4 (3 (2 utilizes) (3 (2 (2 the) (2 idea)) (3 (2 of) (4 (3 (3 (3 (3 (3 (2 (2 making) (2 (2 (2 Kahlo) (2 's)) (2 art))) (2 (2 a) (2 living))) (2 ,)) (2 (2 breathing) (2 (2 part) (2 (2 of) (2 (2 the) (2 movie)))))) (2 ,)) (2 often)) (3 (3 (3 catapulting) (2 (2 the) (2 artist))) (2 (2 into) (2 (2 her) (2 (2 own) (2 work))))))))) (2 .))) 30 | (1 (2 This) (2 (1 (1 (2 is) (2 n't)) (3 (2 a) (2 (3 new) (2 idea)))) (2 .))) 31 | (4 (2 It) (4 (4 (2 's) (4 (3 (3 (2 been) (3 (2 done) (3 (2 before) (2 (2 but) (3 (2 never) (3 (2 so) (3 vividly))))))) (2 or)) (3 (2 with) (3 (2 (2 so) (2 much)) (2 passion))))) (2 .))) 32 | (3 (2 (1 -LRB-) (2 (2 ``) (2 (2 (2 Take) (2 (3 Care) (2 (2 of) (2 (2 My) (2 (2 Cat) (2 '')))))) (3 -RRB-)))) (3 (4 (2 is) (3 (3 (2 an) (3 (4 (3 honestly) (3 nice)) (2 (2 little) (2 film)))) (2 (2 that) (3 (2 (2 (2 takes) (3 us)) (2 (2 on) (2 (2 (2 an) (2 examination)) (2 (2 of) (3 (2 (2 young) (2 (2 adult) (3 life))) (2 (2 in) (3 (2 urban) (2 (2 South) (2 Korea))))))))) (2 (2 through) (2 (3 (2 the) (3 (2 (3 hearts) (2 and)) (2 minds))) (2 (2 of) (2 (2 the) (2 (2 five) (2 principals)))))))))) (2 .))) 33 | (2 (2 (0 (0 (0 (2 It) (0 (1 (1 (2 (1 (2 's) (1 (1 not) (3 life-affirming))) (2 --)) (1 (2 its) (0 vulgar))) (2 and)) (1 mean))) (2 ,)) (2 but)) (3 (2 I) (3 (3 liked) (2 it)))) (2 .)) 34 | (3 (2 (2 What) (1 (2 ``) (1 (2 Empire) (1 (2 '') (1 (1 lacks) (2 (2 in) (2 depth))))))) (3 (2 it) (3 (3 (2 (2 makes) (2 up)) (4 (2 for) (2 (2 with) (3 (2 its) (2 heart))))) (2 .)))) 35 | (4 (2 (3 (2 (2 Light) (2 ,)) (3 silly)) (2 ,)) (3 (4 (2 photographed) (3 (2 with) (3 (2 (3 (3 (2 (2 (2 colour) (2 and)) (2 depth)) (2 ,)) (2 and)) (2 rather)) (3 (2 a) (3 (3 good) (2 time)))))) (2 .))) 36 | (3 (2 But) (3 (2 (3 tongue-in-cheek) (1 preposterousness)) (3 (3 (2 (2 has) (2 always)) (3 (2 been) (2 (2 part) (2 (2 of) (3 (3 (2 (2 (2 For) (2 (2 the) (2 (2 most) (2 part)))) (3 (3 (2 (2 Wilde) (2 's)) (1 (2 droll) (3 whimsy))) (3 (2 helps) (3 (2 ``) (3 (2 (2 Being) (3 Earnest)) (3 (2 '') (3 (2 overcome) (1 (2 its) (1 weaknesses))))))))) (2 and)) (3 (2 (2 Parker) (2 's)) (3 (4 creative) (2 interference)))))))) (2 ...)))) 37 | (3 (4 (2 Much) (3 (2 of) (3 (2 (2 the) (2 (2 movie) (2 's))) (3 charm)))) (3 (3 (2 lies) (3 (2 in) (3 (4 (2 the) (3 (2 utter) (3 cuteness))) (2 (2 of) (2 (2 (2 Stuart) (2 and)) (2 Margolo)))))) (2 .))) 38 | (3 (1 (2 Their) (2 (2 computer-animated) (2 faces))) (3 (3 (2 are) (3 (2 very) (3 expressive))) (2 .))) 39 | (2 (2 (2 (2 The) (2 path)) (2 (2 (2 Ice) (2 Age)) (2 (2 follows) (3 (2 most) (2 closely))))) (2 (2 ,) (2 (2 though) (2 (2 ,) (2 (2 (2 is) (2 (2 (2 the) (2 one)) (2 (2 established) (2 (2 by) (2 (2 (2 (2 Warner) (2 (2 Bros.) (3 (2 giant) (2 (2 Chuck) (2 Jones))))) (2 ,)) (1 (2 who) (1 (1 (1 died) (2 (2 (2 a) (2 matter)) (3 (2 of) (2 weeks)))) (2 (2 before) (2 (2 (2 the) (2 (2 movie) (2 's))) (2 release)))))))))) (2 .)))))) 40 | (3 (3 (3 (2 ...) 
(3 (3 spiced) (2 (2 with) (2 (4 humor) (2 (1 -LRB-) (2 (2 (2 ') (2 (2 I) (3 (2 (2 (1 (2 speak) (2 (2 fluent) (1 flatula))) (2 ,)) (2 ')) (2 (2 (2 advises) (2 Denlopp)) (2 (2 after) (2 (2 (2 a) (2 (2 (2 rather) (3 (2 ,) (3 (2 er) (2 (2 ,) (3 bubbly))))) (2 exchange))) (2 (2 with) (2 (2 an) (2 (2 alien) (2 deckhand)))))))))) (3 -RRB-))))))) (2 and)) (3 (2 (3 witty) (2 updatings)) (3 (1 -LRB-) (2 (2 (2 (2 (2 Silver) (2 's)) (2 parrot)) (2 (2 has) (3 (2 been) (3 (2 replaced) (3 (2 with) (2 (2 (2 Morph) (2 ,)) (3 (3 (2 a) (3 (3 (3 cute) (2 alien)) (2 creature))) (2 (2 who) (2 (1 (2 mimics) (2 (2 (2 everyone) (2 and)) (2 everything))) (2 around)))))))))) (3 -RRB-))))) 41 | (3 (2 There) (3 (2 (2 's) (3 (2 (2 an) (3 energy)) (2 (2 to) (2 (2 Y) (2 (2 Tu) (2 (2 Mamá) (2 También))))))) (2 .))) 42 | (4 (2 (2 Much) (2 (2 of) (2 it))) (4 (4 (3 (3 comes) (3 (2 from) (3 (2 the) (4 (3 (2 brave) (3 (2 ,) (3 uninhibited))) (2 performances))))) (2 (2 by) (2 (2 its) (2 (2 lead) (2 actors))))) (2 .))) 43 | (2 (2 ``) (2 (2 (2 13) (2 Conversations)) (2 (2 '') (1 (1 (2 (3 (3 (2 holds) (3 (2 its) (2 (3 goodwill) (2 close)))) (2 ,)) (2 but)) (1 (2 is) (1 (2 relatively) (1 (1 slow) (2 (2 to) (2 (2 come) (2 (2 to) (2 (2 the) (2 point))))))))) (2 .))))) 44 | (3 (2 (2 ``) (2 (2 Auto) (2 (3 Focus) (2 '')))) (3 (2 works) (2 (2 as) (3 (2 (2 an) (3 (2 unusual) (2 (2 (2 biopic) (2 and)) (3 document)))) (2 (2 of) (2 (2 (2 male) (2 swingers)) (2 (2 in) (2 (2 the) (2 (2 Playboy) (2 era)))))))))) 45 | (2 (2 (2 If) (2 (2 (2 (2 Mr.) (2 (2 Zhang) (2 's))) (2 (2 subject) (2 matter))) (3 (2 (2 (2 (2 (2 is) (2 ,)) (2 (2 to) (2 (2 some) (2 degree)))) (2 (2 at) (1 least))) (2 ,)) (3 (2 quintessentially) (2 American))))) (1 (2 ,) (2 (2 (2 (2 his) (2 approach)) (2 (2 to) (2 storytelling))) (2 (2 (2 might) (2 (2 be) (2 (2 called) (2 Iranian)))) (2 .))))) 46 | (0 (1 (0 (1 (2 (1 (0 (2 ...) 
(0 (0 (2 a) (1 (1 sour) (2 (2 little) (2 movie)))) (2 (2 at) (2 (2 (3 (2 its) (2 core)) (2 ;)) (2 (2 (2 (2 an) (2 exploration)) (2 (2 of) (2 (2 the) (1 emptiness)))) (2 (2 that) (2 (2 underlay) (1 (2 the) (2 (2 relentless) (2 gaiety)))))))))) (2 (2 of) (2 (2 the) (2 1920)))) (2 's)) (2 ...)) (1 (2 (2 (2 The) (2 (2 film) (2 's))) (2 ending)) (1 (2 has) (2 (2 (2 a) (2 ``)) (2 (2 What) (2 (2 (2 was) (2 it)) (2 (2 all) (2 for)))))))) (2 ?)) (2 '')) 47 | (3 (2 (2 feeling) (2 (2 (2 to) (2 it)) (2 (2 (2 ,) (2 but)) (2 (2 like) (2 (2 the) (2 (2 1920) (2 's))))))) (3 (2 ,) (3 (2 (2 the) (2 trip)) (4 (2 there) (4 (4 (2 is) (4 (3 (2 a) (3 (4 great) (2 deal))) (3 (2 of) (4 fun)))) (2 .)))))) 48 | (3 (2 ``) (1 (2 (2 Cremaster) (2 3)) (2 (2 '') (3 (2 (1 (2 should) (2 (2 (1 (1 (2 come) (2 (2 with) (2 (2 the) (2 warning)))) (2 ``)) (3 (2 For) (2 (2 serious) (3 (2 film) (2 buffs))))) (2 only))) (2 !)) (2 ''))))) 49 | (1 (1 (2 (3 (2 (2 Made) (3 me)) (2 (3 unintentionally) (2 famous))) (2 --)) (0 (2 as) (1 (2 (2 the) (2 (1 queasy-stomached) (2 critic))) (1 (2 who) (1 (2 (1 (2 staggered) (2 (2 from) (2 (2 the) (3 theater)))) (2 and)) (1 (1 (2 blacked) (1 out)) (2 (2 in) (2 (2 the) (2 lobby))))))))) (2 .)) 50 | (4 (2 (2 But) (2 (3 (3 (2 believe) (2 it)) (2 or)) (1 not))) (4 (2 ,) (4 (2 it) (4 (4 (2 's) (4 (2 one) (4 (2 of) (4 (4 (2 the) (4 (4 (2 most) (4 (4 beautiful) (3 (2 ,) (3 evocative)))) (2 works))) (2 (2 I) (2 (2 've) (2 seen))))))) (2 .))))) 51 | (3 (3 (4 (4 (3 (2 (2 García) (2 (2 (2 Bernal) (2 and)) (2 Talancón))) (4 (2 are) (3 (2 an) (4 (4 (3 immensely) (3 appealing)) (2 couple))))) (2 ,)) (2 and)) (3 (2 (2 even) (1 (2 though) (1 (2 (2 their) (2 story)) (1 (2 is) (2 predictable))))) (2 (2 ,) (3 (2 you) (2 (2 'll) (2 (2 want) (2 (2 things) (2 (2 to) (2 (2 work) (1 out)))))))))) (2 .)) 52 | (4 (2 ...) 
(3 (4 (3 (2 a) (2 (2 spoof) (3 comedy))) (3 (2 that) (3 (2 carries) (3 (2 (2 its) (3 share)) (3 (2 of) (3 (4 (3 (2 (2 (3 (3 (3 laughs) (2 --)) (3 (2 sometimes) (3 (2 a) (3 chuckle)))) (2 ,)) (2 (2 sometimes) (2 (2 a) (2 guffaw)))) (2 and)) (3 (2 ,) (3 (4 (2 to) (3 (2 my) (4 (4 great) (4 pleasure)))) (2 ,)))) (3 (2 the) (3 (2 occasional) (3 (2 belly) (3 laugh)))))))))) (2 .))) 53 | (3 (2 (1 -LRB-) (2 (2 City) (3 -RRB-))) (2 (3 (2 (2 reminds) (3 us)) (2 (2 how) (1 (3 realistically) (1 (2 nuanced) (2 (2 (2 a) (3 (2 Robert) (2 (2 De) (3 (2 Niro) (2 performance))))) (2 (2 can) (1 (2 be) (1 (2 when) (1 (2 he) (1 (2 (1 (2 is) (1 not)) (2 more)) (1 (3 lucratively) (2 (3 engaged) (1 (2 in) (1 (1 (1 (1 (2 the) (1 (1 shameless) (1 self-caricature))) (2 (2 of) (2 (2 `) (2 (2 (2 Analyze) (2 This)) (2 (2 ') (2 (1 -LRB-) (2 (2 1999) (3 -RRB-)))))))) (2 and)) (2 (2 `) (2 (2 (2 (2 (2 Analyze) (2 That)) (2 ,)) (2 ')) (1 (2 (2 (2 promised) (2 (2 (2 (1 -LRB-) (2 or)) (2 threatened)) (3 -RRB-))) (2 for)) (2 (2 later) (2 (2 this) (2 year)))))))))))))))))))) (2 .))) 54 | (4 (2 (1 (2 The) (2 (2 wanton) (2 slipperiness))) (2 (2 of) (3 (2 (1 (2 (2 (2 \*) (2 Corpus)) (2 and)) (1 (2 its) (2 (3 amiable) (2 jerking)))) (2 and)) (2 (2 reshaping) (2 (2 of) (2 (2 physical) (2 (2 (2 time) (2 and)) (2 space)))))))) (4 (3 (2 would) (4 (2 (2 make) (2 it)) (4 (2 a) (4 (4 great) (4 (2 piece) (3 (2 to) (3 (2 (2 (2 watch) (2 (2 with) (2 kids))) (2 and)) (2 (2 use) (2 (2 to) (3 (2 (2 introduce) (2 video)) (2 (2 as) (2 art)))))))))))) (2 .))) 55 | (4 (4 (2 (2 (2 (2 ``) (1 (2 Frailty) (1 (2 '') (1 (3 (2 starts) (1 out)) (1 (2 like) (1 (2 a) (2 (1 typical) (2 (2 Bible) (2 (1 killer) (2 story)))))))))) (2 ,)) (2 but)) (4 (2 it) (4 (2 (2 turns) (1 out)) (4 (2 to) (3 (3 (2 be) (4 (3 (2 significantly) (3 (2 different) (3 (3 (2 (1 -LRB-) (2 and)) (4 better)) (3 -RRB-)))) (2 (2 than) (2 (2 most) (2 films))))) (2 (2 with) (2 (2 this) (2 theme)))))))) (2 .)) 56 | (4 (2 (2 For) (2 (2 those) (2 (2 who) (3 (2 (2 pride) (2 themselves)) (3 (2 on) (4 (3 sophisticated) (3 (2 ,) (3 (2 discerning) (2 taste))))))))) (4 (2 ,) (4 (4 (2 (1 (2 (2 this) (1 (2 (2 might) (1 not)) (3 (2 seem) (2 (2 like) (2 (2 (2 the) (2 (2 proper) (2 cup))) (2 (2 of) (2 tea))))))) (2 ,)) (2 however)) (4 (2 it) (4 (2 is) (3 (2 (2 almost) (2 guaranteed)) (4 (2 that) (4 (3 (2 even) (1 (2 the) (1 (1 stuffiest) (2 (2 cinema) (2 goers))))) (4 (2 will) (3 (3 (2 (3 laugh) (2 (2 their) (2 \*\*\*))) (1 off)) (2 (2 for) (2 (2 an) (2 hour-and-a-half))))))))))) (2 .)))) 57 | (4 (2 It) (4 (3 (2 cuts) (3 (3 (3 (3 (2 (3 (2 (2 to) (2 (2 (2 the) (2 core)) (3 (2 of) (2 (2 what) (2 (2 it) (3 (2 actually) (3 (2 means) (2 (2 to) (2 (2 face) (2 (2 your) (1 fears))))))))))) (2 (2 ,) (2 (2 to) (2 (2 be) (2 (2 (2 a) (2 girl)) (2 (2 in) (2 (2 (2 a) (2 world)) (2 (2 of) (2 boys))))))))) (2 ,)) (3 (2 to) (2 (2 (2 be) (2 (2 a) (2 boy))) (3 (3 truly) (3 (2 in) (2 (4 love) (2 (2 with) (2 (2 a) (2 girl))))))))) (2 ,)) (2 and)) (3 (2 to) (3 (2 ride) (3 (2 (2 the) (2 (2 big) (2 (2 metaphorical) (2 wave)))) (2 (2 that) (2 (2 (2 (2 is) (3 life)) (2 --)) (2 (2 wherever) (2 (2 it) (2 (2 takes) (2 you))))))))))) (2 .))) 58 | (3 (3 (3 (2 A) (3 (2 welcome) (2 relief))) (1 (2 from) (0 (2 (2 baseball) (2 movies)) (1 (2 that) (1 (2 try) (1 (2 too) (1 (2 hard) (2 (2 to) (2 (2 be) (2 mythic)))))))))) (3 (2 ,) (4 (2 (2 this) (2 one)) (4 (3 (2 is) (3 (3 (4 (2 a) (3 (3 (4 sweet) (2 and)) (3 modest))) (2 and)) (4 (2 ultimately) (3 (3 winning) (2 story))))) (2 .))))) 59 | (3 (3 (3 (3 (2 A) (3 (2 crisp) (2 (3 psychological) (1 
drama)))) (2 (1 -LRB-) (2 (2 and) (3 -RRB-)))) (3 (3 (2 a) (3 (3 fascinating) (2 (2 little) (3 thriller)))) (2 (2 that) (2 (2 would) (3 (2 have) (3 (2 (2 been) (4 perfect)) (2 (2 for) (2 (2 an) (2 (2 old) (2 (2 ``) (2 (2 Twilight) (3 (2 Zone) (2 (2 '') (2 episode)))))))))))))) (2 .)) 60 | (3 (2 It) (3 (3 (2 has) (4 (2 (2 (2 more) (2 (2 (2 than) (2 a)) (2 few))) (2 moments)) (4 (2 that) (3 (2 are) (3 (3 (3 insightful) (2 enough)) (3 (2 to) (3 (2 be) (3 (3 fondly) (3 (2 remembered) (2 (2 in) (3 (2 (2 the) (2 (3 (1 endlessly) (2 challenging)) (2 maze))) (2 (2 of) (2 moviegoing))))))))))))) (2 .))) 61 | (2 (2 (2 Opening) (1 (2 with) (0 (1 (2 (1 (2 (2 some) (1 (2 contrived) (2 banter))) (2 ,)) (2 cliches)) (2 and)) (2 (2 some) (2 (1 loose) (2 ends)))))) (3 (2 ,) (2 (2 (2 the) (2 screenplay)) (2 (2 only) (3 (2 (3 (3 comes) (2 (2 into) (3 (2 its) (2 own)))) (2 (2 in) (2 (2 the) (2 (2 second) (2 half))))) (2 .)))))) 62 | (3 (3 (3 (4 (2 An) (3 (3 uncluttered) (3 (2 ,) (4 (3 resonant) (3 gem))))) (3 (2 that) (3 (2 (2 relays) (2 (2 its) (2 (2 universal) (2 points)))) (3 (2 without) (2 (2 (2 lectures) (2 or)) (1 confrontations)))))) (2 .)) (2 ')) 63 | (3 (1 -LRB-) (3 (2 (2 The) (2 Cockettes)) (2 (3 -RRB-) (3 (3 (3 (2 (3 provides) (2 (2 a) (2 window))) (2 (2 into) (3 (2 (2 a) (2 (2 subculture) (1 hell-bent))) (3 (2 on) (3 (2 (2 expressing) (2 itself)) (2 (2 in) (2 (2 every) (2 (2 way) (3 imaginable))))))))) (2 .)) (2 '))))) 64 | (3 (4 (3 (2 A) (3 (4 smart) (2 (2 ,) (2 (3 steamy) (2 mix))))) (3 (2 of) (3 (3 (2 (2 (2 (1 road) (2 movie)) (2 ,)) (2 (2 coming-of-age) (2 story))) (2 and)) (2 (2 political) (2 satire))))) (2 .)) 65 | (1 (2 (2 The) (3 (2 modern-day) (2 royals))) (2 (2 (2 have) (2 (1 nothing) (2 (2 on) (2 (2 (2 these) (2 guys)) (2 (2 when) (2 (2 it) (2 (3 comes) (2 (2 to) (2 scandals))))))))) (2 .))) 66 | (1 (2 It) (2 (1 (1 (2 (2 's) (2 only)) (2 (2 in) (2 (2 fairy) (2 tales)))) (2 (2 that) (2 (2 (3 princesses) (2 (2 that) (1 (2 are) (2 (2 married) (2 (2 for) (2 (2 political) (2 reason))))))) (3 (4 (3 (2 live) (3 happily)) (2 ever)) (2 after))))) (2 .))) 67 | (4 (4 (3 (4 (2 A) (4 (4 terrific) (1 (2 B) (2 movie)))) (2 --)) (4 (2 in) (4 (3 (2 fact) (2 ,)) (4 (4 (2 the) (4 best)) (2 (2 in) (2 (2 recent) (2 memory))))))) (2 .)) 68 | (3 (2 ``) (3 (2 (2 Birthday) (2 Girl)) (3 (2 '') (2 (2 (2 (2 is) (2 (2 (2 an) (2 (2 actor) (2 's))) (2 movie))) (2 (1 (2 first) (2 and)) (2 foremost))) (2 .))))) 69 | (4 (2 I) (3 (4 (0 (1 (2 walked) (2 away)) (2 (2 from) (2 (2 (2 this) (2 (3 new) (2 version))) (2 (2 of) (2 E.T.))))) (3 (2 just) (2 (2 as) (2 (2 I) (3 (2 hoped) (2 (2 I) (3 (2 (2 would) (2 --)) (2 (2 with) (2 (2 moist) (2 eyes)))))))))) (2 .))) 70 | (4 (3 (2 For) (2 (2 devotees) (2 (2 of) (2 (2 French) (2 cinema))))) (3 (2 ,) (3 (2 (2 Safe) (2 Conduct)) (4 (4 (3 (2 is) (3 (2 so) (3 (3 rich) (2 (2 with) (2 (2 period) (2 minutiae)))))) (3 (2 it) (4 (2 's) (2 (2 like) (2 (1 (1 (1 dying) (2 and)) (2 going)) (2 (2 to) (3 (2 celluloid) (3 heaven)))))))) (2 .))))) 71 | (3 (2 (2 What) (3 (2 (2 's) (2 really)) (3 (2 so) (3 (3 appealing) (2 (2 about) (2 (2 the) (2 characters))))))) (3 (2 (2 is) (2 (2 (2 their) (2 resemblance)) (2 (2 to) (2 (2 everyday) (3 children))))) (2 .))) 72 | (2 (1 Shamelessly) (2 (2 resorting) (2 (2 to) (3 (2 (2 (1 (1 (2 (1 pee-related) (1 (2 sight) (2 gags))) (1 (2 that) (2 (2 (2 might) (2 even)) (1 (2 (1 (2 cause) (1 (2 (2 Tom) (2 Green)) (1 (2 a) (1 grimace)))) (2 ;)) (2 still))))) (2 ,)) (2 (2 (2 Myer) (2 's)) (3 energy))) (2 and)) (3 (2 (3 (2 the) (1 silliness)) (2 (2 of) (2 it))) (2 (2 
all) (3 (2 eventually) (3 prevail)))))))) 73 | (1 (2 An) (1 (1 absurdist) (1 (2 spider) (2 (2 web) (2 .))))) 74 | (3 (3 (2 If) (4 (2 you) (3 (2 (2 're) (3 (2 as) (3 (4 happy) (2 (2 listening) (2 (2 to) (2 movies)))))) (3 (2 as) (3 (2 (2 (2 (2 you) (2 (2 are) (2 (2 watching) (2 them)))) (2 ,)) (2 and)) (3 (4 (1 (2 the) (2 (1 slow) (2 parade))) (3 (2 of) (2 (2 human) (3 (2 frailty) (3 fascinates))))) (2 you))))))) (3 (2 ,) (3 (2 then) (2 (2 you) (3 (3 (2 're) (3 (2 at) (3 (2 the) (3 (3 right) (2 film))))) (2 .)))))) 75 | (2 (2 (2 This) (2 version)) (4 (2 (3 (2 moves) (3 (2 beyond) (3 (2 (3 (2 the) (2 (3 original) (2 's))) (2 nostalgia)) (2 (2 for) (2 (2 (2 the) (2 (2 communal) (2 (2 film) (2 experiences)))) (2 (2 of) (2 yesteryear))))))) (2 (2 to) (2 (2 (2 a) (3 (2 deeper) (2 realization))) (2 (2 of) (1 (2 (2 cinema) (2 's)) (1 (2 inability) (3 (2 to) (2 (2 (2 stand) (2 in)) (2 (2 for) (3 (3 true) (2 (2 ,) (2 (2 lived) (2 experience))))))))))))) (2 .))) 76 | (2 (2 (2 Some) (2 movies)) (3 (2 (2 (2 blend) (2 together)) (2 (2 as) (2 (2 they) (2 (2 become) (2 (1 distant) (2 memories)))))) (2 .))) 77 | (3 (2 (1 (2 (2 (2 Mention) (2 (2 ``) (2 (2 Solaris) (2 '')))) (2 (2 (2 five) (2 years)) (2 (2 from) (2 now)))) (2 and)) (2 (2 I) (2 (2 (2 'm) (2 sure)) (2 (2 those) (2 (2 who) (2 (2 saw) (2 (2 it) (2 (2 will) (2 (2 have) (2 (2 (2 an) (2 opinion)) (2 (2 to) (3 share)))))))))))) (2 .)) 78 | (4 (2 Allen) (4 (4 (2 's) (4 (4 (3 (3 (4 funniest) (2 and)) (4 (2 most) (3 likeable))) (2 movie)) (2 (2 in) (2 years)))) (2 .))) 79 | (4 (2 It) (3 (3 (2 's) (3 (4 (2 a) (4 (4 glorious) (3 spectacle))) (3 (2 like) (2 (2 (2 those) (2 (2 D.W.) (2 Griffith))) (2 (2 made) (2 (2 in) (2 (2 (2 the) (2 (2 early) (2 days))) (2 (2 of) (2 (2 silent) (2 film)))))))))) (2 .))) 80 | (4 (4 (2 This) (3 (2 comic) (3 gem))) (3 (4 (3 (2 is) (3 (2 as) (4 delightful))) (2 (2 as) (1 (2 it) (1 (2 is) (3 derivative))))) (2 .))) 81 | (4 (3 (2 More) (3 (2 timely) (2 (2 than) (3 (2 (2 its) (2 director)) (2 (2 (2 could) (2 ever)) (2 (2 have) (2 dreamed))))))) (3 (2 ,) (3 (2 this) (2 (2 quietly) (3 (3 (2 lyrical) (2 (2 (2 tale) (2 probes)) (3 (3 (2 the) (2 (3 ambiguous) (2 welcome))) (2 (2 (2 extended) (2 (2 by) (2 Iran))) (3 (2 to) (2 (2 (2 the) (2 (2 Afghani) (2 refugees))) (2 (2 who) (2 (2 (3 (2 streamed) (2 (2 across) (2 (2 its) (2 borders)))) (2 ,)) (1 (1 desperate) (2 (2 for) (2 (2 (2 work) (2 and)) (2 food)))))))))))) (2 .)))))) 82 | (3 (3 (4 (2 (2 (2 The) (2 (2 leaping) (2 (2 story) (2 line)))) (2 ,)) (3 (2 (2 shaped) (2 (2 by) (3 (2 director) (2 (2 Peter) (2 Kosminsky))))) (2 (2 into) (2 (2 (2 (3 sharp) (2 slivers)) (2 and)) (3 (2 cutting) (2 impressions)))))) (2 ,)) (3 (3 (2 (2 shows) (4 (2 (2 all) (2 (2 the) (2 signs))) (3 (2 of) (3 (3 rich) (2 (3 detail) (2 condensed)))))) (3 (2 into) (2 (2 (3 (2 a) (2 (2 few) (2 (3 evocative) (2 images)))) (2 and)) (3 (3 striking) (2 (2 character) (2 traits)))))) (2 .))) 83 | (2 (3 (2 With) (4 (3 (3 (2 three) (4 (4 excellent) (2 (2 principal) (2 singers)))) (2 ,)) (4 (3 (3 (2 a) (3 (3 (2 (2 youthful) (2 and)) (3 good-looking)) (3 diva))) (2 and)) (3 (2 (2 tenor) (2 and)) (3 (3 richly) (3 (2 handsome) (2 locations))))))) (1 (2 ,) (1 (2 it) (1 (1 (2 's) (1 (2 enough) (1 (2 to) (1 (2 make) (1 (2 you) (1 (2 wish) (1 (2 Jacquot) (2 (2 had) (2 (1 (2 (2 (2 left) (2 (3 well) (2 (2 enough) (2 alone)))) (2 and)) (2 just)) (2 (2 (2 filmed) (2 (2 the) (2 opera))) (2 (2 without) (2 (1 (2 all) (2 (2 these) (1 distortions))) (2 (2 of) (2 perspective)))))))))))))) (2 .))))) 84 | (4 (3 (3 (3 (3 (2 (2 The) (2 
production)) (3 (2 has) (4 (2 been) (3 (2 made) (4 (2 with) (3 (2 (2 an) (2 (3 enormous) (2 amount))) (2 (2 of) (3 affection)))))))) (2 ,)) (2 so)) (2 (2 we) (2 (2 believe) (3 (2 (2 these) (2 characters)) (2 (4 love) (2 (2 each) (2 other))))))) (2 .)) 85 | (3 (3 Certainly) (4 (2 (2 the) (2 performances)) (3 (3 (2 are) (3 worthwhile)) (2 .)))) 86 | (3 (4 (2 Winds) (3 (2 up) (2 (2 (2 (2 being) (2 (2 both) (1 (3 (3 revelatory) (2 and)) (1 narcissistic)))) (2 ,)) (3 (3 (3 (2 achieving) (3 (2 some) (3 (4 honest) (2 insight)))) (2 (2 into) (2 relationships))) (2 (2 that) (3 (2 (2 most) (3 (3 high-concept) (2 films))) (3 (2 candy-coat) (3 (2 with) (4 (2 (3 (2 (1 (2 pat) (2 storylines)) (2 ,)) (3 (3 precious) (2 circumstances))) (2 and)) (3 (4 beautiful) (2 stars))))))))))) (2 .)) 87 | (4 (2 (2 Watching) (2 (2 these) (1 eccentrics))) (4 (3 (2 is) (3 (2 both) (3 (3 (4 (3 inspiring) (2 and)) (4 pure)) (4 joy)))) (2 .))) 88 | (4 (2 (2 Steven) (2 Spielberg)) (4 (2 (2 brings) (3 us)) (4 (2 another) (4 masterpiece)))) 89 | (3 (2 Finally) (3 (2 ,) (3 (2 (2 (2 (2 (2 the) (2 French-produced)) (2 ``)) (1 (2 Read) (2 (2 My) (2 Lips)))) (2 '')) (2 (3 (2 is) (3 (2 (2 a) (2 movie)) (2 (2 that) (3 (2 understands) (2 (2 characters) (2 (2 must) (2 (2 come) (2 first)))))))) (2 .))))) 90 | (4 (2 (2 Ms.) (2 (2 Seigner) (2 (2 and) (2 (2 Mr.) (2 Serrault))))) (3 (3 (3 bring) (4 (3 (4 fresh) (2 (2 ,) (3 (3 unforced) (2 naturalism)))) (2 (2 to) (2 (2 their) (2 characters))))) (2 .))) 91 | (3 (2 Allen) (4 (2 (2 shows) (4 (2 he) (3 (2 can) (3 (2 outgag) (2 (2 any) (2 (2 of) (1 (2 (2 those) (2 (2 young) (2 whippersnappers))) (3 (2 making) (2 (2 (3 moving) (2 pictures)) (2 today)))))))))) (2 .))) 92 | (3 (3 (4 (2 A) (4 (3 good) (2 film))) (4 (2 with) (4 (3 (2 a) (3 (3 solid) (2 pedigree))) (2 (2 both) (2 (2 (2 in) (2 (2 front) (2 of))) (2 (2 and) (2 (2 (2 ,) (2 (2 (2 more) (2 specifically)) (2 ,))) (2 (2 behind) (2 (2 the) (2 camera)))))))))) (2 .)) 93 | (1 (1 (2 By) (1 (1 (1 (2 (1 no) (2 means)) (3 (2 a) (4 slam-dunk))) (2 and)) (1 (0 (0 (2 sure) (1 (2 to) (0 (2 ultimately) (1 (0 disappoint) (2 (2 the) (2 action)))))) (3 fans)) (3 (2 who) (3 (2 will) (4 (2 be) (3 (3 (3 moved) (3 (2 to) (4 (3 (2 the) (3 edge)) (2 (2 of) (3 (2 their) (2 seats)))))) (3 (2 by) (4 (2 the) (4 (3 dynamic) (2 (2 first) (2 act)))))))))))) (4 (2 ,) (4 (2 it) (4 (2 still) (3 (3 (2 (3 comes) (1 off)) (3 (2 as) (3 (2 a) (4 (3 touching) (2 (2 ,) (3 (3 transcendent) (2 (4 love) (2 story)))))))) (2 .)))))) 94 | (4 (4 (4 (3 (2 I) (4 (4 encourage) (4 (2 (2 (2 young) (2 and)) (2 old)) (3 (2 alike) (3 (2 to) (3 (2 go) (3 (2 see) (3 (3 (2 this) (3 (3 (3 (3 unique) (2 and)) (4 entertaining)) (2 twist))) (2 (2 on) (2 (2 (2 the) (2 (3 classic) (2 (2 whale) (2 's)))) (2 tale))))))))))) (2 --)) (2 (2 you) (3 (2 (2 wo) (2 n't)) (1 (2 be) (2 sorry))))) (2 !)) 95 | (3 (3 (2 (1 (2 (2 A) (3 (2 literary) (3 (2 detective) (2 story)))) (2 (2 (2 is) (2 still)) (2 (2 a) (3 (2 detective) (2 story))))) (2 and)) (4 (2 (2 aficionados) (2 (2 of) (2 (2 the) (2 whodunit)))) (3 (2 (2 wo) (2 n't)) (0 (2 be) (1 disappointed))))) (2 .)) 96 | (3 (2 (2 High) (2 Crimes)) (2 (2 (1 (1 (1 (2 (0 steals) (3 (2 so) (3 freely))) (2 (2 from) (2 (2 other) (2 movies)))) (2 and)) (2 (2 combines) (2 (2 (2 enough) (2 (2 disparate) (2 types))) (2 (2 of) (2 films))))) (3 (2 that) (4 (2 it) (3 (2 (2 ca) (2 n't)) (3 (3 (2 (2 help) (2 but)) (3 engage)) (2 (2 an) (2 audience))))))) (2 .))) 97 | (4 (2 (2 If) (3 (2 you) (2 (2 're) (2 (3 (2 a) (3 fan)) (2 (2 of) (2 (2 the) (2 series))))))) (4 (2 you) (4 (4 (4 
(4 (2 'll) (4 (4 love) (2 it))) (2 and)) (3 (2 probably) (4 (2 want) (3 (2 to) (4 (3 (2 see) (2 it)) (2 twice)))))) (2 .)))) 98 | (3 (2 I) (2 (2 (2 will) (2 be)) (2 .))) 99 | (3 (2 It) (3 (3 (3 (3 (3 (3 celebrates) (3 (3 (2 (2 the) (2 (2 group) (2 's))) (3 (4 playful) (2 spark))) (2 (2 of) (2 nonconformity)))) (2 ,)) (2 (3 (2 glancing) (3 (3 vividly) (2 back))) (2 (2 at) (3 (2 what) (2 (2 Hibiscus) (2 (3 grandly) (2 (2 called) (3 (2 (2 his) (2 (2 `) (2 angels))) (2 (2 of) (2 light)))))))))) (2 .)) (2 '))) 100 | (4 (2 (2 The) (2 story)) (3 (2 ...) (3 (2 (3 (2 is) (3 (3 (4 (4 (4 (3 inspiring) (2 ,)) (2 ironic)) (2 ,)) (2 and)) (2 (3 revelatory) (2 (2 of) (2 just))))) (1 (0 (2 how) (0 (1 (1 ridiculous) (2 and)) (1 money-oriented))) (2 (2 (2 the) (2 (2 record) (2 industry))) (2 (2 really) (2 is))))) (2 .)))) 101 | -------------------------------------------------------------------------------- /tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | # PyTorch 4 | import torch 5 | import torch.nn as nn 6 | from torch.autograd import Variable 7 | import torch.nn.functional as F 8 | import torch.optim as optim 9 | 10 | import models 11 | import load_sst_data 12 | import utils 13 | 14 | 15 | def default_args(style="dynamic"): 16 | args = utils.Args() 17 | 18 | # Set args. 19 | args.style = style 20 | args.smart_batching = True 21 | args.training_data_path = 'test_trees.txt' 22 | args.embedding_data_path = 'test_embeddings.txt' 23 | args.word_embedding_dim = 50 24 | args.model_dim = 100 25 | args.mlp_dim = 256 26 | args.batch_size = 2 27 | args.lr = 0.0001 28 | args.max_training_steps = 50000 29 | args.eval_interval_steps = 100 30 | args.statistics_interval_steps = 100 31 | args.num_classes = 2 32 | 33 | return args 34 | 35 | 36 | def model_suite(self, mdl): 37 | data_iter = self.data_iter 38 | args = self.args 39 | initial_embeddings = self.initial_embeddings 40 | 41 | args.style = mdl 42 | 43 | # Model Class. 44 | model_cls = getattr(models, mdl) 45 | 46 | # Test model. 47 | model = model_cls( 48 | model_dim=args.model_dim, 49 | mlp_dim=args.mlp_dim, 50 | num_classes=args.num_classes, 51 | word_embedding_dim=args.word_embedding_dim, 52 | initial_embeddings=initial_embeddings, 53 | ) 54 | 55 | data, target, lengths = utils.make_batch(next(data_iter), args.style == "dynamic") 56 | y = model(data, lengths) 57 | 58 | 59 | class ModelsTestCase(unittest.TestCase): 60 | 61 | def setUp(self): 62 | args = default_args() 63 | 64 | # Specify data loader. 65 | data_manager = load_sst_data 66 | 67 | # Load data. 68 | raw_data, vocab = data_manager.load_data(args.training_data_path) 69 | 70 | # Load embeddings. 71 | vocab = utils.BuildVocabularyForTextEmbeddingFile( 72 | args.embedding_data_path, vocab, utils.CORE_VOCABULARY) 73 | initial_embeddings = utils.LoadEmbeddingsFromText( 74 | vocab, args.word_embedding_dim, args.embedding_data_path) 75 | 76 | # Tokenize data. 77 | tokenized = utils.Tokenize(raw_data, vocab) 78 | 79 | # Create iterators. 80 | data_iter = utils.MakeDataIterator(tokenized, args.batch_size, smart_batching=args.smart_batching, forever=True)() 81 | 82 | # Cache useful values. 
83 |         self.args = args
84 |         self.data_iter = data_iter
85 |         self.initial_embeddings = initial_embeddings
86 | 
87 |     def test_dynamic(self):
88 |         mdl = 'dynamic'
89 |         model_suite(self, mdl)
90 | 
91 |     def test_dynamic2(self):
92 |         mdl = 'dynamic2'
93 |         model_suite(self, mdl)
94 | 
95 |     def test_static(self):
96 |         mdl = 'static'
97 |         model_suite(self, mdl)
98 | 
99 |     def test_static2(self):
100 |         mdl = 'static2'
101 |         model_suite(self, mdl)
102 | 
103 |     def test_fakedynamic(self):
104 |         mdl = 'fakedynamic'
105 |         model_suite(self, mdl)
106 | 
107 |     def test_fakestatic(self):
108 |         mdl = 'fakestatic'
109 |         model_suite(self, mdl)
110 | 
111 | 
112 | if __name__ == '__main__':
113 |     unittest.main()
114 | 
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import random
3 | from collections import deque
4 | 
5 | # PyTorch
6 | import torch
7 | import torch.nn as nn
8 | from torch.autograd import Variable
9 | import torch.nn.functional as F
10 | import torch.optim as optim
11 | 
12 | 
13 | # With a loaded embedding matrix, the padding vector will be initialized to zero
14 | # and will not be trained. Hopefully this isn't a problem. It seems better than
15 | # random initialization...
16 | PADDING_TOKEN = "*PADDING*"
17 | 
18 | # Temporary hack: Map UNK to "_" when loading pretrained embedding matrices:
19 | # it's a common token that is pretrained, but shouldn't look like any content words.
20 | UNK_TOKEN = "_"
21 | 
22 | 
23 | CORE_VOCABULARY = {PADDING_TOKEN: 0,
24 |                    UNK_TOKEN: 1}
25 | 
26 | 
27 | def BuildVocabularyForTextEmbeddingFile(path, types_in_data, core_vocabulary):
28 |     """Quickly iterates through a GloVe-formatted text vector file to
29 |     extract a working vocabulary of words that occur both in the data and
30 |     in the vector file."""
31 | 
32 |     vocabulary = {}
33 |     vocabulary.update(core_vocabulary)
34 |     next_index = len(vocabulary)
35 |     with open(path, 'rU') as f:
36 |         for line in f:
37 |             spl = line.split(" ", 1)
38 |             word = spl[0].decode('UTF-8')
39 |             if word in types_in_data and word not in vocabulary:
40 |                 vocabulary[word] = next_index
41 |                 next_index += 1
42 |     return vocabulary
43 | 
44 | 
45 | def LoadEmbeddingsFromText(vocabulary, embedding_dim, path):
46 |     """Prepopulates a numpy embedding matrix indexed by vocabulary with
47 |     values from a GloVe-format vector file.
48 | 
49 |     For now, values not found in the file will be set to zero."""
50 | 
51 |     emb = np.zeros(
52 |         (len(vocabulary), embedding_dim), dtype=np.float32)
53 |     with open(path, 'r') as f:
54 |         for line in f:
55 |             spl = line.split(" ")
56 |             word = spl[0]
57 |             if word in vocabulary:
58 |                 emb[vocabulary[word], :] = [float(e) for e in spl[1:]]
59 |     return emb
60 | 
61 | 
62 | def MakeDataIterator(examples, batch_size, forever=True, smart_batching=True, num_buckets=10):
63 |     if smart_batching:
64 |         def data_iter():
65 |             def build_bucketed_batch_indices():
66 |                 batches = []
67 |                 lengths = [(i, len(e.tokens)) for i, e in enumerate(examples)]
68 | 
69 |                 # Shuffle before bucketing.
70 |                 random.shuffle(lengths)
71 | 
72 |                 bucket_size = len(examples) // num_buckets
73 |                 buckets = [lengths[i*bucket_size:(i+1)*bucket_size] for i in range(num_buckets)]
74 |                 buckets = [sorted(b, key=lambda x: x[1]) for b in buckets]
75 |                 for b in buckets:
76 |                     num_batches = len(b) // batch_size
77 |                     for i in range(num_batches):
78 |                         _batch = b[i*batch_size:(i+1)*batch_size]
79 |                         _batch = [x[0] for x in _batch]
80 |                         batches.append(_batch)
81 | 
82 |                 # Shuffle after bucketing.
83 |                 random.shuffle(batches)
84 | 
85 |                 return batches
86 | 
87 |             batch_indices = build_bucketed_batch_indices()
88 |             num_batches = len(batch_indices)
89 |             start = -1
90 | 
91 |             while True:
92 |                 start += 1
93 |                 if start >= num_batches:
94 | 
95 |                     if not forever:
96 |                         break
97 | 
98 |                     # Start another epoch.
99 |                     batch_indices = build_bucketed_batch_indices()
100 |                     num_batches = len(batch_indices)
101 |                     start = 0
102 |                 yield tuple(examples[i] for i in batch_indices[start])
103 | 
104 |     else:
105 |         def data_iter():
106 |             dataset_size = len(examples)
107 |             start = -1 * batch_size
108 |             order = range(dataset_size)
109 |             random.shuffle(order)
110 | 
111 |             while True:
112 |                 start += batch_size
113 |                 if start > dataset_size - batch_size:
114 | 
115 |                     if not forever:
116 |                         break
117 | 
118 |                     # Start another epoch.
119 |                     start = 0
120 |                     random.shuffle(order)
121 |                 batch_indices = order[start:start + batch_size]
122 |                 yield tuple(examples[i] for i in batch_indices)
123 | 
124 |     return data_iter
125 | 
126 | 
127 | def Tokenize(examples, vocabulary):
128 |     for e in examples:
129 |         e.tokens = [vocabulary.get(w, vocabulary.get(UNK_TOKEN)) for w in e.tokens]
130 |     return examples
131 | 
132 | 
133 | class Accumulator(object):
134 | 
135 |     def __init__(self, trail=100):
136 |         self.trail = trail
137 |         # Instance-level cache; a class-level dict would be shared across instances.
138 |         self.cache = dict()
139 | 
140 |     def add(self, key, val):
141 |         self.cache.setdefault(key, deque(maxlen=self.trail)).append(val)
142 | 
143 |     def get(self, key):
144 |         ret = self.cache.get(key, [])
145 |         try:
146 |             del self.cache[key]
147 |         except KeyError:
148 |             pass
149 |         return ret
150 | 
151 |     def get_avg(self, key):
152 |         return np.array(self.get(key)).mean()
153 | 
154 | 
155 | class Args(object):
156 |     def __repr__(self):
157 |         s = "{}"
158 |         return s.format(self.__dict__)
159 | 
160 | 
161 | def make_batch(examples, dynamic=True):
162 |     # Build lengths.
163 |     lengths = []
164 |     for e in examples:
165 |         lengths.append(len(e.tokens))
166 | 
167 |     # Build input.
168 |     if dynamic:  # dynamic: list of lists
169 |         data = []
170 |         for e in examples:
171 |             d = list(reversed(e.tokens))
172 |             data.append(d)
173 |     else:  # static: batch matrix
174 |         batch_size = len(examples)
175 |         max_len = max(len(e.tokens) for e in examples)
176 |         data = torch.zeros(batch_size, max_len).long()
177 |         for i, e in enumerate(examples):
178 |             l = len(e.tokens)
179 |             offset = max_len - l
180 |             data[i, offset:max_len] = torch.LongTensor(e.tokens)
181 | 
182 |     # Build labels.
183 |     target = []
184 |     for e in examples:
185 |         target.append(e.label)
186 |     target = torch.LongTensor(target)
187 | 
188 |     return data, target, lengths
189 | 
--------------------------------------------------------------------------------
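The script below is not part of the repository; it is a minimal end-to-end sketch (Python 2.7, matching the README) showing how the pieces above fit together. It loads the bundled `test_trees.txt` fixture, builds a vocabulary against `test_embeddings.txt`, tokenizes, and draws one batch in both the dynamic and static layouts produced by `utils.make_batch`. Only functions defined in the files above are used; run it from the repo root so the fixtures resolve.

```
# Smoke test for the data pipeline (hypothetical driver, not a repo file).
import load_sst_data
import utils

# SST trees -> Example objects with binary labels, plus the raw token vocab.
examples, vocab = load_sst_data.load_data('test_trees.txt')

# Keep only words that also appear in the (tiny) embedding fixture, plus the
# *PADDING* and "_" UNK entries, then load their 50d vectors.
vocab = utils.BuildVocabularyForTextEmbeddingFile(
    'test_embeddings.txt', vocab, utils.CORE_VOCABULARY)
embeddings = utils.LoadEmbeddingsFromText(vocab, 50, 'test_embeddings.txt')
print(embeddings.shape)  # (len(vocab), 50); unmatched rows stay zero

# Map tokens to integer ids in place; OOV words fall back to UNK.
examples = utils.Tokenize(examples, vocab)

# Draw one bucketed batch of two examples.
batch = next(utils.MakeDataIterator(examples, 2, smart_batching=True,
                                    forever=True)())

# dynamic=True: a list of reversed token-id lists, one per example.
data, target, lengths = utils.make_batch(batch, dynamic=True)
print(lengths)           # true sentence lengths

# dynamic=False: a left-padded (batch_size, max_len) LongTensor.
data, target, lengths = utils.make_batch(batch, dynamic=False)
print(data.size())       # torch.Size([2, max_len])
print(target.size())     # torch.Size([2])
```

Because smart batching shuffles, buckets by length, and only then slices batches, the examples in a batch tend to have similar lengths, so the static layout wastes little padding; that is the point of the `smart_batching` flag configured in `main.py`.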